Snap for 7604940 from 0ed3f231aa524d165161e4e252b26e381701aa2e to sc-release

Change-Id: I3e3b62101d0dca6c1f99fbda8927dd26bb0eb365
diff --git a/camera/Android.bp b/camera/Android.bp
index e75f72a..f8a534c 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -50,6 +50,8 @@
         "fake-pipeline2/JpegCompressor.cpp",
         "EmulatedCamera3.cpp",
         "EmulatedFakeCamera3.cpp",
+        "CameraRotator.cpp",
+        "EmulatedFakeRotatingCamera3.cpp",
         "EmulatedQemuCamera3.cpp",
         "qemu-pipeline3/QemuSensor.cpp",
         "Exif.cpp",
diff --git a/camera/CameraRotator.cpp b/camera/CameraRotator.cpp
new file mode 100644
index 0000000..9ec8f40
--- /dev/null
+++ b/camera/CameraRotator.cpp
@@ -0,0 +1,678 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Uncomment LOG_NDEBUG to enable verbose logging, and uncomment both LOG_NDEBUG
+// *and* LOG_NNDEBUG to enable very verbose logging.
+
+//#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0
+
+#define LOG_TAG "EmulatedCamera3_CameraRotator"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+
+#ifdef DEBUG_ROTATING_CAMERA
+#define DDD(fmt,...) ALOGD("function: %s line: %d: " fmt, __func__, __LINE__, ##__VA_ARGS__)
+#else
+#define DDD(fmt,...) ((void)0)
+#endif
+
+#include "CameraRotator.h"
+#include "system/camera_metadata.h"
+#include <gralloc_cb_bp.h>
+
+#include <cmath>
+#include <cstdlib>
+#include <linux/videodev2.h>
+#include <log/log.h>
+#include <cutils/properties.h>
+#include <ui/Rect.h>
+#include <utils/Trace.h>
+
+namespace android {
+
+const nsecs_t CameraRotator::kExposureTimeRange[2] =
+        {1000L, 300000000L};  // 1 us - 0.3 sec
+const nsecs_t CameraRotator::kFrameDurationRange[2] =
+        {33331760L, 300000000L};  // ~1/30 s - 0.3 sec
+const nsecs_t CameraRotator::kMinVerticalBlank = 10000L;
+
+const int32_t CameraRotator::kSensitivityRange[2] = {100, 1600};
+const uint32_t CameraRotator::kDefaultSensitivity = 100;
+
+const char CameraRotator::kHostCameraVerString[] = "ro.boot.qemu.camera_protocol_ver";
+
+#define GRALLOC_PROP "ro.hardware.gralloc"
+
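+// Reads the gralloc implementation from the GRALLOC_PROP system property to
+// determine whether minigbm or the goldfish gralloc is in use; the capture
+// paths below pick different pixel formats and buffer handling based on this.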
+static bool getIsMinigbmFromProperty() {
+    char grallocValue[PROPERTY_VALUE_MAX] = "";
+    property_get(GRALLOC_PROP, grallocValue, "");
+    bool isValid = grallocValue[0] != '\0';
+
+    if (!isValid) return false;
+
+    bool res = 0 == strcmp("minigbm", grallocValue);
+
+    if (res) {
+        DDD("%s: Is using minigbm, in minigbm mode.\n", __func__);
+    } else {
+        DDD("%s: Is not using minigbm, in goldfish mode.\n", __func__);
+    }
+
+    return res;
+}
+
+CameraRotator::CameraRotator(int width, int height):
+        Thread(false),
+        mWidth(width),
+        mHeight(height),
+        mActiveArray{0, 0, width, height},
+        mLastRequestWidth(-1),
+        mLastRequestHeight(-1),
+        mDeviceName("rotatingcamera"),
+        mGBA(&GraphicBufferAllocator::get()),
+        mGBM(nullptr),
+        mGotVSync(false),
+        mFrameDuration(kFrameDurationRange[0]),
+        mNextBuffers(nullptr),
+        mFrameNumber(0),
+        mCapturedBuffers(nullptr),
+        mListener(nullptr),
+        mIsMinigbm(getIsMinigbmFromProperty()) {
+    mHostCameraVer = 0; //property_get_int32(kHostCameraVerString, 0);
+    DDD("CameraRotator created with pixel array %d x %d", width, height);
+}
+
+CameraRotator::~CameraRotator() {
+    shutDown();
+}
+
+status_t CameraRotator::startUp() {
+    DDD("%s: Entered", __FUNCTION__);
+
+    mCapturedBuffers = nullptr;
+    status_t res = run("EmulatedQemuCamera3::CameraRotator",
+            ANDROID_PRIORITY_URGENT_DISPLAY);
+
+    if (res != OK) {
+        ALOGE("Unable to start up sensor capture thread: %d", res);
+    }
+
+    mRender.connectDevice();
+
+    mState = ECDS_CONNECTED;
+
+    return res;
+}
+
+status_t CameraRotator::shutDown() {
+    DDD("%s: Entered", __FUNCTION__);
+
+    status_t res = requestExitAndWait();
+    if (res != OK) {
+        ALOGE("Unable to shut down sensor capture thread: %d", res);
+    }
+
+    if (res == NO_ERROR) {
+        mState = ECDS_CONNECTED;
+    }
+
+    mRender.stopDevice();
+
+    mRender.disconnectDevice();
+
+    return res;
+}
+
+void CameraRotator::setExposureTime(uint64_t ns) {
+    (void)ns;
+}
+
+void CameraRotator::setSensitivity(uint32_t gain) {
+    (void)gain;
+}
+
+void CameraRotator::setFrameDuration(uint64_t ns) {
+    Mutex::Autolock lock(mControlMutex);
+    DDD("Frame duration set to %f", ns/1000000.f);
+    mFrameDuration = ns;
+}
+
+void CameraRotator::setDestinationBuffers(Buffers *buffers) {
+    Mutex::Autolock lock(mControlMutex);
+    mNextBuffers = buffers;
+}
+
+void CameraRotator::setFrameNumber(uint32_t frameNumber) {
+    Mutex::Autolock lock(mControlMutex);
+    mFrameNumber = frameNumber;
+}
+
+bool CameraRotator::waitForVSync(nsecs_t reltime) {
+    int res;
+    Mutex::Autolock lock(mControlMutex);
+
+    mGotVSync = false;
+    res = mVSync.waitRelative(mControlMutex, reltime);
+    if (res != OK && res != TIMED_OUT) {
+        ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
+        return false;
+    }
+    return mGotVSync;
+}
+
+bool CameraRotator::waitForNewFrame(nsecs_t reltime, nsecs_t *captureTime) {
+    Mutex::Autolock lock(mReadoutMutex);
+    if (mCapturedBuffers == nullptr) {
+        int res;
+        res = mReadoutAvailable.waitRelative(mReadoutMutex, reltime);
+        if (res == TIMED_OUT) {
+            return false;
+        } else if (res != OK || mCapturedBuffers == nullptr) {
+            ALOGE("Error waiting for sensor readout signal: %d", res);
+            return false;
+        }
+    }
+    mReadoutComplete.signal();
+
+    *captureTime = mCaptureTime;
+    mCapturedBuffers = nullptr;
+    return true;
+}
+
+CameraRotator::CameraRotatorListener::~CameraRotatorListener() {
+}
+
+void CameraRotator::setCameraRotatorListener(CameraRotatorListener *listener) {
+    Mutex::Autolock lock(mControlMutex);
+    mListener = listener;
+}
+
+status_t CameraRotator::readyToRun() {
+    DDD("Starting up sensor thread");
+    mStartupTime = systemTime();
+    mNextCaptureTime = 0;
+    mNextCapturedBuffers = nullptr;
+    return OK;
+}
+
+bool CameraRotator::threadLoop() {
+    ATRACE_CALL();
+    /*
+     * Stages are out-of-order relative to a single frame's processing, but
+     * in-order in time.
+     */
+
+    /*
+     * Stage 1: Read in latest control parameters.
+     */
+    uint64_t frameDuration;
+    Buffers *nextBuffers;
+    uint32_t frameNumber;
+    CameraRotatorListener *listener = nullptr;
+    {
+        // Lock while we're grabbing readout variables.
+        Mutex::Autolock lock(mControlMutex);
+        frameDuration = mFrameDuration;
+        nextBuffers = mNextBuffers;
+        frameNumber = mFrameNumber;
+        listener = mListener;
+        // Don't reuse a buffer set.
+        mNextBuffers = nullptr;
+
+        // Signal VSync for start of readout.
+        DDD("CameraRotator VSync");
+        mGotVSync = true;
+        mVSync.signal();
+    }
+
+    /*
+     * Stage 3: Read out latest captured image.
+     */
+
+    Buffers *capturedBuffers = nullptr;
+    nsecs_t captureTime = 0;
+
+    nsecs_t startRealTime = systemTime();
+    /*
+     * Stagefright cares about system time for timestamps, so base simulated
+     * time on that.
+     */
+    nsecs_t simulatedTime = startRealTime;
+    nsecs_t frameEndRealTime = startRealTime + frameDuration;
+
+    if (mNextCapturedBuffers != nullptr) {
+        DDD("CameraRotator starting readout");
+        /*
+         * Pretend we're doing readout now; will signal once enough time has
+         * elapsed.
+         */
+        capturedBuffers = mNextCapturedBuffers;
+        captureTime = mNextCaptureTime;
+    }
+
+    /*
+     * TODO: Move this signal to another thread to simulate readout time
+     * properly.
+     */
+    if (capturedBuffers != nullptr) {
+        DDD("CameraRotator readout complete");
+        Mutex::Autolock lock(mReadoutMutex);
+        if (mCapturedBuffers != nullptr) {
+            DDD("Waiting for readout thread to catch up!");
+            mReadoutComplete.wait(mReadoutMutex);
+        }
+
+        mCapturedBuffers = capturedBuffers;
+        mCaptureTime = captureTime;
+        mReadoutAvailable.signal();
+        capturedBuffers = nullptr;
+    }
+
+    /*
+     * Stage 2: Capture new image.
+     */
+    mNextCaptureTime = simulatedTime;
+    mNextCapturedBuffers = nextBuffers;
+
+    if (mNextCapturedBuffers != nullptr) {
+
+        int64_t timestamp = 0L;
+
+        // Might be adding more buffers, so size isn't constant.
+        for (size_t i = 0; i < mNextCapturedBuffers->size(); ++i) {
+            const StreamBuffer &b = (*mNextCapturedBuffers)[i];
+            DDD("CameraRotator capturing buffer %d: stream %d,"
+                    " %d x %d, format 0x%x, stride %d, buf %p, img %p",
+                    i, b.streamId, b.width, b.height, b.format, b.stride,
+                    b.buffer, b.img);
+            switch (b.format) {
+                case HAL_PIXEL_FORMAT_RGB_888:
+                    captureRGB(b.img, b.width, b.height, b.stride, &timestamp);
+                    DDD("here fmt is HAL_PIXEL_FORMAT_RGB_888: 0x%x", HAL_PIXEL_FORMAT_RGB_888);
+                    break;
+                case HAL_PIXEL_FORMAT_RGBA_8888:
+                    DDD("capturing HAL_PIXEL_FORMAT_RGBA_8888 (0x%x)", HAL_PIXEL_FORMAT_RGBA_8888);
+                    if (mHostCameraVer == 1 && !mIsMinigbm) {
+                        captureRGBA(b.width, b.height, b.stride, &timestamp, b.buffer);
+                    } else {
+                        captureRGBA(b.img, b.width, b.height, b.stride, &timestamp);
+                    }
+                    break;
+                case HAL_PIXEL_FORMAT_BLOB:
+                    DDD("here fmt is HAL_PIXEL_FORMAT_BLOB : 0x%x", HAL_PIXEL_FORMAT_BLOB);
+                    if (b.dataSpace == HAL_DATASPACE_DEPTH) {
+                        ALOGE("%s: Depth clouds unsupported", __FUNCTION__);
+                    } else {
+                        /*
+                         * Add auxiliary buffer of the right size. Assumes only
+                         * one BLOB (JPEG) buffer is in mNextCapturedBuffers.
+                         */
+                    DDD("blobhere");
+                        StreamBuffer bAux;
+                        bAux.streamId = 0;
+                        bAux.width = b.width;
+                        bAux.height = b.height;
+                        bAux.format = HAL_PIXEL_FORMAT_YCbCr_420_888;
+                        bAux.stride = b.width;
+                        if (mHostCameraVer == 1 && !mIsMinigbm) {
+                            const uint64_t usage =
+                                GRALLOC_USAGE_HW_CAMERA_READ |
+                                GRALLOC_USAGE_HW_CAMERA_WRITE |
+                                GRALLOC_USAGE_HW_TEXTURE;
+                            const uint64_t graphicBufferId = 0; // not used
+                            const uint32_t layerCount = 1;
+                            buffer_handle_t handle;
+                            uint32_t stride;
+
+                    DDD("allocate buffer here fmt is HAL_PIXEL_FORMAT_YCbCr_420_888: 0x%x", HAL_PIXEL_FORMAT_YCbCr_420_888);
+                            status_t status = mGBA->allocate(
+                                bAux.width, bAux.height, bAux.format,
+                                layerCount, usage,
+                                &handle, &stride,
+                                graphicBufferId, "CameraRotator");
+                            if (status != OK) {
+                                LOG_ALWAYS_FATAL("allocate failed");
+                            }
+
+                            android_ycbcr ycbcr = {};
+                            mGBM->lockYCbCr(handle,
+                                            GRALLOC_USAGE_HW_CAMERA_WRITE,
+                                            Rect(0, 0, bAux.width, bAux.height),
+                                            &ycbcr);
+
+                            bAux.buffer = new buffer_handle_t;
+                            *bAux.buffer = handle;
+                            bAux.img = (uint8_t*)ycbcr.y;
+                        } else {
+                            bAux.buffer = nullptr;
+                            // TODO: Reuse these.
+                            bAux.img = new uint8_t[b.width * b.height * 3];
+                        }
+                        mNextCapturedBuffers->push_back(bAux);
+                    }
+                    break;
+                case HAL_PIXEL_FORMAT_YCbCr_420_888:
+                    DDD("capturing HAL_PIXEL_FORMAT_YCbCr_420_888 (0x%x)", HAL_PIXEL_FORMAT_YCbCr_420_888);
+                    if (mHostCameraVer == 1 && !mIsMinigbm) {
+                        captureYU12(b.width, b.height, b.stride, &timestamp, b.buffer);
+                    } else {
+                        captureYU12(b.img, b.width, b.height, b.stride, &timestamp);
+                    }
+                    break;
+                default:
+                    ALOGE("%s: Unknown/unsupported format %x, no output",
+                            __FUNCTION__, b.format);
+                    break;
+            }
+        }
+        if (timestamp != 0UL) {
+          mNextCaptureTime = timestamp;
+        }
+        // Note: we have to do this after the actual capture so that the
+        // capture time is accurate as reported from QEMU.
+        if (listener != nullptr) {
+            listener->onCameraRotatorEvent(frameNumber, CameraRotatorListener::EXPOSURE_START,
+                                        mNextCaptureTime);
+        }
+    }
+
+    DDD("CameraRotator vertical blanking interval");
+    nsecs_t workDoneRealTime = systemTime();
+    const nsecs_t timeAccuracy = 2e6;  // 2 ms of imprecision is ok.
+    if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
+        timespec t;
+        t.tv_sec = (frameEndRealTime - workDoneRealTime) / 1000000000L;
+        t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;
+
+        int ret;
+        do {
+            ret = nanosleep(&t, &t);
+        } while (ret != 0);
+    }
+    DDD("Frame cycle took %d ms, target %d ms",
+            (int) ((systemTime() - startRealTime) / 1000000),
+            (int) (frameDuration / 1000000));
+    return true;
+}
+
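+// Capture an RGBA frame into the caller-provided CPU buffer, (re)starting the
+// emulated camera device whenever the requested dimensions change from the
+// previous request.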
+void CameraRotator::captureRGBA(uint8_t *img, uint32_t width, uint32_t height,
+        uint32_t stride, int64_t *timestamp) {
+    ATRACE_CALL();
+    status_t res;
+    if (width != (uint32_t)mLastRequestWidth ||
+        height != (uint32_t)mLastRequestHeight) {
+        ALOGI("%s: Dimensions for the current request (%dx%d) differ "
+              "from the previous request (%dx%d). Restarting camera",
+                __FUNCTION__, width, height, mLastRequestWidth,
+                mLastRequestHeight);
+
+        if (mLastRequestWidth != -1 || mLastRequestHeight != -1) {
+            // We only need to stop the camera if this isn't the first request.
+
+            // Stop the camera device.
+            res = queryStop();
+            if (res == NO_ERROR) {
+                mState = ECDS_CONNECTED;
+                DDD("%s: Qemu camera device '%s' is stopped",
+                        __FUNCTION__, (const char*) mDeviceName);
+            } else {
+                ALOGE("%s: Unable to stop device '%s'",
+                        __FUNCTION__, (const char*) mDeviceName);
+            }
+        }
+
+        /*
+         * Host Camera always assumes V4L2_PIX_FMT_RGB32 as the preview format,
+         * and asks for the video format from the pixFmt parameter, which is
+         * V4L2_PIX_FMT_YUV420 in our implementation.
+         */
+        uint32_t pixFmt = V4L2_PIX_FMT_YUV420;
+        res = queryStart(pixFmt, width, height);
+        if (res == NO_ERROR) {
+            mLastRequestWidth = width;
+            mLastRequestHeight = height;
+            DDD("%s: Qemu camera device '%s' is started for %.4s[%dx%d] frames",
+                    __FUNCTION__, (const char*) mDeviceName,
+                    reinterpret_cast<const char*>(&pixFmt),
+                    mWidth, mHeight);
+            mState = ECDS_STARTED;
+        } else {
+            ALOGE("%s: Unable to start device '%s' for %.4s[%dx%d] frames",
+                    __FUNCTION__, (const char*) mDeviceName,
+                    reinterpret_cast<const char*>(&pixFmt),
+                    mWidth, mHeight);
+            return;
+        }
+    }
+    if (width != stride) {
+        ALOGW("%s: expect stride (%d), actual stride (%d)", __FUNCTION__,
+              width, stride);
+    }
+
+    // Since the format is V4L2_PIX_FMT_RGB32, we need 4 bytes per pixel.
+    size_t bufferSize = width * height * 4;
+    // Apply no white balance or exposure compensation.
+    float whiteBalance[] = {1.0f, 1.0f, 1.0f};
+    float exposureCompensation = 1.0f;
+    // Request a preview frame from the emulated device.
+    queryFrame(nullptr, img, 0, bufferSize, whiteBalance[0],
+            whiteBalance[1], whiteBalance[2],
+            exposureCompensation, timestamp);
+
+    DDD("RGBA sensor image captured");
+}
+
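+// Capture an RGBA frame directly into a gralloc buffer, addressed by its mmap
+// offset; used when the host camera protocol is version 1 and gralloc is not
+// minigbm.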
+void CameraRotator::captureRGBA(uint32_t width, uint32_t height,
+        uint32_t stride, int64_t *timestamp, buffer_handle_t* handle) {
+    ATRACE_CALL();
+    status_t res;
+    if (mLastRequestWidth == -1 || mLastRequestHeight == -1) {
+        uint32_t pixFmt = V4L2_PIX_FMT_YUV420;
+        res = queryStart();
+        if (res == NO_ERROR) {
+            mLastRequestWidth = width;
+            mLastRequestHeight = height;
+            DDD("%s: Qemu camera device '%s' is started for %.4s[%dx%d] frames",
+                    __FUNCTION__, (const char*) mDeviceName,
+                    reinterpret_cast<const char*>(&pixFmt),
+                    mWidth, mHeight);
+            mState = ECDS_STARTED;
+        } else {
+            ALOGE("%s: Unable to start device '%s' for %.4s[%dx%d] frames",
+                    __FUNCTION__, (const char*) mDeviceName,
+                    reinterpret_cast<const char*>(&pixFmt),
+                    mWidth, mHeight);
+            return;
+        }
+    }
+    if (width != stride) {
+        ALOGW("%s: expect stride (%d), actual stride (%d)", __FUNCTION__,
+              width, stride);
+    }
+
+    float whiteBalance[] = {1.0f, 1.0f, 1.0f};
+    float exposureCompensation = 1.0f;
+    const cb_handle_t* cb = cb_handle_t::from(*handle);
+    LOG_ALWAYS_FATAL_IF(!cb, "Unexpected buffer handle");
+    const uint64_t offset = cb->getMmapedOffset();
+    queryFrame(width, height, V4L2_PIX_FMT_RGB32, offset,
+                                 whiteBalance[0], whiteBalance[1], whiteBalance[2],
+                                 exposureCompensation, timestamp);
+
+    DDD("RGBA sensor image captured");
+}
+
+void CameraRotator::captureRGB(uint8_t *img, uint32_t width, uint32_t height, uint32_t stride, int64_t *timestamp) {
+    ALOGE("%s: Not implemented", __FUNCTION__);
+}
+
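+// Capture a YUV420 frame into the caller-provided CPU buffer via the
+// rotating-scene renderer (mRender), restarting the emulated device when the
+// requested dimensions change.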
+void CameraRotator::captureYU12(uint8_t *img, uint32_t width, uint32_t height, uint32_t stride,
+                             int64_t *timestamp) {
+    ATRACE_CALL();
+    status_t res;
+    if (width != (uint32_t)mLastRequestWidth ||
+        height != (uint32_t)mLastRequestHeight) {
+        ALOGI("%s: Dimensions for the current request (%dx%d) differ "
+              "from the previous request (%dx%d). Restarting camera",
+                __FUNCTION__, width, height, mLastRequestWidth,
+                mLastRequestHeight);
+
+        if (mLastRequestWidth != -1 || mLastRequestHeight != -1) {
+            // We only need to stop the camera if this isn't the first request.
+            // Stop the camera device.
+            res = queryStop();
+            if (res == NO_ERROR) {
+                mState = ECDS_CONNECTED;
+                DDD("%s: Qemu camera device '%s' is stopped",
+                        __FUNCTION__, (const char*) mDeviceName);
+            } else {
+                ALOGE("%s: Unable to stop device '%s'",
+                        __FUNCTION__, (const char*) mDeviceName);
+            }
+        }
+
+        /*
+         * Host Camera always assumes V4L2_PIX_FMT_RGB32 as the preview format,
+         * and asks for the video format from the pixFmt parameter, which is
+         * V4L2_PIX_FMT_YUV420 in our implementation.
+         */
+        uint32_t pixFmt = mIsMinigbm ? V4L2_PIX_FMT_NV12 : V4L2_PIX_FMT_YUV420;
+        res = queryStart(pixFmt, width, height);
+        if (res == NO_ERROR) {
+            mLastRequestWidth = width;
+            mLastRequestHeight = height;
+            DDD("%s: Qemu camera device '%s' is started for %.4s[%dx%d] frames",
+                    __FUNCTION__, (const char*) mDeviceName,
+                    reinterpret_cast<const char*>(&pixFmt),
+                    mWidth, mHeight);
+            mState = ECDS_STARTED;
+        } else {
+            ALOGE("%s: Unable to start device '%s' for %.4s[%dx%d] frames",
+                    __FUNCTION__, (const char*) mDeviceName,
+                    reinterpret_cast<const char*>(&pixFmt),
+                    mWidth, mHeight);
+            return;
+        }
+    }
+    if (width != stride) {
+        ALOGW("%s: expect stride (%d), actual stride (%d)", __FUNCTION__,
+              width, stride);
+    }
+
+    // YUV420 uses 12 bits per pixel: an 8-bit Y plane plus 2x2-subsampled U and V planes.
+    size_t bufferSize = (width * height * 12) / 8;
+    // Apply no white balance or exposure compensation.
+    float whiteBalance[] = {1.0f, 1.0f, 1.0f};
+    float exposureCompensation = 1.0f;
+    // Start the rotating-scene renderer and read a video frame from it.
+    mRender.startDevice(width, height, HAL_PIXEL_FORMAT_YCbCr_420_888);
+    queryFrame(img, nullptr, bufferSize, 0, whiteBalance[0],
+            whiteBalance[1], whiteBalance[2],
+            exposureCompensation, timestamp);
+
+    DDD("YUV420 sensor image captured");
+}
+
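+// Capture a YUV420 frame directly into a gralloc buffer via its mmap offset
+// (host camera protocol version 1 without minigbm).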
+void CameraRotator::captureYU12(uint32_t width, uint32_t height, uint32_t stride,
+                             int64_t *timestamp, buffer_handle_t* handle) {
+    ATRACE_CALL();
+    status_t res;
+    if (mLastRequestWidth == -1 || mLastRequestHeight == -1) {
+        uint32_t pixFmt = V4L2_PIX_FMT_YUV420;
+        res = queryStart();
+        if (res == NO_ERROR) {
+            mLastRequestWidth = width;
+            mLastRequestHeight = height;
+            DDD("%s: Qemu camera device '%s' is started for %.4s[%dx%d] frames",
+                    __FUNCTION__, (const char*) mDeviceName,
+                    reinterpret_cast<const char*>(&pixFmt),
+                    mWidth, mHeight);
+            mState = ECDS_STARTED;
+        } else {
+            ALOGE("%s: Unable to start device '%s' for %.4s[%dx%d] frames",
+                    __FUNCTION__, (const char*) mDeviceName,
+                    reinterpret_cast<const char*>(&pixFmt),
+                    mWidth, mHeight);
+            return;
+        }
+    }
+    if (width != stride) {
+        ALOGW("%s: expect stride (%d), actual stride (%d)", __FUNCTION__,
+              width, stride);
+    }
+
+    float whiteBalance[] = {1.0f, 1.0f, 1.0f};
+    float exposureCompensation = 1.0f;
+    const cb_handle_t* cb = cb_handle_t::from(*handle);
+    LOG_ALWAYS_FATAL_IF(!cb, "Unexpected buffer handle");
+    const uint64_t offset = cb->getMmapedOffset();
+    queryFrame(width, height, V4L2_PIX_FMT_YUV420, offset,
+                                 whiteBalance[0], whiteBalance[1], whiteBalance[2],
+                                 exposureCompensation, timestamp);
+    DDD("YUV420 sensor image captured");
+}
+
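+// Produce a frame through the rotating-scene renderer. Only the video
+// (vframe) path actually produces pixels; the preview path and the
+// white-balance/exposure parameters are currently ignored.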
+status_t CameraRotator::queryFrame(void* vframe,
+                        void* pframe,
+                        size_t vframe_size,
+                        size_t pframe_size,
+                        float r_scale,
+                        float g_scale,
+                        float b_scale,
+                        float exposure_comp,
+                        int64_t* frame_time) {
+    if (vframe) {
+        DDD("producing a video frame from the rotating-scene renderer");
+        mRender.produceFrame(vframe, frame_time);
+    } else if (pframe) {
+        DDD("preview frame requested; nothing to produce for this path");
+    }
+    return NO_ERROR;
+}
+
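+// Offset-based variant used by the gralloc-buffer capture paths; currently a
+// no-op for the rotating camera.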
+status_t CameraRotator::queryFrame(int width,
+                        int height,
+                        uint32_t pixel_format,
+                        uint64_t offset,
+                        float r_scale,
+                        float g_scale,
+                        float b_scale,
+                        float exposure_comp,
+                        int64_t* frame_time) {
+    return NO_ERROR;
+}
+
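+// queryStart()/queryStop() are no-ops here: the rotating camera renders its
+// frames locally through mRender and has no host camera device to control.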
+status_t CameraRotator::queryStop() {
+    return NO_ERROR;
+}
+
+status_t CameraRotator::queryStart() {
+    return NO_ERROR;
+}
+
+status_t CameraRotator::queryStart(uint32_t fmt, int w, int h) {
+    (void)fmt;
+    (void)w;
+    (void)h;
+    return NO_ERROR;
+}
+
+}; // end of namespace android
diff --git a/camera/CameraRotator.h b/camera/CameraRotator.h
new file mode 100644
index 0000000..8920d92
--- /dev/null
+++ b/camera/CameraRotator.h
@@ -0,0 +1,218 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#pragma once
+
+#include "fake-pipeline2/Base.h"
+#include "EmulatedFakeRotatingCameraDevice.h"
+
+#include <ui/GraphicBufferAllocator.h>
+#include <ui/GraphicBufferMapper.h>
+#include <utils/Mutex.h>
+#include <utils/Thread.h>
+#include <utils/Timers.h>
+
+namespace android {
+
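+/*
+ * CameraRotator is the capture thread for the fake rotating camera. It
+ * provides the vsync/readout interface used by EmulatedFakeRotatingCamera3 and
+ * sources its frames from EmulatedFakeRotatingCameraDevice (mRender).
+ */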
+class CameraRotator : private Thread, public virtual RefBase {
+public:
+    CameraRotator(int w, int h);
+    ~CameraRotator();
+
+
+    status_t startUp();
+    status_t shutDown();
+
+
+    void setExposureTime(uint64_t ns);
+    void setFrameDuration(uint64_t ns);
+    void setSensitivity(uint32_t gain);
+
+    /*
+     * Each Buffer in "buffers" must be at least stride*height*2 bytes in size.
+     */
+    void setDestinationBuffers(Buffers *buffers);
+    /*
+     * To simplify tracking the sensor's current frame.
+     */
+    void setFrameNumber(uint32_t frameNumber);
+
+    /*
+     * Synchronizing with sensor operation (vertical sync).
+     */
+
+    /*
+     * Wait until the sensor outputs its next vertical sync signal, meaning it
+     * is starting readout of its latest frame of data.
+     *
+     * Returns:
+     *     true if vertical sync is signaled; false if the wait timed out.
+     */
+    bool waitForVSync(nsecs_t reltime);
+
+    /*
+     * Wait until a new frame has been read out, and then return the time
+     * capture started. May return immediately if a new frame has been pushed
+     * since the last wait for a new frame.
+     *
+     * Returns:
+     *     true if new frame is returned; false if timed out.
+     */
+    bool waitForNewFrame(nsecs_t reltime, nsecs_t *captureTime);
+
+    /*
+     * Interrupt event servicing from the sensor. Only triggers for sensor
+     * cycles that have valid buffers to write to.
+     */
+    struct CameraRotatorListener {
+        enum Event {
+            EXPOSURE_START,
+        };
+
+        virtual void onCameraRotatorEvent(uint32_t frameNumber, Event e,
+                nsecs_t timestamp) = 0;
+        virtual ~CameraRotatorListener();
+    };
+
+    void setCameraRotatorListener(CameraRotatorListener *listener);
+
+    /*
+     * Static Sensor Characteristics
+     */
+    const uint32_t mWidth, mHeight;
+    const uint32_t mActiveArray[4];
+
+    static const nsecs_t kExposureTimeRange[2];
+    static const nsecs_t kFrameDurationRange[2];
+    static const nsecs_t kMinVerticalBlank;
+
+    static const int32_t kSensitivityRange[2];
+    static const uint32_t kDefaultSensitivity;
+
+    static const char kHostCameraVerString[];
+
+  private:
+    int32_t mLastRequestWidth, mLastRequestHeight;
+
+    /*
+     * Defines possible states of the emulated camera device object.
+     */
+    enum EmulatedCameraDeviceState {
+        // Object has been constructed.
+        ECDS_CONSTRUCTED,
+        // Object has been initialized.
+        ECDS_INITIALIZED,
+        // Object has been connected to the physical device.
+        ECDS_CONNECTED,
+        // Camera device has been started.
+        ECDS_STARTED,
+    };
+    // Object state.
+    EmulatedCameraDeviceState mState;
+
+    const char *mDeviceName;
+    GraphicBufferAllocator* mGBA;
+    GraphicBufferMapper*    mGBM;
+
+    // Always lock before accessing control parameters.
+    Mutex mControlMutex;
+    /*
+     * Control Parameters
+     */
+    Condition mVSync;
+    bool mGotVSync;
+    uint64_t mFrameDuration;
+    Buffers *mNextBuffers;
+    uint32_t mFrameNumber;
+
+    // Always lock before accessing readout variables.
+    Mutex mReadoutMutex;
+    /*
+     * Readout Variables
+     */
+    Condition mReadoutAvailable;
+    Condition mReadoutComplete;
+    Buffers *mCapturedBuffers;
+    nsecs_t mCaptureTime;
+    CameraRotatorListener *mListener;
+
+    // Time of sensor startup (used for simulation zero-time point).
+    nsecs_t mStartupTime;
+    int32_t mHostCameraVer;
+    bool mIsMinigbm;
+
+  private:
+    /*
+     * Inherited Thread Virtual Overrides
+     */
+    virtual status_t readyToRun() override;
+    /*
+     * CameraRotator capture operation main loop.
+     */
+    virtual bool threadLoop() override;
+
+    /*
+     * Members only used by the processing thread.
+     */
+    nsecs_t mNextCaptureTime;
+    Buffers *mNextCapturedBuffers;
+
+    void captureRGBA(uint32_t width, uint32_t height, uint32_t stride,
+                     int64_t *timestamp, buffer_handle_t* handle);
+    void captureYU12(uint32_t width, uint32_t height, uint32_t stride,
+                     int64_t *timestamp, buffer_handle_t* handle);
+    void captureRGBA(uint8_t *img, uint32_t width, uint32_t height,
+                     uint32_t stride, int64_t *timestamp);
+    void captureYU12(uint8_t *img, uint32_t width, uint32_t height,
+                     uint32_t stride, int64_t *timestamp);
+    void captureRGB(uint8_t *img, uint32_t width, uint32_t height,
+                    uint32_t stride, int64_t *timestamp);
+
+private:
+
+    EmulatedFakeRotatingCameraDevice    mRender;
+
+    status_t queryStart(uint32_t pixel_format, int width, int height);
+
+    status_t queryStart();
+
+    status_t queryStop();
+
+    status_t queryFrame(void* vframe,
+                        void* pframe,
+                        size_t vframe_size,
+                        size_t pframe_size,
+                        float r_scale,
+                        float g_scale,
+                        float b_scale,
+                        float exposure_comp,
+                        int64_t* frame_time);
+
+
+    status_t queryFrame(int width,
+                        int height,
+                        uint32_t pixel_format,
+                        uint64_t offset,
+                        float r_scale,
+                        float g_scale,
+                        float b_scale,
+                        float exposure_comp,
+                        int64_t* frame_time);
+
+};
+
+};  // end of namespace android
diff --git a/camera/EmulatedCameraFactory.cpp b/camera/EmulatedCameraFactory.cpp
index fb11b41..867bc7c 100755
--- a/camera/EmulatedCameraFactory.cpp
+++ b/camera/EmulatedCameraFactory.cpp
@@ -27,6 +27,7 @@
 #include "EmulatedFakeCamera.h"
 #include "EmulatedFakeCamera2.h"
 #include "EmulatedFakeCamera3.h"
+#include "EmulatedFakeRotatingCamera3.h"
 #include "EmulatedQemuCamera.h"
 #include "EmulatedQemuCamera3.h"
 
@@ -403,7 +404,7 @@
             char prop[PROPERTY_VALUE_MAX];
 
             if (property_get(key, prop, nullptr) > 0) {
-                return std::make_unique<EmulatedFakeCamera>(cameraId, backCamera, module, mGBM);
+                return std::make_unique<EmulatedFakeRotatingCamera3>(cameraId, backCamera, module, mGBM);
             } else {
                 return std::make_unique<EmulatedFakeCamera3>(cameraId, backCamera, module, mGBM);
             }
diff --git a/camera/EmulatedFakeCamera.cpp b/camera/EmulatedFakeCamera.cpp
index 69fabb3..3e853d3 100755
--- a/camera/EmulatedFakeCamera.cpp
+++ b/camera/EmulatedFakeCamera.cpp
@@ -26,7 +26,6 @@
 #include "EmulatedFakeCamera.h"
 #include "EmulatedCameraFactory.h"
 #include "EmulatedFakeCameraDevice.h"
-#include "EmulatedFakeRotatingCameraDevice.h"
 
 namespace android {
 
@@ -38,13 +37,7 @@
           mFacingBack(facingBack),
           mFakeCameraDevice(nullptr)
 {
-    const char *key = "ro.boot.qemu.camera.fake.rotating";
-    char prop[PROPERTY_VALUE_MAX];
-    if (property_get(key, prop, nullptr) > 0) {
-        mFakeCameraDevice = new EmulatedFakeRotatingCameraDevice(this);
-    } else {
-        mFakeCameraDevice = new EmulatedFakeCameraDevice(this);
-    }
+    mFakeCameraDevice = new EmulatedFakeCameraDevice(this);
 }
 
 EmulatedFakeCamera::~EmulatedFakeCamera()
diff --git a/camera/EmulatedFakeRotatingCamera3.cpp b/camera/EmulatedFakeRotatingCamera3.cpp
new file mode 100644
index 0000000..d32c6bf
--- /dev/null
+++ b/camera/EmulatedFakeRotatingCamera3.cpp
@@ -0,0 +1,2705 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * Contains implementation of a class EmulatedFakeRotatingCamera3 that encapsulates
+ * functionality of an advanced fake camera.
+ */
+
+#include <inttypes.h>
+
+//#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0
+#define LOG_TAG "EmulatedCamera_FakeRotatingCamera3"
+#include <cutils/properties.h>
+#include <log/log.h>
+
+#include "EmulatedFakeRotatingCamera3.h"
+#include "EmulatedCameraFactory.h"
+#include <ui/Fence.h>
+#include <ui/Rect.h>
+
+#include "fake-pipeline2/Sensor.h"
+#include "fake-pipeline2/JpegCompressor.h"
+#include <cmath>
+
+#include <vector>
+#include <algorithm>
+
+#if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0
+#define ALOGVV ALOGV
+#else
+#define ALOGVV(...) ((void)0)
+#endif
+
+namespace android {
+
+/**
+ * Constants for camera capabilities
+ */
+
+const int64_t USEC = 1000LL;
+const int64_t MSEC = USEC * 1000LL;
+
+const int32_t EmulatedFakeRotatingCamera3::kAvailableFormats[] = {
+        HAL_PIXEL_FORMAT_RAW16,
+        HAL_PIXEL_FORMAT_BLOB,
+        HAL_PIXEL_FORMAT_RGBA_8888,
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+        // These are handled by YCbCr_420_888
+        //        HAL_PIXEL_FORMAT_YV12,
+        //        HAL_PIXEL_FORMAT_YCrCb_420_SP,
+        HAL_PIXEL_FORMAT_YCbCr_420_888,
+        HAL_PIXEL_FORMAT_Y16
+};
+
+const uint32_t EmulatedFakeRotatingCamera3::kAvailableRawSizes[6] = {
+    640, 480,
+    1280, 720
+    //    mSensorWidth, mSensorHeight
+};
+
+/**
+ * 3A constants
+ */
+
+// Default exposure and gain targets for different scenarios
+const nsecs_t EmulatedFakeRotatingCamera3::kNormalExposureTime       = 10 * MSEC;
+const nsecs_t EmulatedFakeRotatingCamera3::kFacePriorityExposureTime = 30 * MSEC;
+const int     EmulatedFakeRotatingCamera3::kNormalSensitivity        = 100;
+const int     EmulatedFakeRotatingCamera3::kFacePrioritySensitivity  = 400;
+// CTS requires an 8-frame timeout in waitForAeStable.
+const float   EmulatedFakeRotatingCamera3::kExposureTrackRate        = 0.2;
+const int     EmulatedFakeRotatingCamera3::kPrecaptureMinFrames      = 10;
+const int     EmulatedFakeRotatingCamera3::kStableAeMaxFrames        = 100;
+const float   EmulatedFakeRotatingCamera3::kExposureWanderMin        = -2;
+const float   EmulatedFakeRotatingCamera3::kExposureWanderMax        = 1;
+
+/**
+ * Camera device lifecycle methods
+ */
+
+EmulatedFakeRotatingCamera3::EmulatedFakeRotatingCamera3(int cameraId, bool facingBack,
+        struct hw_module_t* module, GraphicBufferMapper* gbm) :
+        EmulatedCamera3(cameraId, module),
+        mFacingBack(facingBack), mGBM(gbm) {
+    ALOGI("Constructing emulated fake camera 3: ID %d, facing %s",
+            mCameraID, facingBack ? "back" : "front");
+
+    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
+        mDefaultTemplates[i] = NULL;
+    }
+}
+
+EmulatedFakeRotatingCamera3::~EmulatedFakeRotatingCamera3() {
+    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
+        if (mDefaultTemplates[i] != NULL) {
+            free_camera_metadata(mDefaultTemplates[i]);
+        }
+    }
+}
+
+status_t EmulatedFakeRotatingCamera3::Initialize() {
+    ALOGV("%s: E", __FUNCTION__);
+    status_t res;
+
+    if (mStatus != STATUS_ERROR) {
+        ALOGE("%s: Already initialized!", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
+    res = getCameraCapabilities();
+    if (res != OK) {
+        ALOGE("%s: Unable to get camera capabilities: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
+
+    res = constructStaticInfo();
+    if (res != OK) {
+        ALOGE("%s: Unable to allocate static info: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
+
+    return EmulatedCamera3::Initialize();
+}
+
+status_t EmulatedFakeRotatingCamera3::connectCamera(hw_device_t** device) {
+    ALOGV("%s: E", __FUNCTION__);
+    Mutex::Autolock l(mLock);
+    status_t res;
+
+    if (mStatus != STATUS_CLOSED) {
+        ALOGE("%s: Can't connect in state %d", __FUNCTION__, mStatus);
+        return INVALID_OPERATION;
+    }
+
+    mSensor = new CameraRotator(mSensorWidth, mSensorHeight);
+    mSensor->setCameraRotatorListener(this);
+
+    res = mSensor->startUp();
+    if (res != NO_ERROR) return res;
+
+    mReadoutThread = new ReadoutThread(this);
+    mJpegCompressor = new JpegCompressor(mGBM);
+
+    res = mReadoutThread->run("EmuCam3::readoutThread");
+    if (res != NO_ERROR) return res;
+
+    // Initialize fake 3A
+
+    mControlMode  = ANDROID_CONTROL_MODE_AUTO;
+    mFacePriority = false;
+    mAeMode       = ANDROID_CONTROL_AE_MODE_ON;
+    mAfMode       = ANDROID_CONTROL_AF_MODE_AUTO;
+    mAwbMode      = ANDROID_CONTROL_AWB_MODE_AUTO;
+    mAeState      = ANDROID_CONTROL_AE_STATE_INACTIVE;
+    mAfState      = ANDROID_CONTROL_AF_STATE_INACTIVE;
+    mAwbState     = ANDROID_CONTROL_AWB_STATE_INACTIVE;
+    mAeCounter    = 0;
+    mAeTargetExposureTime = kNormalExposureTime;
+    mAeCurrentExposureTime = kNormalExposureTime;
+    mAeCurrentSensitivity  = kNormalSensitivity;
+
+    return EmulatedCamera3::connectCamera(device);
+}
+
+status_t EmulatedFakeRotatingCamera3::closeCamera() {
+    ALOGV("%s: E", __FUNCTION__);
+    status_t res;
+    {
+        Mutex::Autolock l(mLock);
+        if (mStatus == STATUS_CLOSED) return OK;
+
+        res = mSensor->shutDown();
+        if (res != NO_ERROR) {
+            ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
+            return res;
+        }
+        mSensor.clear();
+
+        mReadoutThread->requestExit();
+    }
+
+    mReadoutThread->join();
+
+    {
+        Mutex::Autolock l(mLock);
+        // Clear out private stream information
+        for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) {
+            PrivateStreamInfo *privStream =
+                    static_cast<PrivateStreamInfo*>((*s)->priv);
+            delete privStream;
+            (*s)->priv = NULL;
+        }
+        mStreams.clear();
+        mReadoutThread.clear();
+    }
+
+    return EmulatedCamera3::closeCamera();
+}
+
+status_t EmulatedFakeRotatingCamera3::getCameraInfo(struct camera_info *info) {
+    info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
+    info->orientation = gEmulatedCameraFactory.getFakeCameraOrientation();
+    return EmulatedCamera3::getCameraInfo(info);
+}
+
+/**
+ * Camera3 interface methods
+ */
+
+status_t EmulatedFakeRotatingCamera3::configureStreams(
+        camera3_stream_configuration *streamList) {
+    Mutex::Autolock l(mLock);
+    ALOGV("%s: %d streams", __FUNCTION__, streamList->num_streams);
+
+    if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) {
+        ALOGE("%s: Cannot configure streams in state %d",
+                __FUNCTION__, mStatus);
+        return NO_INIT;
+    }
+
+    /**
+     * Validate the input stream configuration.
+     */
+    if (streamList == NULL) {
+        ALOGE("%s: NULL stream configuration", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    if (streamList->streams == NULL) {
+        ALOGE("%s: NULL stream list", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    if (streamList->num_streams < 1) {
+        ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__,
+                streamList->num_streams);
+        return BAD_VALUE;
+    }
+
+    camera3_stream_t *inputStream = NULL;
+    for (size_t i = 0; i < streamList->num_streams; i++) {
+        camera3_stream_t *newStream = streamList->streams[i];
+
+        if (newStream == NULL) {
+            ALOGE("%s: Stream index %zu was NULL",
+                  __FUNCTION__, i);
+            return BAD_VALUE;
+        }
+
+        ALOGV("%s: Stream %p (id %zu), type %d, usage 0x%x, format 0x%x "
+              "width 0x%x, height 0x%x",
+                __FUNCTION__, newStream, i, newStream->stream_type,
+                newStream->usage,
+                newStream->format,
+                newStream->width,
+                newStream->height);
+
+        if (newStream->stream_type == CAMERA3_STREAM_INPUT ||
+            newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
+            if (inputStream != NULL) {
+
+                ALOGE("%s: Multiple input streams requested!", __FUNCTION__);
+                return BAD_VALUE;
+            }
+            inputStream = newStream;
+        }
+
+        if (newStream->stream_type != CAMERA3_STREAM_INPUT) {
+            if (newStream->rotation < CAMERA3_STREAM_ROTATION_0 ||
+                newStream->rotation > CAMERA3_STREAM_ROTATION_270) {
+                ALOGE("%s: Unsupported stream rotation 0x%x requested",
+                      __FUNCTION__, newStream->rotation);
+                return BAD_VALUE;
+            }
+        }
+
+        if (newStream->width == 0 || newStream->height == 0 ||
+            newStream->width > (uint32_t)mSensorWidth ||
+            newStream->height > (uint32_t)mSensorHeight) {
+            ALOGE("%s: Unsupported stream width 0x%x height 0x%x",
+                  __FUNCTION__, newStream->width, newStream->height);
+            return BAD_VALUE;
+        }
+
+        bool validFormat = false;
+        for (size_t f = 0;
+             f < sizeof(kAvailableFormats)/sizeof(kAvailableFormats[0]);
+             f++) {
+            if (newStream->format == kAvailableFormats[f]) {
+                validFormat = true;
+                break;
+            }
+        }
+        if (!validFormat) {
+            ALOGE("%s: Unsupported stream format 0x%x requested",
+                    __FUNCTION__, newStream->format);
+            return BAD_VALUE;
+        }
+    }
+    mInputStream = inputStream;
+
+    /**
+     * Initially mark all existing streams as not alive
+     */
+    for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) {
+        PrivateStreamInfo *privStream =
+                static_cast<PrivateStreamInfo*>((*s)->priv);
+        privStream->alive = false;
+    }
+
+    /**
+     * Find new streams and mark still-alive ones
+     */
+    for (size_t i = 0; i < streamList->num_streams; i++) {
+        camera3_stream_t *newStream = streamList->streams[i];
+        if (newStream->priv == NULL) {
+            // New stream, construct info
+            PrivateStreamInfo *privStream = new PrivateStreamInfo();
+            privStream->alive = true;
+
+            newStream->max_buffers = kMaxBufferCount;
+            newStream->priv = privStream;
+            mStreams.push_back(newStream);
+        } else {
+            // Existing stream, mark as still alive.
+            PrivateStreamInfo *privStream =
+                    static_cast<PrivateStreamInfo*>(newStream->priv);
+            privStream->alive = true;
+        }
+        // Always update usage and max buffers
+        newStream->max_buffers = kMaxBufferCount;
+        switch (newStream->stream_type) {
+            case CAMERA3_STREAM_OUTPUT:
+                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
+                break;
+            case CAMERA3_STREAM_INPUT:
+                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
+                break;
+            case CAMERA3_STREAM_BIDIRECTIONAL:
+                newStream->usage |= (GRALLOC_USAGE_HW_CAMERA_READ |
+                        GRALLOC_USAGE_HW_CAMERA_WRITE);
+                break;
+        }
+        // Set the buffer format, inline with gralloc implementation
+        if (newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+            if (newStream->usage & GRALLOC_USAGE_HW_CAMERA_WRITE) {
+                if (newStream->usage & GRALLOC_USAGE_HW_TEXTURE) {
+                    newStream->format = HAL_PIXEL_FORMAT_YCbCr_420_888;
+                }
+                else if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) {
+                    newStream->format = HAL_PIXEL_FORMAT_YCbCr_420_888;
+                }
+                else {
+                    newStream->format = HAL_PIXEL_FORMAT_RGB_888;
+                }
+            }
+        }
+    }
+
+    /**
+     * Reap the dead streams
+     */
+    for (StreamIterator s = mStreams.begin(); s != mStreams.end();) {
+        PrivateStreamInfo *privStream =
+                static_cast<PrivateStreamInfo*>((*s)->priv);
+        if (!privStream->alive) {
+            (*s)->priv = NULL;
+            delete privStream;
+            s = mStreams.erase(s);
+        } else {
+            ++s;
+        }
+    }
+
+    /**
+     * Can't reuse settings across configure call
+     */
+    mPrevSettings.clear();
+
+    return OK;
+}
+
+status_t EmulatedFakeRotatingCamera3::registerStreamBuffers(
+        const camera3_stream_buffer_set *bufferSet) {
+    ALOGV("%s: E", __FUNCTION__);
+    Mutex::Autolock l(mLock);
+
+    // Should not be called in HAL versions >= 3.2
+
+    ALOGE("%s: Should not be invoked on new HALs!",
+            __FUNCTION__);
+    return NO_INIT;
+}
+
+const camera_metadata_t* EmulatedFakeRotatingCamera3::constructDefaultRequestSettings(
+        int type) {
+    ALOGV("%s: E", __FUNCTION__);
+    Mutex::Autolock l(mLock);
+
+    if (type < 0 || type >= CAMERA3_TEMPLATE_COUNT) {
+        ALOGE("%s: Unknown request settings template: %d",
+                __FUNCTION__, type);
+        return NULL;
+    }
+
+    if (!hasCapability(BACKWARD_COMPATIBLE) && type != CAMERA3_TEMPLATE_PREVIEW) {
+        ALOGE("%s: Template %d not supported w/o BACKWARD_COMPATIBLE capability",
+                __FUNCTION__, type);
+        return NULL;
+    }
+
+    /**
+     * Cache is not just an optimization - pointer returned has to live at
+     * least as long as the camera device instance does.
+     */
+    if (mDefaultTemplates[type] != NULL) {
+        return mDefaultTemplates[type];
+    }
+
+    CameraMetadata settings;
+
+    /** android.request */
+
+    static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
+    settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);
+
+    static const int32_t requestId = 0;
+    settings.update(ANDROID_REQUEST_ID, &requestId, 1);
+
+    static const int32_t frameCount = 0;
+    settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);
+
+    /** android.lens */
+
+    static const float focalLength = 5.0f;
+    settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const float focusDistance = 0;
+        settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);
+
+        static const float aperture = 2.8f;
+        settings.update(ANDROID_LENS_APERTURE, &aperture, 1);
+
+        static const float filterDensity = 0;
+        settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);
+
+        static const uint8_t opticalStabilizationMode =
+                ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
+        settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
+                &opticalStabilizationMode, 1);
+
+        // FOCUS_RANGE set only in frame
+    }
+
+    /** android.sensor */
+
+    if (hasCapability(MANUAL_SENSOR)) {
+        const int64_t exposureTime = 10 * MSEC;
+        settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);
+
+        const int64_t frameDuration = 33333333L; // 1/30 s
+        settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
+
+        const int32_t sensitivity = 100;
+        settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
+    }
+
+    // TIMESTAMP set only in frame
+
+    /** android.flash */
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
+        settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
+
+        static const uint8_t flashPower = 10;
+        settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);
+
+        static const int64_t firingTime = 0;
+        settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
+    }
+
+    /** Processing block modes */
+    if (hasCapability(MANUAL_POST_PROCESSING)) {
+        uint8_t hotPixelMode = 0;
+        uint8_t demosaicMode = 0;
+        uint8_t noiseMode = 0;
+        uint8_t shadingMode = 0;
+        uint8_t colorMode = 0;
+        uint8_t tonemapMode = 0;
+        uint8_t edgeMode = 0;
+        switch (type) {
+            case CAMERA3_TEMPLATE_STILL_CAPTURE:
+                // fall-through
+            case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
+                // fall-through
+            case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
+                hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
+                demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY;
+                noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
+                shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY;
+                colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
+                tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
+                edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY;
+                break;
+            case CAMERA3_TEMPLATE_PREVIEW:
+                // fall-through
+            case CAMERA3_TEMPLATE_VIDEO_RECORD:
+                // fall-through
+            default:
+                hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
+                demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
+                noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST;
+                shadingMode = ANDROID_SHADING_MODE_FAST;
+                colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
+                tonemapMode = ANDROID_TONEMAP_MODE_FAST;
+                edgeMode = ANDROID_EDGE_MODE_FAST;
+                break;
+        }
+        settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
+        settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
+        settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1);
+        settings.update(ANDROID_SHADING_MODE, &shadingMode, 1);
+        settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
+        settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
+        settings.update(ANDROID_EDGE_MODE, &edgeMode, 1);
+    }
+
+    /** android.colorCorrection */
+
+    if (hasCapability(MANUAL_POST_PROCESSING)) {
+        static const camera_metadata_rational colorTransform[9] = {
+            {1,1}, {0,1}, {0,1},
+            {0,1}, {1,1}, {0,1},
+            {0,1}, {0,1}, {1,1}
+        };
+        settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);
+
+        static const float colorGains[4] = {
+            1.0f, 1.0f, 1.0f, 1.0f
+        };
+        settings.update(ANDROID_COLOR_CORRECTION_GAINS, colorGains, 4);
+    }
+
+    /** android.tonemap */
+
+    if (hasCapability(MANUAL_POST_PROCESSING)) {
+        static const float tonemapCurve[4] = {
+            0.f, 0.f,
+            1.f, 1.f
+        };
+        settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
+        settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
+        settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);
+    }
+
+    /** android.scaler */
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        const int32_t cropRegion[4] = {
+            0, 0, mSensorWidth, mSensorHeight
+        };
+        settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4);
+    }
+
+    /** android.jpeg */
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const uint8_t jpegQuality = 80;
+        settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1);
+
+        static const int32_t thumbnailSize[2] = {
+            320, 240
+        };
+        settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);
+
+        static const uint8_t thumbnailQuality = 80;
+        settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);
+
+        static const double gpsCoordinates[3] = {
+            0, 0, 0
+        };
+        settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3);
+
+        static const uint8_t gpsProcessingMethod[32] = "None";
+        settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);
+
+        static const int64_t gpsTimestamp = 0;
+        settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);
+
+        static const int32_t jpegOrientation = 0;
+        settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
+    }
+
+    /** android.stats */
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const uint8_t faceDetectMode =
+                ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
+        settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
+
+        static const uint8_t hotPixelMapMode =
+                ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
+        settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
+    }
+
+    // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
+    // sharpnessMap only in frames
+
+    /** android.control */
+
+    uint8_t controlIntent = 0;
+    switch (type) {
+      case CAMERA3_TEMPLATE_PREVIEW:
+        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
+        break;
+      case CAMERA3_TEMPLATE_STILL_CAPTURE:
+        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
+        break;
+      case CAMERA3_TEMPLATE_VIDEO_RECORD:
+        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
+        break;
+      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
+        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
+        break;
+      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
+        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
+        break;
+      case CAMERA3_TEMPLATE_MANUAL:
+        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
+        break;
+      default:
+        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
+        break;
+    }
+    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
+
+    const uint8_t controlMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
+            ANDROID_CONTROL_MODE_OFF :
+            ANDROID_CONTROL_MODE_AUTO;
+    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
+
+    int32_t aeTargetFpsRange[2] = {
+        15, 30
+    };
+    if (type == CAMERA3_TEMPLATE_VIDEO_RECORD || type == CAMERA3_TEMPLATE_VIDEO_SNAPSHOT) {
+        aeTargetFpsRange[0] = 30;
+    }
+    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+
+        static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
+        settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
+
+        const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
+        settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
+
+        const uint8_t aeMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
+                ANDROID_CONTROL_AE_MODE_OFF :
+                ANDROID_CONTROL_AE_MODE_ON;
+        settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
+
+        static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
+        settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
+
+        static const int32_t controlRegions[5] = {
+            0, 0, 0, 0, 0
+        };
+        settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);
+
+        static const int32_t aeExpCompensation = 0;
+        settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);
+
+
+        static const uint8_t aeAntibandingMode =
+                ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
+        settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);
+
+        static const uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
+        settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);
+
+        const uint8_t awbMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
+                ANDROID_CONTROL_AWB_MODE_OFF :
+                ANDROID_CONTROL_AWB_MODE_AUTO;
+        settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
+
+        static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
+        settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
+
+        uint8_t afMode = 0;
+        switch (type) {
+            case CAMERA3_TEMPLATE_PREVIEW:
+            case CAMERA3_TEMPLATE_STILL_CAPTURE:
+            case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
+                afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
+                break;
+            case CAMERA3_TEMPLATE_VIDEO_RECORD:
+            case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
+                afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
+                break;
+            case CAMERA3_TEMPLATE_MANUAL:
+                afMode = ANDROID_CONTROL_AF_MODE_OFF;
+                break;
+            default:
+                afMode = ANDROID_CONTROL_AF_MODE_AUTO;
+                break;
+        }
+
+        settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);
+
+        settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);
+
+        const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
+        settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);
+
+        static const uint8_t vstabMode =
+                ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
+        settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);
+
+        static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
+        settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
+
+        static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
+        settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMapMode, 1);
+
+        uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
+        if (type == CAMERA3_TEMPLATE_STILL_CAPTURE) {
+            aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
+        }
+        settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &aberrationMode, 1);
+
+        static const int32_t testPatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
+        settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPatternMode, 1);
+    }
+
+    mDefaultTemplates[type] = settings.release();
+
+    return mDefaultTemplates[type];
+}
+
+status_t EmulatedFakeRotatingCamera3::processCaptureRequest(
+        camera3_capture_request *request) {
+
+    Mutex::Autolock l(mLock);
+    status_t res;
+
+    /** Validation */
+
+    if (mStatus < STATUS_READY) {
+        ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__,
+                mStatus);
+        return INVALID_OPERATION;
+    }
+
+    if (request == NULL) {
+        ALOGE("%s: NULL request!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    uint32_t frameNumber = request->frame_number;
+
+    if (request->settings == NULL && mPrevSettings.isEmpty()) {
+        ALOGE("%s: Request %d: NULL settings for first request after"
+                "configureStreams()", __FUNCTION__, frameNumber);
+        return BAD_VALUE;
+    }
+
+    if (request->input_buffer != NULL &&
+            request->input_buffer->stream != mInputStream) {
+        ALOGE("%s: Request %d: Input buffer not from input stream!",
+                __FUNCTION__, frameNumber);
+        ALOGV("%s: Bad stream %p, expected: %p",
+              __FUNCTION__, request->input_buffer->stream,
+              mInputStream);
+        ALOGV("%s: Bad stream type %d, expected stream type %d",
+              __FUNCTION__, request->input_buffer->stream->stream_type,
+              mInputStream ? mInputStream->stream_type : -1);
+
+        return BAD_VALUE;
+    }
+
+    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
+        ALOGE("%s: Request %d: No output buffers provided!",
+                __FUNCTION__, frameNumber);
+        return BAD_VALUE;
+    }
+
+    // Validate all buffers, starting with input buffer if it's given
+
+    ssize_t idx;
+    const camera3_stream_buffer_t *b;
+    if (request->input_buffer != NULL) {
+        idx = -1;
+        b = request->input_buffer;
+    } else {
+        idx = 0;
+        b = request->output_buffers;
+    }
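+    // idx == -1 refers to the optional input buffer; output buffers are then
+    // walked from idx == 0 up to num_output_buffers - 1.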
+    do {
+        PrivateStreamInfo *priv =
+                static_cast<PrivateStreamInfo*>(b->stream->priv);
+        if (priv == NULL) {
+            ALOGE("%s: Request %d: Buffer %zu: Unconfigured stream!",
+                    __FUNCTION__, frameNumber, idx);
+            return BAD_VALUE;
+        }
+        if (!priv->alive) {
+            ALOGE("%s: Request %d: Buffer %zu: Dead stream!",
+                    __FUNCTION__, frameNumber, idx);
+            return BAD_VALUE;
+        }
+        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
+            ALOGE("%s: Request %d: Buffer %zu: Status not OK!",
+                    __FUNCTION__, frameNumber, idx);
+            return BAD_VALUE;
+        }
+        if (b->release_fence != -1) {
+            ALOGE("%s: Request %d: Buffer %zu: Has a release fence!",
+                    __FUNCTION__, frameNumber, idx);
+            return BAD_VALUE;
+        }
+        if (b->buffer == NULL) {
+            ALOGE("%s: Request %d: Buffer %zu: NULL buffer handle!",
+                    __FUNCTION__, frameNumber, idx);
+            return BAD_VALUE;
+        }
+        idx++;
+        b = &(request->output_buffers[idx]);
+    } while (idx < (ssize_t)request->num_output_buffers);
+
+    // TODO: Validate settings parameters
+
+    /**
+     * Start processing this request
+     */
+
+    mStatus = STATUS_ACTIVE;
+
+    CameraMetadata settings;
+
+    if (request->settings == NULL) {
+        settings.acquire(mPrevSettings);
+    } else {
+        settings = request->settings;
+    }
+
+    res = process3A(settings);
+    if (res != OK) {
+        return res;
+    }
+
+    // TODO: Handle reprocessing
+
+    /**
+     * Get ready for sensor config
+     */
+
+    nsecs_t  exposureTime;
+    nsecs_t  frameDuration;
+    uint32_t sensitivity;
+    bool     needJpeg = false;
+    camera_metadata_entry_t entry;
+    entry = settings.find(ANDROID_SENSOR_EXPOSURE_TIME);
+    exposureTime = (entry.count > 0) ? entry.data.i64[0] : Sensor::kExposureTimeRange[0];
+    entry = settings.find(ANDROID_SENSOR_FRAME_DURATION);
+    frameDuration = (entry.count > 0)? entry.data.i64[0] : Sensor::kFrameDurationRange[0];
+    entry = settings.find(ANDROID_SENSOR_SENSITIVITY);
+    sensitivity = (entry.count > 0) ? entry.data.i32[0] : Sensor::kSensitivityRange[0];
+
+    if (exposureTime > frameDuration) {
+        frameDuration = exposureTime + Sensor::kMinVerticalBlank;
+        settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
+    }
+
+    Buffers *sensorBuffers = new Buffers();
+    HalBufferVector *buffers = new HalBufferVector();
+
+    sensorBuffers->setCapacity(request->num_output_buffers);
+    buffers->setCapacity(request->num_output_buffers);
+
+    // Process all the buffers we got for output, constructing internal buffer
+    // structures for them, and lock them for writing.
+    for (size_t i = 0; i < request->num_output_buffers; i++) {
+        const camera3_stream_buffer &srcBuf = request->output_buffers[i];
+        StreamBuffer destBuf;
+        destBuf.streamId = kGenericStreamId;
+        destBuf.width    = srcBuf.stream->width;
+        destBuf.height   = srcBuf.stream->height;
+        // in line with goldfish gralloc's format resolution
+        if (srcBuf.stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+            if (srcBuf.stream->usage & GRALLOC_USAGE_HW_CAMERA_WRITE) {
+                if (srcBuf.stream->usage & GRALLOC_USAGE_HW_TEXTURE) {
+                    destBuf.format = HAL_PIXEL_FORMAT_YCbCr_420_888;
+                }
+                else if (srcBuf.stream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) {
+                    destBuf.format = HAL_PIXEL_FORMAT_YCbCr_420_888;
+                }
+                else if ((srcBuf.stream->usage & GRALLOC_USAGE_HW_CAMERA_MASK)
+                         == GRALLOC_USAGE_HW_CAMERA_ZSL) {
+                    destBuf.format = HAL_PIXEL_FORMAT_RGB_888;
+                }
+            }
+        }
+        else {
+            destBuf.format = srcBuf.stream->format;
+        }
+        destBuf.stride   = srcBuf.stream->width;
+        destBuf.dataSpace = srcBuf.stream->data_space;
+        destBuf.buffer   = srcBuf.buffer;
+
+        if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) {
+            needJpeg = true;
+        }
+
+        // Wait on fence
+        sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence);
+        res = bufferAcquireFence->wait(kFenceTimeoutMs);
+        if (res == TIMED_OUT) {
+            ALOGE("%s: Request %d: Buffer %zu: Fence timed out after %d ms",
+                    __FUNCTION__, frameNumber, i, kFenceTimeoutMs);
+        }
+        if (res == OK) {
+            // Lock buffer for writing
+            if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
+                if (destBuf.format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
+                    android_ycbcr ycbcr = {};
+                    res = mGBM->lockYCbCr(
+                        *(destBuf.buffer),
+                        GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+                        Rect(0, 0, destBuf.width, destBuf.height),
+                        &ycbcr);
+                    // This is only valid because we know that emulator's
+                    // YCbCr_420_888 is really contiguous NV21 under the hood
+                    destBuf.img = static_cast<uint8_t*>(ycbcr.y);
+                } else {
+                    ALOGE("Unexpected private format for flexible YUV: 0x%x",
+                            destBuf.format);
+                    res = INVALID_OPERATION;
+                }
+            } else {
+                res = mGBM->lock(
+                    *(destBuf.buffer),
+                    GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN,
+                    Rect(0, 0, destBuf.width, destBuf.height),
+                    (void**)&(destBuf.img));
+
+            }
+            if (res != OK) {
+                ALOGE("%s: Request %d: Buffer %zu: Unable to lock buffer",
+                        __FUNCTION__, frameNumber, i);
+            } else {
+                ALOGV("%s, stream format 0x%x width %d height %d buffer 0x%p img 0x%p",
+                  __FUNCTION__, destBuf.format, destBuf.width, destBuf.height,
+                  destBuf.buffer, destBuf.img);
+            }
+        }
+
+        if (res != OK) {
+            // Either waiting or locking failed. Unlock locked buffers and bail
+            // out.
+            for (size_t j = 0; j < i; j++) {
+                mGBM->unlock(*(request->output_buffers[j].buffer));
+            }
+            delete sensorBuffers;
+            delete buffers;
+            return NO_INIT;
+        }
+
+        sensorBuffers->push_back(destBuf);
+        buffers->push_back(srcBuf);
+    }
+
+    /**
+     * Wait for JPEG compressor to not be busy, if needed
+     */
+    if (needJpeg) {
+        bool ready = mJpegCompressor->waitForDone(kJpegTimeoutNs);
+        if (!ready) {
+            ALOGE("%s: Timeout waiting for JPEG compression to complete!",
+                    __FUNCTION__);
+            return NO_INIT;
+        }
+        res = mJpegCompressor->reserve();
+        if (res != OK) {
+            ALOGE("%s: Error managing JPEG compressor resources, can't reserve it!", __FUNCTION__);
+            return NO_INIT;
+        }
+    }
+
+    /**
+     * Wait until the in-flight queue has room
+     */
+    res = mReadoutThread->waitForReadout();
+    if (res != OK) {
+        ALOGE("%s: Timeout waiting for previous requests to complete!",
+                __FUNCTION__);
+        return NO_INIT;
+    }
+
+    /**
+     * Wait until sensor's ready. This waits for lengthy amounts of time with
+     * mLock held, but the interface spec is that no other calls may be made to
+     * the HAL by the framework while process_capture_request is happening.
+     */
+    int syncTimeoutCount = 0;
+    while(!mSensor->waitForVSync(kSyncWaitTimeout)) {
+        if (mStatus == STATUS_ERROR) {
+            return NO_INIT;
+        }
+        if (syncTimeoutCount == kMaxSyncTimeoutCount) {
+            ALOGE("%s: Request %d: Sensor sync timed out after %" PRId64 " ms",
+                    __FUNCTION__, frameNumber,
+                    kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000);
+            return NO_INIT;
+        }
+        syncTimeoutCount++;
+    }
+
+    /**
+     * Configure sensor and queue up the request to the readout thread
+     */
+    mSensor->setExposureTime(exposureTime);
+    mSensor->setFrameDuration(frameDuration);
+    mSensor->setSensitivity(sensitivity);
+    mSensor->setDestinationBuffers(sensorBuffers);
+    mSensor->setFrameNumber(request->frame_number);
+
+    ReadoutThread::Request r;
+    r.frameNumber = request->frame_number;
+    r.settings = settings;
+    r.sensorBuffers = sensorBuffers;
+    r.buffers = buffers;
+
+    mReadoutThread->queueCaptureRequest(r);
+    ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number);
+
+    // Cache the settings for next time
+    mPrevSettings.acquire(settings);
+
+    return OK;
+}
+
+status_t EmulatedFakeRotatingCamera3::flush() {
+    ALOGW("%s: Not implemented; ignored", __FUNCTION__);
+    return OK;
+}
+
+/** Debug methods */
+
+void EmulatedFakeRotatingCamera3::dump(int fd) {
+
+}
+
+/**
+ * Private methods
+ */
+
+status_t EmulatedFakeRotatingCamera3::getCameraCapabilities() {
+
+    const char *key = mFacingBack ? "qemu.sf.back_camera_caps" : "qemu.sf.front_camera_caps";
+
+    /* Defined by the 'qemu.sf.*_camera_caps' boot property: a comma- or
+     * space-separated list of capability names. If the property doesn't exist,
+     * a FULL_LEVEL-based default set is used (see below). */
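+    // For example, a value along the lines of "FULL_LEVEL,RAW" (names must match
+    // entries in sAvailableCapabilitiesStrings, compared case-insensitively)
+    // selects exactly those capabilities.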
+    char prop[PROPERTY_VALUE_MAX];
+    if (property_get(key, prop, NULL) > 0) {
+        char *saveptr = nullptr;
+        char *cap = strtok_r(prop, " ,", &saveptr);
+        while (cap != NULL) {
+            for (int i = 0; i < NUM_CAPABILITIES; i++) {
+                if (!strcasecmp(cap, sAvailableCapabilitiesStrings[i])) {
+                    mCapabilities.add(static_cast<AvailableCapabilities>(i));
+                    break;
+                }
+            }
+            cap = strtok_r(NULL, " ,", &saveptr);
+        }
+        if (mCapabilities.size() == 0) {
+            ALOGE("qemu.sf.back_camera_caps had no valid capabilities: %s", prop);
+        }
+    }
+    // Default to FULL_LEVEL plus RAW and MOTION_TRACKING if nothing is defined
+    if (mCapabilities.size() == 0) {
+        mCapabilities.add(FULL_LEVEL);
+        // "RAW" causes several CTS failures: b/68723953, disable it so far.
+        // TODO: add "RAW" back when all failures are resolved.
+        mCapabilities.add(RAW);
+        mCapabilities.add(MOTION_TRACKING);
+    }
+
+    // Add level-based caps
+    if (hasCapability(FULL_LEVEL)) {
+        mCapabilities.add(BURST_CAPTURE);
+        mCapabilities.add(READ_SENSOR_SETTINGS);
+        mCapabilities.add(MANUAL_SENSOR);
+        mCapabilities.add(MANUAL_POST_PROCESSING);
+    }
+
+    // Backwards-compatible is required for most other caps
+    // Not required for DEPTH_OUTPUT, though.
+    if (hasCapability(BURST_CAPTURE) ||
+            hasCapability(READ_SENSOR_SETTINGS) ||
+            hasCapability(RAW) ||
+            hasCapability(MANUAL_SENSOR) ||
+            hasCapability(MANUAL_POST_PROCESSING) ||
+            hasCapability(PRIVATE_REPROCESSING) ||
+            hasCapability(YUV_REPROCESSING) ||
+            hasCapability(CONSTRAINED_HIGH_SPEED_VIDEO)) {
+        mCapabilities.add(BACKWARD_COMPATIBLE);
+    }
+
+    ALOGI("Camera %d capabilities:", mCameraID);
+    for (size_t i = 0; i < mCapabilities.size(); i++) {
+        ALOGI("  %s", sAvailableCapabilitiesStrings[mCapabilities[i]]);
+    }
+
+    return OK;
+}
+
+bool EmulatedFakeRotatingCamera3::hasCapability(AvailableCapabilities cap) {
+    ssize_t idx = mCapabilities.indexOf(cap);
+    return idx >= 0;
+}
+
+status_t EmulatedFakeRotatingCamera3::constructStaticInfo() {
+
+    CameraMetadata info;
+    Vector<int32_t> availableCharacteristicsKeys;
+    status_t res;
+
+    // Find max width/height
+    int32_t width = 0, height = 0;
+    size_t rawSizeCount = sizeof(kAvailableRawSizes)/sizeof(kAvailableRawSizes[0]);
+    for (size_t index = 0; index + 1 < rawSizeCount; index += 2) {
+        if (width <= (int32_t)kAvailableRawSizes[index] &&
+            height <= (int32_t)kAvailableRawSizes[index+1]) {
+            width = kAvailableRawSizes[index];
+            height = kAvailableRawSizes[index+1];
+        }
+    }
+
+    if (width < 640 || height < 480) {
+        width = 640;
+        height = 480;
+    }
+    mSensorWidth = width;
+    mSensorHeight = height;
+
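+// Helper used below: records each static tag in availableCharacteristicsKeys and
+// copies the value into the metadata, bailing out of this method on any update
+// failure.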
+#define ADD_STATIC_ENTRY(name, varptr, count) \
+        availableCharacteristicsKeys.add(name);   \
+        res = info.update(name, varptr, count); \
+        if (res != OK) return res
+
+    // android.sensor
+
+    if (hasCapability(MANUAL_SENSOR)) {
+
+        ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
+                Sensor::kExposureTimeRange, 2);
+
+        ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
+                &Sensor::kFrameDurationRange[1], 1);
+
+        ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
+                Sensor::kSensitivityRange,
+                sizeof(Sensor::kSensitivityRange)
+                /sizeof(int32_t));
+
+        ADD_STATIC_ENTRY(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
+                &Sensor::kSensitivityRange[1], 1);
+    }
+
+    static const uint8_t sensorColorFilterArrangement =
+        ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;
+    ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
+            &sensorColorFilterArrangement, 1);
+
+    static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm
+    ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
+            sensorPhysicalSize, 2);
+
+    const int32_t pixelArray[] = {mSensorWidth, mSensorHeight};
+    ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
+            pixelArray, 2);
+    const int32_t activeArray[] = {0, 0, mSensorWidth, mSensorHeight};
+    ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
+            activeArray, 4);
+
+    static const int32_t orientation = 90; // Aligned with 'long edge'
+    ADD_STATIC_ENTRY(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
+
+    static const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
+    ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, &timestampSource, 1);
+
+    if (hasCapability(RAW) || hasCapability(MANUAL_SENSOR)) {
+        ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_WHITE_LEVEL,
+                (int32_t*)&Sensor::kMaxRawValue, 1);
+
+        static const int32_t blackLevelPattern[4] = {
+            (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel,
+            (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel
+        };
+        ADD_STATIC_ENTRY(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
+                blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t));
+    }
+
+    if (hasCapability(RAW)) {
+        ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
+                &Sensor::kColorFilterArrangement, 1);
+    }
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const int32_t availableTestPatternModes[] = {
+            ANDROID_SENSOR_TEST_PATTERN_MODE_OFF
+        };
+        ADD_STATIC_ENTRY(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
+                availableTestPatternModes, sizeof(availableTestPatternModes)/sizeof(int32_t));
+    }
+
+    // android.lens
+    static const float focalLengths = 5.0f;
+    ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
+            &focalLengths, 1);
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        // 5 cm minimum focus distance, expressed in diopters (1/m); a fixed-focus
+        // camera would report 0
+        const float minFocusDistance = 1.0/0.05;
+        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
+                &minFocusDistance, 1);
+
+        // 5 m hyperfocal distance, expressed in diopters (1/m)
+        const float hyperFocalDistance = 1.0/5.0;
+        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
+                &hyperFocalDistance, 1);
+
+        static const float apertures = 2.8f;
+        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
+                &apertures, 1);
+        static const float filterDensities = 0;
+        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
+                &filterDensities, 1);
+        static const uint8_t availableOpticalStabilization =
+                ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
+        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
+                &availableOpticalStabilization, 1);
+
+        static const int32_t lensShadingMapSize[] = {1, 1};
+        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
+                sizeof(lensShadingMapSize)/sizeof(int32_t));
+
+        static const uint8_t lensFocusCalibration =
+                ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE;
+        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, &lensFocusCalibration, 1);
+    }
+
+    if (hasCapability(DEPTH_OUTPUT)) {
+        // These could be included for non-DEPTH capabilities as well, but they are
+        // kept DEPTH-only here to vary the test coverage
+
+        // 90 degree rotation to align with long edge of a phone device that's by default portrait
+        static const float qO[] = { 0.707107f, 0.f, 0.f, 0.707107f};
+
+        const float qF[] = {0, 1.f, 0, 0.f};
+
+        // Quaternion product: orientation change, then facing
+        const float lensPoseRotation[] = {qO[0]*qF[0] - qO[1]*qF[1] - qO[2]*qF[2] - qO[3]*qF[3],
+                                          qO[0]*qF[1] + qO[1]*qF[0] + qO[2]*qF[3] - qO[3]*qF[2],
+                                          qO[0]*qF[2] + qO[2]*qF[0] + qO[1]*qF[3] - qO[3]*qF[1],
+                                          qO[0]*qF[3] + qO[3]*qF[0] + qO[1]*qF[2] - qO[2]*qF[1]};
+
+        ADD_STATIC_ENTRY(ANDROID_LENS_POSE_ROTATION, lensPoseRotation,
+                sizeof(lensPoseRotation)/sizeof(float));
+
+        // Only one camera facing each way, so 0 translation needed to the center of the 'main'
+        // camera
+        static const float lensPoseTranslation[] = {0.f, 0.f, 0.f};
+
+        ADD_STATIC_ENTRY(ANDROID_LENS_POSE_TRANSLATION, lensPoseTranslation,
+                sizeof(lensPoseTranslation)/sizeof(float));
+
+        // Intrinsic calibration is 'ideal': (f_x, f_y, c_x, c_y, s) are derived
+        // directly from the focal length, physical sensor size, and active array size
+        float f_x = focalLengths * mSensorWidth / sensorPhysicalSize[0];
+        float f_y = focalLengths * mSensorHeight / sensorPhysicalSize[1];
+        float c_x = mSensorWidth / 2.f;
+        float c_y = mSensorHeight / 2.f;
+        float s = 0.f;
+        const float lensIntrinsics[] = { f_x, f_y, c_x, c_y, s };
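+        // e.g. with the 3.20 x 2.40 mm physical size above and a 640x480 active
+        // array: f_x = 5.0 * 640 / 3.2 = 1000 px, f_y = 5.0 * 480 / 2.4 = 1000 px,
+        // c_x = 320, c_y = 240, s = 0.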
+
+        ADD_STATIC_ENTRY(ANDROID_LENS_INTRINSIC_CALIBRATION, lensIntrinsics,
+                sizeof(lensIntrinsics)/sizeof(float));
+
+        // No radial or tangential distortion
+
+        float lensRadialDistortion[] = {1.0f, 0.f, 0.f, 0.f, 0.f, 0.f};
+
+        ADD_STATIC_ENTRY(ANDROID_LENS_RADIAL_DISTORTION, lensRadialDistortion,
+                sizeof(lensRadialDistortion)/sizeof(float));
+
+    }
+
+
+    const uint8_t lensFacing = mFacingBack ?
+            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
+    ADD_STATIC_ENTRY(ANDROID_LENS_FACING, &lensFacing, 1);
+
+    // android.flash
+
+    static const uint8_t flashAvailable = 1;
+    ADD_STATIC_ENTRY(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);
+
+    // android.hotPixel
+
+    if (hasCapability(MANUAL_POST_PROCESSING)) {
+        static const uint8_t availableHotPixelModes[] = {
+            ANDROID_HOT_PIXEL_MODE_FAST, ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY
+        };
+        ADD_STATIC_ENTRY(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
+                availableHotPixelModes, sizeof(availableHotPixelModes));
+    }
+
+    // android.tonemap
+
+    if (hasCapability(MANUAL_POST_PROCESSING)) {
+        static const int32_t tonemapCurvePoints = 128;
+        ADD_STATIC_ENTRY(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);
+
+        static const uint8_t availableToneMapModes[] = {
+            ANDROID_TONEMAP_MODE_CONTRAST_CURVE,  ANDROID_TONEMAP_MODE_FAST,
+            ANDROID_TONEMAP_MODE_HIGH_QUALITY
+        };
+        ADD_STATIC_ENTRY(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES, availableToneMapModes,
+                sizeof(availableToneMapModes));
+    }
+
+    // android.scaler
+
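+    // Each stream configuration below is a (format, width, height, direction)
+    // tuple, where direction is OUTPUT or INPUT as defined by
+    // ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS.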
+    const std::vector<int32_t> availableStreamConfigurationsBasic = {
+        HAL_PIXEL_FORMAT_BLOB, width, height, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 1280, 720, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+        HAL_PIXEL_FORMAT_YCbCr_420_888, 1280, 720, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+        HAL_PIXEL_FORMAT_BLOB, 1280, 720, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+        HAL_PIXEL_FORMAT_YCbCr_420_888, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+        HAL_PIXEL_FORMAT_BLOB, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 320, 240, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+        HAL_PIXEL_FORMAT_YCbCr_420_888, 320, 240, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+        HAL_PIXEL_FORMAT_BLOB, 320, 240, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 176, 144, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+        HAL_PIXEL_FORMAT_YCbCr_420_888, 176, 144, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+        HAL_PIXEL_FORMAT_BLOB, 176, 144, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+    };
+
+    const std::vector<int32_t> availableStreamConfigurationsRaw = {
+        HAL_PIXEL_FORMAT_RAW16, width, height, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+    };
+
+    const std::vector<int32_t> availableStreamConfigurationsBurst = {
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, width, height, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+        HAL_PIXEL_FORMAT_YCbCr_420_888, width, height, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+        HAL_PIXEL_FORMAT_RGBA_8888, width, height, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+    };
+
+    std::vector<int32_t> availableStreamConfigurations;
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        availableStreamConfigurations.insert(availableStreamConfigurations.end(),
+                availableStreamConfigurationsBasic.begin(),
+                availableStreamConfigurationsBasic.end());
+    }
+    if (hasCapability(RAW)) {
+        availableStreamConfigurations.insert(availableStreamConfigurations.end(),
+                availableStreamConfigurationsRaw.begin(),
+                availableStreamConfigurationsRaw.end());
+    }
+    if (hasCapability(BURST_CAPTURE)) {
+        availableStreamConfigurations.insert(availableStreamConfigurations.end(),
+                availableStreamConfigurationsBurst.begin(),
+                availableStreamConfigurationsBurst.end());
+    }
+
+    if (availableStreamConfigurations.size() > 0) {
+        ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+                &availableStreamConfigurations[0],
+                availableStreamConfigurations.size());
+    }
+
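+    // Each min-frame-duration entry is a (format, width, height, duration in ns)
+    // tuple; all sizes here report the sensor's fastest frame duration.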
+    const std::vector<int64_t> availableMinFrameDurationsBasic = {
+        HAL_PIXEL_FORMAT_BLOB, width, height, Sensor::kFrameDurationRange[0],
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 1280, 720, Sensor::kFrameDurationRange[0],
+        HAL_PIXEL_FORMAT_YCbCr_420_888, 1280, 720, Sensor::kFrameDurationRange[0],
+        HAL_PIXEL_FORMAT_BLOB, 1280, 720, Sensor::kFrameDurationRange[0],
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, Sensor::kFrameDurationRange[0],
+        HAL_PIXEL_FORMAT_YCbCr_420_888, 640, 480, Sensor::kFrameDurationRange[0],
+        HAL_PIXEL_FORMAT_BLOB, 640, 480, Sensor::kFrameDurationRange[0],
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 320, 240, Sensor::kFrameDurationRange[0],
+        HAL_PIXEL_FORMAT_YCbCr_420_888, 320, 240, Sensor::kFrameDurationRange[0],
+        HAL_PIXEL_FORMAT_BLOB, 320, 240, Sensor::kFrameDurationRange[0],
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 176, 144, Sensor::kFrameDurationRange[0],
+        HAL_PIXEL_FORMAT_YCbCr_420_888, 176, 144, Sensor::kFrameDurationRange[0],
+        HAL_PIXEL_FORMAT_BLOB, 176, 144, Sensor::kFrameDurationRange[0],
+    };
+
+    const std::vector<int64_t> availableMinFrameDurationsRaw = {
+        HAL_PIXEL_FORMAT_RAW16, width, height, Sensor::kFrameDurationRange[0],
+    };
+
+    const std::vector<int64_t> availableMinFrameDurationsBurst = {
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, width, height, Sensor::kFrameDurationRange[0],
+        HAL_PIXEL_FORMAT_YCbCr_420_888, width, height, Sensor::kFrameDurationRange[0],
+        HAL_PIXEL_FORMAT_RGBA_8888, width, height, Sensor::kFrameDurationRange[0],
+    };
+
+    std::vector<int64_t> availableMinFrameDurations;
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        availableMinFrameDurations.insert(availableMinFrameDurations.end(),
+                availableMinFrameDurationsBasic.begin(),
+                availableMinFrameDurationsBasic.end());
+    }
+    if (hasCapability(RAW)) {
+        availableMinFrameDurations.insert(availableMinFrameDurations.end(),
+                availableMinFrameDurationsRaw.begin(),
+                availableMinFrameDurationsRaw.end());
+    }
+    if (hasCapability(BURST_CAPTURE)) {
+        availableMinFrameDurations.insert(availableMinFrameDurations.end(),
+                availableMinFrameDurationsBurst.begin(),
+                availableMinFrameDurationsBurst.end());
+    }
+
+    if (availableMinFrameDurations.size() > 0) {
+        ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
+                &availableMinFrameDurations[0],
+                availableMinFrameDurations.size());
+    }
+
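+    // Each stall-duration entry is a (format, width, height, stall in ns) tuple.
+    // Only JPEG (BLOB) and RAW streams stall; processed YUV/RGB streams report 0.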
+    const std::vector<int64_t> availableStallDurationsBasic = {
+        HAL_PIXEL_FORMAT_BLOB, width, height, Sensor::kFrameDurationRange[0],
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 1280, 720, 0,
+        HAL_PIXEL_FORMAT_YCbCr_420_888, 1280, 720, 0,
+        HAL_PIXEL_FORMAT_BLOB, 1280, 720, Sensor::kFrameDurationRange[0],
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, 0,
+        HAL_PIXEL_FORMAT_YCbCr_420_888, 640, 480, 0,
+        HAL_PIXEL_FORMAT_BLOB, 640, 480, Sensor::kFrameDurationRange[0],
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 320, 240, 0,
+        HAL_PIXEL_FORMAT_YCbCr_420_888, 320, 240, 0,
+        HAL_PIXEL_FORMAT_RGBA_8888, 320, 240, 0,
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 176, 144, 0,
+        HAL_PIXEL_FORMAT_YCbCr_420_888, 176, 144, 0,
+        HAL_PIXEL_FORMAT_RGBA_8888, 176, 144, 0,
+    };
+
+    const std::vector<int64_t> availableStallDurationsRaw = {
+        HAL_PIXEL_FORMAT_RAW16, width, height, Sensor::kFrameDurationRange[0]
+    };
+    const std::vector<int64_t> availableStallDurationsBurst = {
+        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, width, height, 0,
+        HAL_PIXEL_FORMAT_YCbCr_420_888, width, height, 0,
+        HAL_PIXEL_FORMAT_RGBA_8888, width, height, 0
+    };
+
+    std::vector<int64_t> availableStallDurations;
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        availableStallDurations.insert(availableStallDurations.end(),
+                availableStallDurationsBasic.begin(),
+                availableStallDurationsBasic.end());
+    }
+    if (hasCapability(RAW)) {
+        availableStallDurations.insert(availableStallDurations.end(),
+                availableStallDurationsRaw.begin(),
+                availableStallDurationsRaw.end());
+    }
+    if (hasCapability(BURST_CAPTURE)) {
+        availableStallDurations.insert(availableStallDurations.end(),
+                availableStallDurationsBurst.begin(),
+                availableStallDurationsBurst.end());
+    }
+
+    if (availableStallDurations.size() > 0) {
+        ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
+                &availableStallDurations[0],
+                availableStallDurations.size());
+    }
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
+        ADD_STATIC_ENTRY(ANDROID_SCALER_CROPPING_TYPE,
+                &croppingType, 1);
+
+        static const float maxZoom = 10;
+        ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
+                &maxZoom, 1);
+    }
+
+    // android.jpeg
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const int32_t jpegThumbnailSizes[] = {
+            0, 0,
+            160, 120,
+            320, 180,
+            320, 240
+        };
+        ADD_STATIC_ENTRY(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+                jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t));
+
+        static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
+        ADD_STATIC_ENTRY(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
+    }
+
+    // android.stats
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const uint8_t availableFaceDetectModes[] = {
+            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
+            ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
+            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL
+        };
+        ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
+                availableFaceDetectModes,
+                sizeof(availableFaceDetectModes));
+
+        static const int32_t maxFaceCount = 8;
+        ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
+                &maxFaceCount, 1);
+
+
+        static const uint8_t availableShadingMapModes[] = {
+            ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF
+        };
+        ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
+                availableShadingMapModes, sizeof(availableShadingMapModes));
+    }
+
+    // android.sync
+
+    static const int32_t maxLatency =
+            hasCapability(FULL_LEVEL) ? ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL : 3;
+    ADD_STATIC_ENTRY(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1);
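+    // PER_FRAME_CONTROL corresponds to a sync latency of 0 frames; non-FULL
+    // devices report 3 frames here.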
+
+    // android.control
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        const uint8_t availableControlModes[] = {
+            ANDROID_CONTROL_MODE_OFF, ANDROID_CONTROL_MODE_AUTO, ANDROID_CONTROL_MODE_USE_SCENE_MODE
+        };
+        ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES,
+                availableControlModes, sizeof(availableControlModes));
+    } else {
+        const uint8_t availableControlModes[] = {
+            ANDROID_CONTROL_MODE_AUTO
+        };
+        ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES,
+                availableControlModes, sizeof(availableControlModes));
+    }
+
+    const uint8_t availableSceneModes[] = {
+        hasCapability(BACKWARD_COMPATIBLE) ?
+            ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY :
+            ANDROID_CONTROL_SCENE_MODE_DISABLED
+    };
+    ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
+            availableSceneModes, sizeof(availableSceneModes));
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const uint8_t availableEffects[] = {
+            ANDROID_CONTROL_EFFECT_MODE_OFF
+        };
+        ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_EFFECTS,
+                availableEffects, sizeof(availableEffects));
+    }
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const int32_t max3aRegions[] = {/*AE*/ 1,/*AWB*/ 0,/*AF*/ 1};
+        ADD_STATIC_ENTRY(ANDROID_CONTROL_MAX_REGIONS,
+                max3aRegions, sizeof(max3aRegions)/sizeof(max3aRegions[0]));
+
+        static const uint8_t availableAeModes[] = {
+            ANDROID_CONTROL_AE_MODE_OFF,
+            ANDROID_CONTROL_AE_MODE_ON
+        };
+        ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_MODES,
+                availableAeModes, sizeof(availableAeModes));
+
+        static const camera_metadata_rational exposureCompensationStep = {
+            0, 3
+        };
+        ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_STEP,
+                &exposureCompensationStep, 1);
+
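+        // With a {0, 0} range, exposure compensation is effectively fixed at 0 EV.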
+        static const int32_t exposureCompensationRange[] = {0, 0};
+        ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
+                exposureCompensationRange,
+                sizeof(exposureCompensationRange)/sizeof(int32_t));
+    }
+
+    static const int32_t availableTargetFpsRanges[] = {
+        15, 30, 30, 30
+    };
+    ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
+            availableTargetFpsRanges,
+            sizeof(availableTargetFpsRanges)/sizeof(int32_t));
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const uint8_t availableAntibandingModes[] = {
+            ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
+            ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO
+        };
+        ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
+                availableAntibandingModes, sizeof(availableAntibandingModes));
+    }
+
+    const uint8_t aeLockAvailable = hasCapability(BACKWARD_COMPATIBLE) ?
+            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
+
+    ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
+            &aeLockAvailable, 1);
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        static const uint8_t availableAwbModes[] = {
+            ANDROID_CONTROL_AWB_MODE_OFF,
+            ANDROID_CONTROL_AWB_MODE_AUTO,
+            ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
+            ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
+            ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
+            ANDROID_CONTROL_AWB_MODE_SHADE
+        };
+        ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
+                availableAwbModes, sizeof(availableAwbModes));
+    }
+
+    const uint8_t awbLockAvailable = hasCapability(BACKWARD_COMPATIBLE) ?
+            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
+
+    ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
+            &awbLockAvailable, 1);
+
+    static const uint8_t availableAfModesBack[] = {
+            ANDROID_CONTROL_AF_MODE_OFF,
+            ANDROID_CONTROL_AF_MODE_AUTO,
+            ANDROID_CONTROL_AF_MODE_MACRO,
+            ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
+            ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE
+    };
+
+    static const uint8_t availableAfModesFront[] = {
+            ANDROID_CONTROL_AF_MODE_OFF
+    };
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES,
+                availableAfModesBack, sizeof(availableAfModesBack));
+    } else {
+        ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES,
+                availableAfModesFront, sizeof(availableAfModesFront));
+    }
+
+    static const uint8_t availableVstabModes[] = {
+        ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF
+    };
+    ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
+            availableVstabModes, sizeof(availableVstabModes));
+
+    // android.colorCorrection
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        const uint8_t availableAberrationModes[] = {
+            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
+            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
+            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY
+        };
+        ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
+                availableAberrationModes, sizeof(availableAberrationModes));
+    } else {
+        const uint8_t availableAberrationModes[] = {
+            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
+        };
+        ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
+                availableAberrationModes, sizeof(availableAberrationModes));
+    }
+    // android.edge
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        const uint8_t availableEdgeModes[] = {
+            ANDROID_EDGE_MODE_OFF, ANDROID_EDGE_MODE_FAST, ANDROID_EDGE_MODE_HIGH_QUALITY
+        };
+        ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
+                availableEdgeModes, sizeof(availableEdgeModes));
+    } else {
+        const uint8_t availableEdgeModes[] = {
+            ANDROID_EDGE_MODE_OFF
+        };
+        ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
+                availableEdgeModes, sizeof(availableEdgeModes));
+    }
+
+    // android.info
+
+    const uint8_t supportedHardwareLevel =
+            hasCapability(FULL_LEVEL) ? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL :
+                    ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
+    ADD_STATIC_ENTRY(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
+                &supportedHardwareLevel,
+                /*count*/1);
+
+    // android.noiseReduction
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        const uint8_t availableNoiseReductionModes[] = {
+            ANDROID_NOISE_REDUCTION_MODE_OFF,
+            ANDROID_NOISE_REDUCTION_MODE_FAST,
+            ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY
+        };
+        ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
+                availableNoiseReductionModes, sizeof(availableNoiseReductionModes));
+    } else {
+        const uint8_t availableNoiseReductionModes[] = {
+            ANDROID_NOISE_REDUCTION_MODE_OFF,
+        };
+        ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
+                availableNoiseReductionModes, sizeof(availableNoiseReductionModes));
+    }
+
+    // android.depth
+
+    if (hasCapability(DEPTH_OUTPUT)) {
+
+        static const int32_t maxDepthSamples = 100;
+        ADD_STATIC_ENTRY(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
+                &maxDepthSamples, 1);
+
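+        // Y16 is the dense depth format; the BLOB configuration advertises a depth
+        // point cloud of up to maxDepthSamples points.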
+        static const int32_t availableDepthStreamConfigurations[] = {
+            HAL_PIXEL_FORMAT_Y16, 160, 120, ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT,
+            HAL_PIXEL_FORMAT_BLOB, maxDepthSamples,1, ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT
+        };
+        ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
+                availableDepthStreamConfigurations,
+                sizeof(availableDepthStreamConfigurations)/sizeof(int32_t));
+
+        static const int64_t availableDepthMinFrameDurations[] = {
+            HAL_PIXEL_FORMAT_Y16, 160, 120, Sensor::kFrameDurationRange[0],
+            HAL_PIXEL_FORMAT_BLOB, maxDepthSamples,1, Sensor::kFrameDurationRange[0]
+        };
+        ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
+                availableDepthMinFrameDurations,
+                sizeof(availableDepthMinFrameDurations)/sizeof(int64_t));
+
+        static const int64_t availableDepthStallDurations[] = {
+            HAL_PIXEL_FORMAT_Y16, 160, 120, Sensor::kFrameDurationRange[0],
+            HAL_PIXEL_FORMAT_BLOB, maxDepthSamples,1, Sensor::kFrameDurationRange[0]
+        };
+        ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
+                availableDepthStallDurations,
+                sizeof(availableDepthStallDurations)/sizeof(int64_t));
+
+        static const uint8_t depthIsExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
+        ADD_STATIC_ENTRY(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE,
+                &depthIsExclusive, 1);
+    }
+
+    // android.shading
+
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        const uint8_t availableShadingModes[] = {
+            ANDROID_SHADING_MODE_OFF, ANDROID_SHADING_MODE_FAST, ANDROID_SHADING_MODE_HIGH_QUALITY
+        };
+        ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
+                sizeof(availableShadingModes));
+    } else {
+        const uint8_t availableShadingModes[] = {
+            ANDROID_SHADING_MODE_OFF
+        };
+        ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
+                sizeof(availableShadingModes));
+    }
+
+    // android.request
+
+    static const int32_t maxNumOutputStreams[] = {
+            kMaxRawStreamCount, kMaxProcessedStreamCount, kMaxJpegStreamCount
+    };
+    ADD_STATIC_ENTRY(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, maxNumOutputStreams, 3);
+
+    static const uint8_t maxPipelineDepth = kMaxBufferCount;
+    ADD_STATIC_ENTRY(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &maxPipelineDepth, 1);
+
+    static const int32_t partialResultCount = 1;
+    ADD_STATIC_ENTRY(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
+            &partialResultCount, /*count*/1);
+
+    SortedVector<uint8_t> caps;
+    for (size_t i = 0; i < mCapabilities.size(); i++) {
+        switch(mCapabilities[i]) {
+            case BACKWARD_COMPATIBLE:
+                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
+                break;
+            case MANUAL_SENSOR:
+                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
+                break;
+            case MANUAL_POST_PROCESSING:
+                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
+                break;
+            case RAW:
+                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
+                break;
+            case PRIVATE_REPROCESSING:
+                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
+                break;
+            case READ_SENSOR_SETTINGS:
+                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
+                break;
+            case BURST_CAPTURE:
+                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
+                break;
+            case YUV_REPROCESSING:
+                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
+                break;
+            case DEPTH_OUTPUT:
+                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT);
+                break;
+            case CONSTRAINED_HIGH_SPEED_VIDEO:
+                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
+                break;
+            case MOTION_TRACKING:
+                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING);
+                break;
+            default:
+                // Ignore LEVELs
+                break;
+        }
+    }
+    ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, caps.array(), caps.size());
+
+    // Scan a default request template for included request keys
+    Vector<int32_t> availableRequestKeys;
+    const camera_metadata_t *previewRequest =
+        constructDefaultRequestSettings(CAMERA3_TEMPLATE_PREVIEW);
+    for (size_t i = 0; i < get_camera_metadata_entry_count(previewRequest); i++) {
+        camera_metadata_ro_entry_t entry;
+        get_camera_metadata_ro_entry(previewRequest, i, &entry);
+        availableRequestKeys.add(entry.tag);
+    }
+    ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, availableRequestKeys.array(),
+            availableRequestKeys.size());
+
+    // Add a few more result keys. Must be kept up to date with the various places that add these
+
+    Vector<int32_t> availableResultKeys(availableRequestKeys);
+    if (hasCapability(BACKWARD_COMPATIBLE)) {
+        availableResultKeys.add(ANDROID_CONTROL_AE_STATE);
+        availableResultKeys.add(ANDROID_CONTROL_AF_STATE);
+        availableResultKeys.add(ANDROID_CONTROL_AWB_STATE);
+        availableResultKeys.add(ANDROID_FLASH_STATE);
+        availableResultKeys.add(ANDROID_LENS_STATE);
+        availableResultKeys.add(ANDROID_LENS_FOCUS_RANGE);
+        availableResultKeys.add(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW);
+        availableResultKeys.add(ANDROID_STATISTICS_SCENE_FLICKER);
+    }
+
+    if (hasCapability(DEPTH_OUTPUT)) {
+        availableResultKeys.add(ANDROID_LENS_POSE_ROTATION);
+        availableResultKeys.add(ANDROID_LENS_POSE_TRANSLATION);
+        availableResultKeys.add(ANDROID_LENS_INTRINSIC_CALIBRATION);
+        availableResultKeys.add(ANDROID_LENS_RADIAL_DISTORTION);
+    }
+
+    availableResultKeys.add(ANDROID_REQUEST_PIPELINE_DEPTH);
+    availableResultKeys.add(ANDROID_SENSOR_TIMESTAMP);
+
+    ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, availableResultKeys.array(),
+            availableResultKeys.size());
+
+    // Needs to be last, to collect all the keys set
+
+    availableCharacteristicsKeys.add(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
+    info.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
+            availableCharacteristicsKeys);
+
+    mCameraInfo = info.release();
+
+#undef ADD_STATIC_ENTRY
+    return OK;
+}
+
+status_t EmulatedFakeRotatingCamera3::process3A(CameraMetadata &settings) {
+    /**
+     * Extract top-level 3A controls
+     */
+    status_t res;
+
+    camera_metadata_entry e;
+
+    e = settings.find(ANDROID_CONTROL_MODE);
+    if (e.count == 0) {
+        ALOGE("%s: No control mode entry!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+    uint8_t controlMode = e.data.u8[0];
+
+    if (controlMode == ANDROID_CONTROL_MODE_OFF) {
+        mAeMode   = ANDROID_CONTROL_AE_MODE_OFF;
+        mAfMode   = ANDROID_CONTROL_AF_MODE_OFF;
+        mAwbMode  = ANDROID_CONTROL_AWB_MODE_OFF;
+        mAeState  = ANDROID_CONTROL_AE_STATE_INACTIVE;
+        mAfState  = ANDROID_CONTROL_AF_STATE_INACTIVE;
+        mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
+        update3A(settings);
+        return OK;
+    } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
+        if (!hasCapability(BACKWARD_COMPATIBLE)) {
+            ALOGE("%s: Can't use scene mode when BACKWARD_COMPATIBLE not supported!",
+                  __FUNCTION__);
+            return BAD_VALUE;
+        }
+
+        e = settings.find(ANDROID_CONTROL_SCENE_MODE);
+        if (e.count == 0) {
+            ALOGE("%s: No scene mode entry!", __FUNCTION__);
+            return BAD_VALUE;
+        }
+        uint8_t sceneMode = e.data.u8[0];
+
+        switch(sceneMode) {
+            case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
+                mFacePriority = true;
+                break;
+            default:
+                ALOGE("%s: Emulator doesn't support scene mode %d",
+                        __FUNCTION__, sceneMode);
+                return BAD_VALUE;
+        }
+    } else {
+        mFacePriority = false;
+    }
+
+    // controlMode == AUTO or sceneMode == FACE_PRIORITY
+    // Process individual 3A controls
+
+    res = doFakeAE(settings);
+    if (res != OK) return res;
+
+    res = doFakeAF(settings);
+    if (res != OK) return res;
+
+    res = doFakeAWB(settings);
+    if (res != OK) return res;
+
+    update3A(settings);
+    return OK;
+}
+
+status_t EmulatedFakeRotatingCamera3::doFakeAE(CameraMetadata &settings) {
+    camera_metadata_entry e;
+
+    e = settings.find(ANDROID_CONTROL_AE_MODE);
+    if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
+        ALOGE("%s: No AE mode entry!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+    uint8_t aeMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AE_MODE_ON;
+    mAeMode = aeMode;
+
+    switch (aeMode) {
+        case ANDROID_CONTROL_AE_MODE_OFF:
+            // AE is OFF
+            mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
+            return OK;
+        case ANDROID_CONTROL_AE_MODE_ON:
+            // OK for AUTO modes
+            break;
+        default:
+            // Mostly silently ignore unsupported modes
+            ALOGV("%s: Emulator doesn't support AE mode %d, assuming ON",
+                    __FUNCTION__, aeMode);
+            break;
+    }
+
+    e = settings.find(ANDROID_CONTROL_AE_LOCK);
+    bool aeLocked = (e.count > 0) ? (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON) : false;
+
+    e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
+    bool precaptureTrigger = false;
+    if (e.count != 0) {
+        precaptureTrigger =
+                (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START);
+    }
+
+    if (precaptureTrigger) {
+        ALOGV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger);
+    } else if (e.count > 0) {
+        ALOGV("%s: Pre capture trigger was present? %zu",
+              __FUNCTION__,
+              e.count);
+    }
+
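+    // Fake AE state machine: a precapture trigger converges the simulated exposure
+    // toward the target before reporting CONVERGED/LOCKED; otherwise the state
+    // wanders between SEARCHING and CONVERGED, periodically picking a new random
+    // target exposure to mimic scene changes.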
+    if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
+        // Run precapture sequence
+        if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
+            mAeCounter = 0;
+        }
+
+        if (mFacePriority) {
+            mAeTargetExposureTime = kFacePriorityExposureTime;
+        } else {
+            mAeTargetExposureTime = kNormalExposureTime;
+        }
+
+        if (mAeCounter > kPrecaptureMinFrames &&
+                (mAeTargetExposureTime - mAeCurrentExposureTime) <
+                mAeTargetExposureTime / 10) {
+            // Done with precapture
+            mAeCounter = 0;
+            mAeState = aeLocked ? ANDROID_CONTROL_AE_STATE_LOCKED :
+                    ANDROID_CONTROL_AE_STATE_CONVERGED;
+        } else {
+            // Converge some more
+            mAeCurrentExposureTime +=
+                    (mAeTargetExposureTime - mAeCurrentExposureTime) *
+                    kExposureTrackRate;
+            mAeCounter++;
+            mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
+        }
+
+    } else if (!aeLocked) {
+        // Run standard occasional AE scan
+        switch (mAeState) {
+            case ANDROID_CONTROL_AE_STATE_INACTIVE:
+                mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
+                break;
+            case ANDROID_CONTROL_AE_STATE_CONVERGED:
+                mAeCounter++;
+                if (mAeCounter > kStableAeMaxFrames) {
+                    mAeTargetExposureTime =
+                            mFacePriority ? kFacePriorityExposureTime :
+                            kNormalExposureTime;
+                    float exposureStep = ((double)rand() / RAND_MAX) *
+                            (kExposureWanderMax - kExposureWanderMin) +
+                            kExposureWanderMin;
+                    mAeTargetExposureTime *= std::pow(2, exposureStep);
+                    mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
+                }
+                break;
+            case ANDROID_CONTROL_AE_STATE_SEARCHING:
+                mAeCurrentExposureTime +=
+                        (mAeTargetExposureTime - mAeCurrentExposureTime) *
+                        kExposureTrackRate;
+                if (std::abs(mAeTargetExposureTime - mAeCurrentExposureTime) <
+                        mAeTargetExposureTime / 10) {
+                    // Close enough
+                    mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
+                    mAeCounter = 0;
+                }
+                break;
+            case ANDROID_CONTROL_AE_STATE_LOCKED:
+                mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
+                mAeCounter = 0;
+                break;
+            default:
+                ALOGE("%s: Emulator in unexpected AE state %d",
+                        __FUNCTION__, mAeState);
+                return INVALID_OPERATION;
+        }
+    } else {
+        // AE is locked
+        mAeState = ANDROID_CONTROL_AE_STATE_LOCKED;
+    }
+
+    return OK;
+}
+
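+// Fake AF: drives mAfState through the HAL3 AF state machine based on the
+// requested AF mode and trigger. Scans complete instantly, and "focusing"
+// randomly succeeds (roughly two out of three transitions land in
+// FOCUSED_LOCKED).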
+status_t EmulatedFakeRotatingCamera3::doFakeAF(CameraMetadata &settings) {
+    camera_metadata_entry e;
+
+    e = settings.find(ANDROID_CONTROL_AF_MODE);
+    if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
+        ALOGE("%s: No AF mode entry!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+    uint8_t afMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AF_MODE_OFF;
+
+    e = settings.find(ANDROID_CONTROL_AF_TRIGGER);
+    typedef camera_metadata_enum_android_control_af_trigger af_trigger_t;
+    af_trigger_t afTrigger;
+    if (e.count != 0) {
+        afTrigger = static_cast<af_trigger_t>(e.data.u8[0]);
+
+        ALOGV("%s: AF trigger set to 0x%x", __FUNCTION__, afTrigger);
+        ALOGV("%s: AF mode is 0x%x", __FUNCTION__, afMode);
+    } else {
+        afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
+    }
+
+    switch (afMode) {
+        case ANDROID_CONTROL_AF_MODE_OFF:
+            mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
+            return OK;
+        case ANDROID_CONTROL_AF_MODE_AUTO:
+        case ANDROID_CONTROL_AF_MODE_MACRO:
+        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
+        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
+            break;
+        default:
+            ALOGE("%s: Emulator doesn't support AF mode %d",
+                    __FUNCTION__, afMode);
+            return BAD_VALUE;
+    }
+
+    bool afModeChanged = mAfMode != afMode;
+    mAfMode = afMode;
+
+    /**
+     * Simulate AF triggers. Transition at most 1 state per frame.
+     * - Focusing always succeeds (goes into locked, or PASSIVE_SCAN).
+     */
+
+    bool afTriggerStart = false;
+    bool afTriggerCancel = false;
+    switch (afTrigger) {
+        case ANDROID_CONTROL_AF_TRIGGER_IDLE:
+            break;
+        case ANDROID_CONTROL_AF_TRIGGER_START:
+            afTriggerStart = true;
+            break;
+        case ANDROID_CONTROL_AF_TRIGGER_CANCEL:
+            afTriggerCancel = true;
+            // Cancel trigger always transitions into INACTIVE
+            mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
+
+            ALOGV("%s: AF State transition to STATE_INACTIVE", __FUNCTION__);
+
+            // Stay in 'inactive' until at least next frame
+            return OK;
+        default:
+            ALOGE("%s: Unknown af trigger value %d", __FUNCTION__, afTrigger);
+            return BAD_VALUE;
+    }
+
+    // If we get down here, we're either in an autofocus mode
+    //  or in a continuous focus mode (and no other modes)
+
+    int oldAfState = mAfState;
+    switch (mAfState) {
+        case ANDROID_CONTROL_AF_STATE_INACTIVE:
+            if (afTriggerStart) {
+                switch (afMode) {
+                    case ANDROID_CONTROL_AF_MODE_AUTO:
+                        // fall-through
+                    case ANDROID_CONTROL_AF_MODE_MACRO:
+                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
+                        break;
+                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
+                        // fall-through
+                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
+                        mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
+                        break;
+                }
+            } else {
+                // At least one frame stays in INACTIVE
+                if (!afModeChanged) {
+                    switch (afMode) {
+                        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
+                            // fall-through
+                        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
+                            mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
+                            break;
+                    }
+                }
+            }
+            break;
+        case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
+            /**
+             * When the AF trigger is activated, the algorithm should finish
+             * its PASSIVE_SCAN if active, and then transition into AF_FOCUSED
+             * or AF_NOT_FOCUSED as appropriate
+             */
+            if (afTriggerStart) {
+                // Randomly transition to focused or not focused
+                if (rand() % 3) {
+                    mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
+                } else {
+                    mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
+                }
+            }
+            /**
+             * When the AF trigger is not involved, the AF algorithm should
+             * start in INACTIVE state, and then transition into PASSIVE_SCAN
+             * and PASSIVE_FOCUSED states
+             */
+            else if (!afTriggerCancel) {
+               // Randomly transition to passive focus
+                if (rand() % 3 == 0) {
+                    mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
+                }
+            }
+
+            break;
+        case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
+            if (afTriggerStart) {
+                // Randomly transition to focused or not focused
+                if (rand() % 3) {
+                    mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
+                } else {
+                    mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
+                }
+            }
+            // TODO: initiate passive scan (PASSIVE_SCAN)
+            break;
+        case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
+            // Simulate AF sweep completing instantaneously
+
+            // Randomly transition to focused or not focused
+            if (rand() % 3) {
+                mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
+            } else {
+                mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
+            }
+            break;
+        case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
+            if (afTriggerStart) {
+                switch (afMode) {
+                    case ANDROID_CONTROL_AF_MODE_AUTO:
+                        // fall-through
+                    case ANDROID_CONTROL_AF_MODE_MACRO:
+                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
+                        break;
+                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
+                        // fall-through
+                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
+                        // continuous autofocus => trigger start has no effect
+                        break;
+                }
+            }
+            break;
+        case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
+            if (afTriggerStart) {
+                switch (afMode) {
+                    case ANDROID_CONTROL_AF_MODE_AUTO:
+                        // fall-through
+                    case ANDROID_CONTROL_AF_MODE_MACRO:
+                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
+                        break;
+                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
+                        // fall-through
+                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
+                        // continuous autofocus => trigger start has no effect
+                        break;
+                }
+            }
+            break;
+        default:
+            ALOGE("%s: Bad af state %d", __FUNCTION__, mAfState);
+    }
+
+    {
+        char afStateString[100] = {0,};
+        camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
+                oldAfState,
+                afStateString,
+                sizeof(afStateString));
+
+        char afNewStateString[100] = {0,};
+        camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
+                mAfState,
+                afNewStateString,
+                sizeof(afNewStateString));
+        ALOGVV("%s: AF state transitioned from %s to %s",
+              __FUNCTION__, afStateString, afNewStateString);
+    }
+
+
+    return OK;
+}
+
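+// Fake AWB: no actual white balance simulation. Any supported AWB mode is
+// reported as CONVERGED (or LOCKED when AWB lock is on), and AWB_MODE_OFF
+// maps to INACTIVE.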
+status_t EmulatedFakeRotatingCamera3::doFakeAWB(CameraMetadata &settings) {
+    camera_metadata_entry e;
+
+    e = settings.find(ANDROID_CONTROL_AWB_MODE);
+    if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
+        ALOGE("%s: No AWB mode entry!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+    uint8_t awbMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AWB_MODE_AUTO;
+
+    // TODO: Add white balance simulation
+
+    e = settings.find(ANDROID_CONTROL_AWB_LOCK);
+    bool awbLocked = (e.count > 0) ? (e.data.u8[0] == ANDROID_CONTROL_AWB_LOCK_ON) : false;
+
+    switch (awbMode) {
+        case ANDROID_CONTROL_AWB_MODE_OFF:
+            mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
+            break;
+        case ANDROID_CONTROL_AWB_MODE_AUTO:
+        case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
+        case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
+        case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
+        case ANDROID_CONTROL_AWB_MODE_SHADE:
+            // Always magically right, or locked
+            mAwbState = awbLocked ? ANDROID_CONTROL_AWB_STATE_LOCKED :
+                    ANDROID_CONTROL_AWB_STATE_CONVERGED;
+            break;
+        default:
+            ALOGE("%s: Emulator doesn't support AWB mode %d",
+                    __FUNCTION__, awbMode);
+            return BAD_VALUE;
+    }
+
+    return OK;
+}
+
+// Update the 3A Region by calculating the intersection of AE/AF/AWB and CROP
+// regions
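+// The crop region is stored as (xmin, ymin, width, height) and the 3A region
+// as (xmin, ymin, xmax, ymax, weight); for example, a crop of (0, 0, 640, 480)
+// clips an AE region of (600, 400, 700, 500, 1) down to (600, 400, 640, 480, 1).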
+static void update3ARegion(uint32_t tag, CameraMetadata &settings) {
+    if (tag != ANDROID_CONTROL_AE_REGIONS &&
+        tag != ANDROID_CONTROL_AF_REGIONS &&
+        tag != ANDROID_CONTROL_AWB_REGIONS) {
+        return;
+    }
+    camera_metadata_entry_t entry;
+    entry = settings.find(ANDROID_SCALER_CROP_REGION);
+    if (entry.count > 0) {
+        int32_t cropRegion[4];
+        cropRegion[0] =  entry.data.i32[0];
+        cropRegion[1] =  entry.data.i32[1];
+        cropRegion[2] =  entry.data.i32[2] + cropRegion[0];
+        cropRegion[3] =  entry.data.i32[3] + cropRegion[1];
+        entry = settings.find(tag);
+        if (entry.count > 0) {
+            int32_t* ARegion = entry.data.i32;
+            // calculate the intersection of AE/AF/AWB and CROP regions
+            if (ARegion[0] < cropRegion[2] && cropRegion[0] < ARegion[2] &&
+                ARegion[1] < cropRegion[3] && cropRegion[1] < ARegion[3]) {
+                int32_t interSect[5];
+                interSect[0] = std::max(ARegion[0], cropRegion[0]);
+                interSect[1] = std::max(ARegion[1], cropRegion[1]);
+                interSect[2] = std::min(ARegion[2], cropRegion[2]);
+                interSect[3] = std::min(ARegion[3], cropRegion[3]);
+                interSect[4] = ARegion[4];
+                settings.update(tag, &interSect[0], 5);
+            }
+        }
+    }
+}
+
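+// Write the simulated 3A results back into the request settings: the current
+// exposure/sensitivity (when AE is not OFF), the AE/AF/AWB states, a lens
+// state derived from the AF state, and the 3A regions clipped to the crop.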
+void EmulatedFakeRotatingCamera3::update3A(CameraMetadata &settings) {
+    if (mAeMode != ANDROID_CONTROL_AE_MODE_OFF) {
+        settings.update(ANDROID_SENSOR_EXPOSURE_TIME,
+                &mAeCurrentExposureTime, 1);
+        settings.update(ANDROID_SENSOR_SENSITIVITY,
+                &mAeCurrentSensitivity, 1);
+    }
+
+    settings.update(ANDROID_CONTROL_AE_STATE,
+            &mAeState, 1);
+    settings.update(ANDROID_CONTROL_AF_STATE,
+            &mAfState, 1);
+    settings.update(ANDROID_CONTROL_AWB_STATE,
+            &mAwbState, 1);
+
+    uint8_t lensState;
+    switch (mAfState) {
+        case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
+        case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
+            lensState = ANDROID_LENS_STATE_MOVING;
+            break;
+        case ANDROID_CONTROL_AF_STATE_INACTIVE:
+        case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
+        case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
+        case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
+        case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
+        default:
+            lensState = ANDROID_LENS_STATE_STATIONARY;
+            break;
+    }
+    settings.update(ANDROID_LENS_STATE, &lensState, 1);
+    update3ARegion(ANDROID_CONTROL_AE_REGIONS, settings);
+    update3ARegion(ANDROID_CONTROL_AF_REGIONS, settings);
+    update3ARegion(ANDROID_CONTROL_AWB_REGIONS, settings);
+}
+
+void EmulatedFakeRotatingCamera3::signalReadoutIdle() {
+    Mutex::Autolock l(mLock);
+    // Need to check isIdle again because waiting on mLock may have allowed
+    // something to be placed in the in-flight queue.
+    if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) {
+        ALOGV("Now idle");
+        mStatus = STATUS_READY;
+    }
+}
+
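+// Sensor event callback: an EXPOSURE_START event is forwarded to the
+// framework as a CAMERA3_MSG_SHUTTER notification for that frame.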
+void EmulatedFakeRotatingCamera3::onCameraRotatorEvent(uint32_t frameNumber, Event e,
+        nsecs_t timestamp) {
+    switch(e) {
+        case CameraRotator::CameraRotatorListener::EXPOSURE_START: {
+            ALOGVV("%s: Frame %d: Sensor started exposure at %lld",
+                    __FUNCTION__, frameNumber, timestamp);
+            // Trigger shutter notify to framework
+            camera3_notify_msg_t msg;
+            msg.type = CAMERA3_MSG_SHUTTER;
+            msg.message.shutter.frame_number = frameNumber;
+            msg.message.shutter.timestamp = timestamp;
+            sendNotify(&msg);
+            break;
+        }
+        default:
+            ALOGW("%s: Unexpected sensor event %d at %" PRId64, __FUNCTION__,
+                    e, timestamp);
+            break;
+    }
+}
+
+EmulatedFakeRotatingCamera3::ReadoutThread::ReadoutThread(EmulatedFakeRotatingCamera3 *parent) :
+        mParent(parent), mJpegWaiting(false) {
+}
+
+EmulatedFakeRotatingCamera3::ReadoutThread::~ReadoutThread() {
+    for (List<Request>::iterator i = mInFlightQueue.begin();
+         i != mInFlightQueue.end(); i++) {
+        delete i->buffers;
+        delete i->sensorBuffers;
+    }
+}
+
+void EmulatedFakeRotatingCamera3::ReadoutThread::queueCaptureRequest(const Request &r) {
+    Mutex::Autolock l(mLock);
+
+    mInFlightQueue.push_back(r);
+    mInFlightSignal.signal();
+}
+
+bool EmulatedFakeRotatingCamera3::ReadoutThread::isIdle() {
+    Mutex::Autolock l(mLock);
+    return mInFlightQueue.empty() && !mThreadActive;
+}
+
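+// Block until the in-flight queue has room for another request, polling in
+// kWaitPerLoop slices for at most kMaxWaitLoops iterations.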
+status_t EmulatedFakeRotatingCamera3::ReadoutThread::waitForReadout() {
+    status_t res;
+    Mutex::Autolock l(mLock);
+    int loopCount = 0;
+    while (mInFlightQueue.size() >= kMaxQueueSize) {
+        res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
+        if (res != OK && res != TIMED_OUT) {
+            ALOGE("%s: Error waiting for in-flight queue to shrink",
+                    __FUNCTION__);
+            return INVALID_OPERATION;
+        }
+        if (loopCount == kMaxWaitLoops) {
+            ALOGE("%s: Timed out waiting for in-flight queue to shrink",
+                    __FUNCTION__);
+            return TIMED_OUT;
+        }
+        loopCount++;
+    }
+    return OK;
+}
+
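+// Readout loop: pop one request from the in-flight queue, wait for the sensor
+// to deliver the matching frame, hand any JPEG buffer to the compressor
+// asynchronously, then assemble and send the capture result to the framework.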
+bool EmulatedFakeRotatingCamera3::ReadoutThread::threadLoop() {
+    status_t res;
+
+    ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);
+
+    // First wait for a request from the in-flight queue
+
+    if (mCurrentRequest.settings.isEmpty()) {
+        Mutex::Autolock l(mLock);
+        if (mInFlightQueue.empty()) {
+            res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
+            if (res == TIMED_OUT) {
+                ALOGVV("%s: ReadoutThread: Timed out waiting for request",
+                        __FUNCTION__);
+                return true;
+            } else if (res != NO_ERROR) {
+                ALOGE("%s: Error waiting for capture requests: %d",
+                        __FUNCTION__, res);
+                return false;
+            }
+        }
+        mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
+        mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
+        mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
+        mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers;
+        mInFlightQueue.erase(mInFlightQueue.begin());
+        mInFlightSignal.signal();
+        mThreadActive = true;
+        ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__,
+                mCurrentRequest.frameNumber);
+    }
+
+    // Then wait for it to be delivered from the sensor
+    ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor",
+            __FUNCTION__);
+
+    nsecs_t captureTime;
+    bool gotFrame =
+            mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
+    if (!gotFrame) {
+        ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
+                __FUNCTION__);
+        return true;
+    }
+
+    ALOGVV("Sensor done with readout for frame %d, captured at %lld ",
+            mCurrentRequest.frameNumber, captureTime);
+
+    // Check if we need to JPEG encode a buffer, and send it for async
+    // compression if so. Otherwise prepare the buffer for return.
+    bool needJpeg = false;
+    HalBufferVector::iterator buf = mCurrentRequest.buffers->begin();
+    while(buf != mCurrentRequest.buffers->end()) {
+        bool goodBuffer = true;
+        if ( buf->stream->format ==
+                HAL_PIXEL_FORMAT_BLOB && buf->stream->data_space != HAL_DATASPACE_DEPTH) {
+            Mutex::Autolock jl(mJpegLock);
+            if (mJpegWaiting) {
+                // This shouldn't happen, because processCaptureRequest should
+                // be stalling until JPEG compressor is free.
+                ALOGE("%s: Already processing a JPEG!", __FUNCTION__);
+                goodBuffer = false;
+            }
+            if (goodBuffer) {
+                // Compressor takes ownership of sensorBuffers here
+                res = mParent->mJpegCompressor->start(mCurrentRequest.sensorBuffers,
+                        this, &(mCurrentRequest.settings));
+                goodBuffer = (res == OK);
+            }
+            if (goodBuffer) {
+                needJpeg = true;
+
+                mJpegHalBuffer = *buf;
+                mJpegFrameNumber = mCurrentRequest.frameNumber;
+                mJpegWaiting = true;
+
+                mCurrentRequest.sensorBuffers = NULL;
+                buf = mCurrentRequest.buffers->erase(buf);
+
+                continue;
+            }
+            ALOGE("%s: Error compressing output buffer: %s (%d)",
+                        __FUNCTION__, strerror(-res), res);
+            // fallthrough for cleanup
+        }
+        mParent->mGBM->unlock(*(buf->buffer));
+
+        buf->status = goodBuffer ? CAMERA3_BUFFER_STATUS_OK :
+                CAMERA3_BUFFER_STATUS_ERROR;
+        buf->acquire_fence = -1;
+        buf->release_fence = -1;
+
+        ++buf;
+    } // end while
+
+    // Construct result for all completed buffers and results
+
+    camera3_capture_result result;
+
+    if (mParent->hasCapability(BACKWARD_COMPATIBLE)) {
+        static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
+        mCurrentRequest.settings.update(ANDROID_STATISTICS_SCENE_FLICKER,
+                &sceneFlicker, 1);
+
+        static const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
+        mCurrentRequest.settings.update(ANDROID_FLASH_STATE,
+                &flashState, 1);
+
+        nsecs_t rollingShutterSkew = Sensor::kFrameDurationRange[0];
+        mCurrentRequest.settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
+                &rollingShutterSkew, 1);
+
+        float focusRange[] = { 1.0f/5.0f, 0 }; // 5 m to infinity in focus
+        mCurrentRequest.settings.update(ANDROID_LENS_FOCUS_RANGE,
+                focusRange, sizeof(focusRange)/sizeof(float));
+    }
+
+    if (mParent->hasCapability(DEPTH_OUTPUT)) {
+        camera_metadata_entry_t entry;
+
+        find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_POSE_TRANSLATION, &entry);
+        mCurrentRequest.settings.update(ANDROID_LENS_POSE_TRANSLATION,
+                entry.data.f, entry.count);
+
+        find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_POSE_ROTATION, &entry);
+        mCurrentRequest.settings.update(ANDROID_LENS_POSE_ROTATION,
+                entry.data.f, entry.count);
+
+        find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_INTRINSIC_CALIBRATION, &entry);
+        mCurrentRequest.settings.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
+                entry.data.f, entry.count);
+
+        find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_RADIAL_DISTORTION, &entry);
+        mCurrentRequest.settings.update(ANDROID_LENS_RADIAL_DISTORTION,
+                entry.data.f, entry.count);
+    }
+
+    mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP,
+            &captureTime, 1);
+
+
+    // JPEGs take a stage longer
+    const uint8_t pipelineDepth = needJpeg ? kMaxBufferCount : kMaxBufferCount - 1;
+    mCurrentRequest.settings.update(ANDROID_REQUEST_PIPELINE_DEPTH,
+            &pipelineDepth, 1);
+
+    result.frame_number = mCurrentRequest.frameNumber;
+    result.result = mCurrentRequest.settings.getAndLock();
+    result.num_output_buffers = mCurrentRequest.buffers->size();
+    result.output_buffers = mCurrentRequest.buffers->array();
+    result.input_buffer = nullptr;
+    result.partial_result = 1;
+
+    // Go idle if queue is empty, before sending result
+    bool signalIdle = false;
+    {
+        Mutex::Autolock l(mLock);
+        if (mInFlightQueue.empty()) {
+            mThreadActive = false;
+            signalIdle = true;
+        }
+    }
+    if (signalIdle) mParent->signalReadoutIdle();
+
+    // Send it off to the framework
+    ALOGVV("%s: ReadoutThread: Send result to framework",
+            __FUNCTION__);
+    mParent->sendCaptureResult(&result);
+
+    // Clean up
+    mCurrentRequest.settings.unlock(result.result);
+
+    delete mCurrentRequest.buffers;
+    mCurrentRequest.buffers = NULL;
+    if (!needJpeg) {
+        delete mCurrentRequest.sensorBuffers;
+        mCurrentRequest.sensorBuffers = NULL;
+    }
+    mCurrentRequest.settings.clear();
+
+    return true;
+}
+
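+// JPEG completion callback: returns the compressed (or error-flagged) blob
+// buffer to the framework as a buffer-only capture result.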
+void EmulatedFakeRotatingCamera3::ReadoutThread::onJpegDone(
+        const StreamBuffer &jpegBuffer, bool success) {
+    Mutex::Autolock jl(mJpegLock);
+
+    mParent->mGBM->unlock(*(jpegBuffer.buffer));
+
+    mJpegHalBuffer.status = success ?
+            CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR;
+    mJpegHalBuffer.acquire_fence = -1;
+    mJpegHalBuffer.release_fence = -1;
+    mJpegWaiting = false;
+
+    camera3_capture_result result;
+
+    result.frame_number = mJpegFrameNumber;
+    result.result = NULL;
+    result.num_output_buffers = 1;
+    result.output_buffers = &mJpegHalBuffer;
+    result.input_buffer = nullptr;
+    result.partial_result = 0;
+
+    if (!success) {
+        ALOGE("%s: Compression failure, returning error state buffer to"
+                " framework", __FUNCTION__);
+    } else {
+        ALOGV("%s: Compression complete, returning buffer to framework",
+                __FUNCTION__);
+    }
+
+    mParent->sendCaptureResult(&result);
+}
+
+void EmulatedFakeRotatingCamera3::ReadoutThread::onJpegInputDone(
+        const StreamBuffer &inputBuffer) {
+    // Should never get here, since the input buffer has to be returned
+    // by end of processCaptureRequest
+    ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__);
+}
+
+
+}; // namespace android
diff --git a/camera/EmulatedFakeRotatingCamera3.h b/camera/EmulatedFakeRotatingCamera3.h
new file mode 100644
index 0000000..6bc43ca
--- /dev/null
+++ b/camera/EmulatedFakeRotatingCamera3.h
@@ -0,0 +1,294 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+/**
+ * Contains the declaration of the EmulatedFakeRotatingCamera3 class, which
+ * encapsulates functionality of a fake camera that implements version 3 of
+ * the camera device interface.
+ */
+
+#include "EmulatedCamera3.h"
+#include "CameraRotator.h"
+#include "fake-pipeline2/Base.h"
+#include "fake-pipeline2/Sensor.h"
+#include "fake-pipeline2/JpegCompressor.h"
+#include <CameraMetadata.h>
+#include <utils/SortedVector.h>
+#include <utils/List.h>
+#include <utils/Mutex.h>
+
+using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
+
+namespace android {
+
+/**
+ * Encapsulates functionality for a v3 HAL camera which produces synthetic data.
+ *
+ * Note that EmulatedCameraFactory instantiates an object of this class just
+ * once, when the EmulatedCameraFactory instance gets constructed. Connection to
+ * and disconnection from the actual camera device are handled by the
+ * connectCamera() and closeCamera() methods of this class, which are invoked in
+ * response to the hw_module_methods_t::open and camera_device::close callbacks.
+ */
+class EmulatedFakeRotatingCamera3 : public EmulatedCamera3,
+        private CameraRotator::CameraRotatorListener {
+public:
+
+    EmulatedFakeRotatingCamera3(int cameraId, bool facingBack,
+            struct hw_module_t* module, GraphicBufferMapper* gbm);
+
+    virtual ~EmulatedFakeRotatingCamera3();
+
+    /****************************************************************************
+     * EmulatedCamera3 virtual overrides
+     ***************************************************************************/
+
+public:
+
+    virtual status_t Initialize();
+
+    /****************************************************************************
+     * Camera module API and generic hardware device API implementation
+     ***************************************************************************/
+
+public:
+    virtual status_t connectCamera(hw_device_t** device);
+
+    virtual status_t closeCamera();
+
+    virtual status_t getCameraInfo(struct camera_info *info);
+
+    /****************************************************************************
+     * EmulatedCamera3 abstract API implementation
+     ***************************************************************************/
+
+protected:
+
+    virtual status_t configureStreams(
+        camera3_stream_configuration *streamList);
+
+    virtual status_t registerStreamBuffers(
+        const camera3_stream_buffer_set *bufferSet);
+
+    virtual const camera_metadata_t* constructDefaultRequestSettings(
+        int type);
+
+    virtual status_t processCaptureRequest(camera3_capture_request *request);
+
+    virtual status_t flush();
+
+    /** Debug methods */
+
+    virtual void dump(int fd);
+
+private:
+
+    /**
+     * Get the requested capability set for this camera
+     */
+    status_t getCameraCapabilities();
+
+    bool hasCapability(AvailableCapabilities cap);
+
+    /**
+     * Build the static info metadata buffer for this device
+     */
+    status_t constructStaticInfo();
+
+    /**
+     * Run the fake 3A algorithms as needed. May override/modify settings
+     * values.
+     */
+    status_t process3A(CameraMetadata &settings);
+
+    status_t doFakeAE(CameraMetadata &settings);
+    status_t doFakeAF(CameraMetadata &settings);
+    status_t doFakeAWB(CameraMetadata &settings);
+    void     update3A(CameraMetadata &settings);
+
+    /** Signal from readout thread that it doesn't have anything to do */
+    void     signalReadoutIdle();
+
+    /** Handle interrupt events from the sensor */
+    void     onCameraRotatorEvent(uint32_t frameNumber, Event e, nsecs_t timestamp);
+
+    /****************************************************************************
+     * Static configuration information
+     ***************************************************************************/
+private:
+    static const uint32_t kMaxRawStreamCount = 1;
+    static const uint32_t kMaxProcessedStreamCount = 3;
+    static const uint32_t kMaxJpegStreamCount = 1;
+    static const uint32_t kMaxReprocessStreamCount = 2;
+    static const uint32_t kMaxBufferCount = 4;
+    // We need a positive stream ID to distinguish external buffers from
+    // sensor-generated buffers which use a nonpositive ID. Otherwise, HAL3 has
+    // no concept of a stream id.
+    static const uint32_t kGenericStreamId = 1;
+    static const int32_t  kAvailableFormats[];
+    static const uint32_t kAvailableRawSizes[];
+    static const int64_t  kSyncWaitTimeout     = 10000000; // 10 ms
+    static const int32_t  kMaxSyncTimeoutCount = 1000; // 1000 kSyncWaitTimeouts
+    static const uint32_t kFenceTimeoutMs      = 2000; // 2 s
+    static const nsecs_t  kJpegTimeoutNs       = 5000000000l; // 5 s
+
+    /****************************************************************************
+     * Data members.
+     ***************************************************************************/
+
+    /* HAL interface serialization lock. */
+    Mutex              mLock;
+
+    /* Facing back (true) or front (false) switch. */
+    bool               mFacingBack;
+    int32_t            mSensorWidth;
+    int32_t            mSensorHeight;
+
+    SortedVector<AvailableCapabilities> mCapabilities;
+
+    GraphicBufferMapper* mGBM;
+
+    /**
+     * Cache for default templates. Once one is requested, the pointer must be
+     * valid at least until close() is called on the device
+     */
+    camera_metadata_t *mDefaultTemplates[CAMERA3_TEMPLATE_COUNT];
+
+    /**
+     * Private stream information, stored in camera3_stream_t->priv.
+     */
+    struct PrivateStreamInfo {
+        bool alive;
+    };
+
+    // Shortcut to the input stream
+    camera3_stream_t*  mInputStream;
+
+    typedef List<camera3_stream_t*>           StreamList;
+    typedef List<camera3_stream_t*>::iterator StreamIterator;
+    typedef Vector<camera3_stream_buffer>     HalBufferVector;
+
+    // All streams, including input stream
+    StreamList         mStreams;
+
+    // Cached settings from latest submitted request
+    CameraMetadata     mPrevSettings;
+
+    /** Fake hardware interfaces */
+    sp<CameraRotator>         mSensor;
+    sp<JpegCompressor> mJpegCompressor;
+    friend class       JpegCompressor;
+
+    /** Processing thread for sending out results */
+
+    class ReadoutThread : public Thread, private JpegCompressor::JpegListener {
+      public:
+        ReadoutThread(EmulatedFakeRotatingCamera3 *parent);
+        ~ReadoutThread();
+
+        struct Request {
+            uint32_t         frameNumber;
+            CameraMetadata   settings;
+            HalBufferVector *buffers;
+            Buffers         *sensorBuffers;
+        };
+
+        /**
+         * Interface to parent class
+         */
+
+        // Place request in the in-flight queue to wait for sensor capture
+        void     queueCaptureRequest(const Request &r);
+
+        // Test if the readout thread is idle (no in-flight requests, not
+        // currently reading out anything)
+        bool     isIdle();
+
+        // Wait until the in-flight queue has room for another request
+        status_t waitForReadout();
+
+      private:
+        static const nsecs_t kWaitPerLoop  = 10000000L; // 10 ms
+        static const nsecs_t kMaxWaitLoops = 1000;
+        static const size_t  kMaxQueueSize = 4;
+
+        EmulatedFakeRotatingCamera3 *mParent;
+        Mutex mLock;
+
+        List<Request> mInFlightQueue;
+        Condition     mInFlightSignal;
+        bool          mThreadActive;
+
+        virtual bool threadLoop();
+
+        // Only accessed by threadLoop
+
+        Request mCurrentRequest;
+
+        // Jpeg completion callbacks
+
+        Mutex                 mJpegLock;
+        bool                  mJpegWaiting;
+        camera3_stream_buffer mJpegHalBuffer;
+        uint32_t              mJpegFrameNumber;
+        virtual void onJpegDone(const StreamBuffer &jpegBuffer, bool success);
+        virtual void onJpegInputDone(const StreamBuffer &inputBuffer);
+    };
+
+    sp<ReadoutThread> mReadoutThread;
+
+    /** Fake 3A constants */
+
+    static const nsecs_t kNormalExposureTime;
+    static const nsecs_t kFacePriorityExposureTime;
+    static const int     kNormalSensitivity;
+    static const int     kFacePrioritySensitivity;
+    // Rate of converging AE to new target value, as fraction of difference between
+    // current and target value.
+    static const float   kExposureTrackRate;
+    // Minimum duration for precapture state. May be longer if slow to converge
+    // to target exposure
+    static const int     kPrecaptureMinFrames;
+    // How often to restart AE 'scanning'
+    static const int     kStableAeMaxFrames;
+    // Maximum stop below 'normal' exposure time that we'll wander to while
+    // pretending to converge AE. In powers of 2. (-2 == 1/4 as bright)
+    static const float   kExposureWanderMin;
+    // Maximum stop above 'normal' exposure time that we'll wander to while
+    // pretending to converge AE. In powers of 2. (2 == 4x as bright)
+    static const float   kExposureWanderMax;
+
+    /** Fake 3A state */
+
+    uint8_t mControlMode;
+    bool    mFacePriority;
+    uint8_t mAeState;
+    uint8_t mAfState;
+    uint8_t mAwbState;
+    uint8_t mAeMode;
+    uint8_t mAfMode;
+    uint8_t mAwbMode;
+
+    int     mAeCounter;
+    nsecs_t mAeCurrentExposureTime;
+    nsecs_t mAeTargetExposureTime;
+    int     mAeCurrentSensitivity;
+
+};
+
+} // namespace android
diff --git a/camera/EmulatedFakeRotatingCameraDevice.cpp b/camera/EmulatedFakeRotatingCameraDevice.cpp
index 6220084..1adf944 100755
--- a/camera/EmulatedFakeRotatingCameraDevice.cpp
+++ b/camera/EmulatedFakeRotatingCameraDevice.cpp
@@ -407,13 +407,18 @@
     return 1;
 }
 
-EmulatedFakeRotatingCameraDevice::EmulatedFakeRotatingCameraDevice(EmulatedFakeCamera* camera_hal)
-    : EmulatedCameraDevice(camera_hal), mOpenglReady(false)
+EmulatedFakeRotatingCameraDevice::EmulatedFakeRotatingCameraDevice():
+    mObjectLock(),
+    mOpenglReady(false),
+    mState(ECDS_CONNECTED)
 {
+    // not much to initialize
+    mState = ECDS_INITIALIZED;
 }
 
 EmulatedFakeRotatingCameraDevice::~EmulatedFakeRotatingCameraDevice()
 {
+    mState = ECDS_INVALID;
 }
 
 /****************************************************************************
@@ -476,13 +481,12 @@
         return EINVAL;
     }
 
-    /* Initialize the base class. */
-    const status_t res =
-        EmulatedCameraDevice::commonStartDevice(width, height, pix_fmt);
-
+    mFrameWidth = width;
+    mFrameHeight = height;
+    mPixelFormat = pix_fmt;
     mState = ECDS_STARTED;
 
-    return res;
+    return NO_ERROR;
 }
 
 status_t EmulatedFakeRotatingCameraDevice::stopDevice()
@@ -495,7 +499,6 @@
         return NO_ERROR;
     }
 
-    EmulatedCameraDevice::commonStopDevice();
     mState = ECDS_CONNECTED;
 
     if (mOpenglReady) {
diff --git a/camera/EmulatedFakeRotatingCameraDevice.h b/camera/EmulatedFakeRotatingCameraDevice.h
index 33b4b32..43a8c97 100755
--- a/camera/EmulatedFakeRotatingCameraDevice.h
+++ b/camera/EmulatedFakeRotatingCameraDevice.h
@@ -31,16 +31,13 @@
 
 namespace android {
 
-class EmulatedFakeCamera;
-
 /* Encapsulates a fake camera device.
  * Fake camera device emulates a camera device by providing frames containing
  * an image rendered by opengl, that takes rotating input from host
  */
-class EmulatedFakeRotatingCameraDevice : public EmulatedCameraDevice {
+class EmulatedFakeRotatingCameraDevice {
 public:
-    /* Constructs EmulatedFakeRotatingCameraDevice instance. */
-    explicit EmulatedFakeRotatingCameraDevice(EmulatedFakeCamera* camera_hal);
+    explicit EmulatedFakeRotatingCameraDevice();
 
     /* Destructs EmulatedFakeRotatingCameraDevice instance. */
     ~EmulatedFakeRotatingCameraDevice();
@@ -71,13 +68,50 @@
     status_t stopDevice();
 
 
-protected:
     /* Implementation of the frame production routine. */
-    bool produceFrame(void* buffer, int64_t* timestamp) override;
+    bool produceFrame(void* buffer, int64_t* timestamp);
 
     /****************************************************************************
      * Fake camera device private API
      ***************************************************************************/
+private:
+
+    enum EmulatedCameraDeviceState {
+        ECDS_INVALID,
+        /* Object has been constructed. */
+        ECDS_CONSTRUCTED,
+        /* Object has been initialized. */
+        ECDS_INITIALIZED,
+        /* Object has been connected to the physical device. */
+        ECDS_CONNECTED,
+        /* Camera device has been started. */
+        ECDS_STARTED,
+    };
+
+    /* Object state. */
+    EmulatedCameraDeviceState   mState;
+
+    inline bool isInitialized() const {
+        return mState != ECDS_CONSTRUCTED;
+    }
+    inline bool isConnected() const {
+        /* Instance is connected when its status is either "connected" or
+         * "started". */
+        return mState == ECDS_CONNECTED || mState == ECDS_STARTED;
+    }
+    inline bool isStarted() const {
+        return mState == ECDS_STARTED;
+    }
+
+
+    Mutex                       mObjectLock;
+    /* Frame width */
+    int                         mFrameWidth;
+
+    /* Frame height */
+    int                         mFrameHeight;
+
+    uint32_t                    mPixelFormat;
 
 private: