[Camera Revamp] New camera protocol, guest side

Use host-backed memory to transmit frames from host to guest.
Support multiple resolution requests without restarting the host camera.
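
A minimal sketch of the v1 frame pull on the guest side, assuming the
CameraQemuClient and GrallocModule helpers added in this change (example
dimensions, error handling elided):

    CameraQemuClient client;
    client.connectClient("name=virtualscene");
    client.queryConnect();
    // v1: no dimensions at start; the host camera keeps running when the
    // requested resolution changes.
    client.queryStart();

    // Allocate a gralloc buffer backed by host-visible memory and hand its
    // mmap offset to the host, which writes the frame pixels directly.
    int width = 640, height = 480;
    buffer_handle_t handle;
    GrallocModule::getInstance().alloc(width, height,
                                       HAL_PIXEL_FORMAT_YCbCr_420_888,
                                       GRALLOC_USAGE_HW_CAMERA_WRITE, &handle);
    uint64_t offset = GrallocModule::getInstance().getOffset(handle);
    int64_t frameTime = 0;
    client.queryFrame(width, height, V4L2_PIX_FMT_YUV420, offset,
                      1.0f, 1.0f, 1.0f, /*exposure_comp=*/1.0f, &frameTime);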

Bug: 142964263

Test: camera preview, photo capture, video recording

Change-Id: Ifd388b0957a2915a83dbe251ed6accc48cb162ee
diff --git a/camera/EmulatedQemuCamera3.cpp b/camera/EmulatedQemuCamera3.cpp
index 7cc3d86..6dd15fb 100644
--- a/camera/EmulatedQemuCamera3.cpp
+++ b/camera/EmulatedQemuCamera3.cpp
@@ -416,7 +416,7 @@
         if (newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
             if (newStream->usage & GRALLOC_USAGE_HW_CAMERA_WRITE) {
                 if (newStream->usage & GRALLOC_USAGE_HW_TEXTURE) {
-                    newStream->format = HAL_PIXEL_FORMAT_RGBA_8888;
+                    newStream->format = HAL_PIXEL_FORMAT_YCbCr_420_888;
                 }
                 else if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) {
                     newStream->format = HAL_PIXEL_FORMAT_YCbCr_420_888;
@@ -904,7 +904,7 @@
         if (srcBuf.stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
             if (srcBuf.stream->usage & GRALLOC_USAGE_HW_CAMERA_WRITE) {
                 if (srcBuf.stream->usage & GRALLOC_USAGE_HW_TEXTURE) {
-                    destBuf.format = HAL_PIXEL_FORMAT_RGBA_8888;
+                    destBuf.format = HAL_PIXEL_FORMAT_YCbCr_420_888;
                 }
                 else if (srcBuf.stream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) {
                     destBuf.format = HAL_PIXEL_FORMAT_YCbCr_420_888;
diff --git a/camera/EmulatorCameraTest.cpp b/camera/EmulatorCameraTest.cpp
index 684e6d7..eb82b9c 100644
--- a/camera/EmulatorCameraTest.cpp
+++ b/camera/EmulatorCameraTest.cpp
@@ -3,79 +3,289 @@
 #include <vector>
 
 #include "fake-pipeline2/Base.h"
+#include "fake-pipeline2/Scene.h"
 #include "QemuClient.h"
+#include "GrallocModule.h"
+#include "gralloc_cb.h"
 
 #include <linux/videodev2.h>
 #include <utils/Timers.h>
 
 using namespace android;
 
+const nsecs_t kExposureTimeRange[2] =
+    {1000L, 300000000L}; // 1 us - 0.3 sec
+const nsecs_t kFrameDurationRange[2] =
+    {33331760L, 300000000L}; // ~1/30 s - 0.3 sec
+
+const nsecs_t kMinVerticalBlank = 10000L;
+
+const uint8_t kColorFilterArrangement =
+    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;
+
+// Output image data characteristics
+const uint32_t kMaxRawValue = 4000;
+const uint32_t kBlackLevel  = 1000;
+
+// Sensor sensitivity
+const float kSaturationVoltage      = 0.520f;
+const uint32_t kSaturationElectrons = 2000;
+const float kVoltsPerLuxSecond      = 0.100f;
+
+const float kElectronsPerLuxSecond =
+        kSaturationElectrons / kSaturationVoltage
+        * kVoltsPerLuxSecond;
+
+const float kBaseGainFactor = (float)kMaxRawValue /
+            kSaturationElectrons;
+
+const float kReadNoiseStddevBeforeGain = 1.177; // in electrons
+const float kReadNoiseStddevAfterGain =  2.100; // in digital counts
+const float kReadNoiseVarBeforeGain =
+            kReadNoiseStddevBeforeGain *
+            kReadNoiseStddevBeforeGain;
+const float kReadNoiseVarAfterGain =
+            kReadNoiseStddevAfterGain *
+            kReadNoiseStddevAfterGain;
+
+const int32_t kSensitivityRange[2] = {100, 1600};
+const uint32_t kDefaultSensitivity = 100;
+
+void captureRGBA(uint8_t *img, uint32_t gain, uint32_t width, uint32_t height,
+                 Scene& scene, uint32_t sWidth, uint32_t sHeight) {
+    float totalGain = gain/100.0 * kBaseGainFactor;
+    // In fixed-point math, calculate total scaling from electrons to 8bpp
+    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
+    unsigned int DivH = (float)sHeight/height * (0x1 << 10);
+    unsigned int DivW = (float)sWidth/width * (0x1 << 10);
+
+    for (unsigned int outY = 0; outY < height; outY++) {
+        unsigned int y = outY * DivH >> 10;
+        uint8_t *px = img + outY * width * 4;
+        scene.setReadoutPixel(0, y);
+        unsigned int lastX = 0;
+        const uint32_t *pixel = scene.getPixelElectrons();
+        for (unsigned int outX = 0; outX < width; outX++) {
+            uint32_t rCount, gCount, bCount;
+            unsigned int x = outX * DivW >> 10;
+            if (x - lastX > 0) {
+                for (unsigned int k = 0; k < (x-lastX); k++) {
+                     pixel = scene.getPixelElectrons();
+                }
+            }
+            lastX = x;
+            // TODO: Perfect demosaicing is a cheat
+            rCount = pixel[Scene::R]  * scale64x;
+            gCount = pixel[Scene::Gr] * scale64x;
+            bCount = pixel[Scene::B]  * scale64x;
+
+            *px++ = rCount < 255*64 ? rCount / 64 : 255;
+            *px++ = gCount < 255*64 ? gCount / 64 : 255;
+            *px++ = bCount < 255*64 ? bCount / 64 : 255;
+            *px++ = 255;
+         }
+        // TODO: Handle this better
+        //simulatedTime += mRowReadoutTime;
+    }
+}
+
+void captureYU12(uint8_t *img, uint32_t gain, uint32_t width, uint32_t height,
+                 Scene& scene, uint32_t sWidth, uint32_t sHeight) {
+    float totalGain = gain/100.0 * kBaseGainFactor;
+    // Using fixed-point math with 6 bits of fractional precision.
+    // In fixed-point math, calculate total scaling from electrons to 8bpp
+    const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
+    // In fixed-point math, saturation point of sensor after gain
+    const int saturationPoint = 64 * 255;
+    // Fixed-point coefficients for RGB-YUV transform
+    // Based on JFIF RGB->YUV transform.
+    // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
+    float rgbToY[]  = {19.0, 37.0, 7.0, 0.0};
+    float rgbToCb[] = {-10.0,-21.0, 32.0, 524288.0};
+    float rgbToCr[] = {32.0,-26.0, -5.0, 524288.0};
+    // Scale back to 8bpp non-fixed-point
+    const int scaleOut = 64;
+    const int scaleOutSq = scaleOut * scaleOut; // after multiplies
+    const double invscaleOutSq = 1.0/scaleOutSq;
+    for (int i=0; i < 4; ++i) {
+        rgbToY[i] *= invscaleOutSq;
+        rgbToCb[i] *= invscaleOutSq;
+        rgbToCr[i] *= invscaleOutSq;
+    }
+
+    unsigned int DivH = (float)sHeight/height * (0x1 << 10);
+    unsigned int DivW = (float)sWidth/width * (0x1 << 10);
+    for (unsigned int outY = 0; outY < height; outY++) {
+        unsigned int y = outY * DivH >> 10;
+        uint8_t *pxY = img + outY * width;
+        uint8_t *pxU = img + height * width + (outY / 2) * (width / 2);
+        uint8_t *pxV = pxU + (height / 2) * (width / 2);
+        scene.setReadoutPixel(0, y);
+        unsigned int lastX = 0;
+        const uint32_t *pixel = scene.getPixelElectrons();
+        for (unsigned int outX = 0; outX < width; outX++) {
+            int32_t rCount, gCount, bCount;
+            unsigned int x = outX * DivW >> 10;
+            if (x - lastX > 0) {
+                for (unsigned int k = 0; k < (x-lastX); k++) {
+                     pixel = scene.getPixelElectrons();
+                }
+            }
+            lastX = x;
+            rCount = pixel[Scene::R]  * scale64x;
+            rCount = rCount < saturationPoint ? rCount : saturationPoint;
+            gCount = pixel[Scene::Gr] * scale64x;
+            gCount = gCount < saturationPoint ? gCount : saturationPoint;
+            bCount = pixel[Scene::B]  * scale64x;
+            bCount = bCount < saturationPoint ? bCount : saturationPoint;
+            *pxY++ = (rgbToY[0] * rCount + rgbToY[1] * gCount + rgbToY[2] * bCount);
+            if (outY % 2 == 0 && outX % 2 == 0) {
+                *pxV++ = (rgbToCr[0] * rCount + rgbToCr[1] * gCount + rgbToCr[2] * bCount + rgbToCr[3]);
+                *pxU++ = (rgbToCb[0] * rCount + rgbToCb[1] * gCount + rgbToCb[2] * bCount + rgbToCb[3]);
+            }
+        }
+    }
+}
+
 // Test the capture speed of qemu camera, e.g., webcam and virtual scene
 int main(int argc, char* argv[]) {
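+    // Usage (assumed invocation, matching the argument parsing below):
+    //   EmulatorCameraTest <RGB|NV21|YV12|YU12> <width> <height> <repeat>
+    //                      <web|vir|fak> [v1 | <sceneWidth> <sceneHeight>]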
     uint32_t pixFmt;
+    int grallocFmt;
+    bool v1 = false;
+    bool fake = false;
+    std::vector<nsecs_t> report;
+    uint32_t sceneWidth;
+    uint32_t sceneHeight;
+
     if (!strncmp(argv[1], "RGB", 3)) {
         pixFmt = V4L2_PIX_FMT_RGB32;
+        grallocFmt = HAL_PIXEL_FORMAT_RGBA_8888;
     } else if (!strncmp(argv[1], "NV21", 3)) {
         pixFmt = V4L2_PIX_FMT_NV21;
+        grallocFmt = HAL_PIXEL_FORMAT_YCbCr_420_888;
     } else if (!strncmp(argv[1], "YV12", 3)) {
         pixFmt = V4L2_PIX_FMT_YVU420;
+        grallocFmt = HAL_PIXEL_FORMAT_YCbCr_420_888;
+    } else if (!strncmp(argv[1], "YU12", 3)) {
+        pixFmt = V4L2_PIX_FMT_YUV420;
+        grallocFmt = HAL_PIXEL_FORMAT_YCbCr_420_888;
     } else {
-        printf("format error, use RGB, NV21 or YV12");
+        printf("format error, use RGB, NV21, YV12 or YU12");
         return -1;
     }
     uint32_t width = atoi(argv[2]);
     uint32_t height = atoi(argv[3]);
+    uint32_t repeated = atoi(argv[4]);
     std::string deviceName;
-    if (!strncmp(argv[4], "web", 3)) {
+    if (!strncmp(argv[5], "web", 3)) {
         deviceName = "name=/dev/video0";
-    } else if (!strncmp(argv[4], "vir", 3)) {
+    } else if (!strncmp(argv[5], "vir", 3)) {
         deviceName = "name=virtualscene";
+    } else if (!strncmp(argv[5], "fak", 3)){
+        fake = true;
+        sceneWidth = atoi(argv[6]);
+        sceneHeight = atoi(argv[7]);
     } else {
         printf("device error, use web or virtual");
         return -1;
     }
 
-    // Open qemu pipe
-    CameraQemuClient client;
-    int ret = client.connectClient(deviceName.c_str());
-    if (ret != NO_ERROR) {
-        printf("Failed to connect device\n");
-        return -1;
+    if (fake) {
+        std::vector<uint8_t> buf(width * height * 4);
+        Scene scene(sceneWidth, sceneHeight, kElectronsPerLuxSecond);
+        for (int i = 0; i < repeated; i++) {
+            nsecs_t start = systemTime();
+            if (pixFmt == V4L2_PIX_FMT_RGB32) {
+                captureRGBA(buf.data(), kDefaultSensitivity, width, height,
+                            scene, sceneWidth, sceneHeight);
+            } else {
+                captureYU12(buf.data(), kDefaultSensitivity, width, height,
+                            scene, sceneWidth, sceneHeight);
+            }
+            }
+            nsecs_t end = systemTime();
+            report.push_back(end - start);
+        }
     }
-    ret = client.queryConnect();
-    if (ret == NO_ERROR) {
-        printf("Connected to device\n");
-    } else {
-        printf("Failed to connect device\n");
-        return -1;
+    else {
+        if (argc > 6 && !strncmp(argv[6], "v1", 2)) {
+            v1 = true;
+        }
+        // Open qemu pipe
+        CameraQemuClient client;
+        int ret = client.connectClient(deviceName.c_str());
+        if (ret != NO_ERROR) {
+            printf("Failed to connect device\n");
+            return -1;
+        }
+        ret = client.queryConnect();
+        if (ret == NO_ERROR) {
+            printf("Connected to device\n");
+        } else {
+            printf("Failed to connect device\n");
+            return -1;
+        }
+        // Capture ASAP.
+        // Note: both the v1 and legacy paths use the same start query here;
+        // they differ only in how frames are fetched below.
+        ret = client.queryStart(pixFmt, width, height);
+        if (ret != NO_ERROR) {
+            printf("Failed to configure device for query\n");
+            return -1;
+        }
+        if (v1) {
+            int grallocUsage = GRALLOC_USAGE_HW_CAMERA_WRITE |
+                               GRALLOC_USAGE_HW_CAMERA_READ |
+                               GRALLOC_USAGE_HW_TEXTURE;
+            cb_handle_t* handle;
+            android_ycbcr ycbcr;
+            GrallocModule::getInstance().alloc(width, height, grallocFmt,
+                                                grallocUsage, (buffer_handle_t*)&handle);
+            void* addr;
+            if (grallocFmt == HAL_PIXEL_FORMAT_RGBA_8888) {
+                GrallocModule::getInstance().lock(handle, GRALLOC_USAGE_HW_CAMERA_WRITE,
+                                                   0, 0,
+                                                   width, height, &addr);
+            } else {
+                GrallocModule::getInstance().lock_ycbcr(handle,
+                                                         GRALLOC_USAGE_HW_CAMERA_WRITE,
+                                                         0, 0,
+                                                         width, height,
+                                                         &ycbcr);
+            }
+            uint64_t offset = handle->getMmapedOffset();
+            printf("offset is 0x%llx\n", offset);
+            float whiteBalance[] = {1.0f, 1.0f, 1.0f};
+            float exposureCompensation = 1.0f;
+            for (int i = 0 ; i < repeated; i++) {
+                nsecs_t start = systemTime();
+                client.queryFrame(width, height, pixFmt, offset,
+                                  whiteBalance[0], whiteBalance[1], whiteBalance[2],
+                                  exposureCompensation, nullptr);
+                nsecs_t end = systemTime();
+                report.push_back(end - start);
+            }
+            GrallocModule::getInstance().unlock(handle);
+            GrallocModule::getInstance().free(handle);
+        } else {
+            size_t bufferSize;
+            if (pixFmt == V4L2_PIX_FMT_RGB32) {
+                bufferSize = width * height * 4;
+            } else {
+                bufferSize = width * height * 12 / 8;
+            }
+            std::vector<char> buffer(bufferSize, 0);
+            float whiteBalance[] = {1.0f, 1.0f, 1.0f};
+            float exposureCompensation = 1.0f;
+            for (int i = 0 ; i < repeated; i++) {
+                nsecs_t start = systemTime();
+                client.queryFrame(buffer.data(), nullptr, 0, bufferSize,
+                                  whiteBalance[0], whiteBalance[1], whiteBalance[2],
+                                  exposureCompensation, nullptr);
+                nsecs_t end = systemTime();
+                report.push_back(end - start);
+            }
+        }
     }
-
-    // Caputre ASAP
-    ret = client.queryStart(pixFmt, width, height);
-    if (ret != NO_ERROR) {
-        printf("Failed to configure device for query\n");
-        return -1;
-    }
-    size_t bufferSize;
-    if (pixFmt == V4L2_PIX_FMT_RGB32) {
-        bufferSize = width * height * 4;
-    } else {
-        bufferSize = width * height * 12 / 8;
-    }
-    std::vector<char> buffer(bufferSize, 0);
-    float whiteBalance[] = {1.0f, 1.0f, 1.0f};
-    float exposureCompensation = 1.0f;
-    std::vector<nsecs_t> report;
-    size_t repeated = 100;
-    for (int i = 0 ; i < repeated; i++) {
-        nsecs_t start = systemTime();
-        client.queryFrame(buffer.data(), nullptr, 0, bufferSize,
-                          whiteBalance[0], whiteBalance[1], whiteBalance[2],
-                          exposureCompensation, nullptr);
-        nsecs_t end = systemTime();
-        report.push_back(end - start);
-    }
-
     // Report
     nsecs_t average, sum = 0;
     for (int i = 0; i < repeated; i++) {
diff --git a/camera/GrallocModule.h b/camera/GrallocModule.h
index c6d4840..486b0ca 100644
--- a/camera/GrallocModule.h
+++ b/camera/GrallocModule.h
@@ -1,6 +1,8 @@
 #ifndef EMU_CAMERA_GRALLOC_MODULE_H
 #define EMU_CAMERA_GRALLOC_MODULE_H
 
+#include "gralloc_cb.h"
+#include <assert.h>
 #include <hardware/gralloc.h>
 #include <log/log.h>
 
@@ -12,6 +14,10 @@
     return instance;
   }
 
+  ~GrallocModule() {
+      gralloc_close(mAllocDev);
+  }
+
   int lock(buffer_handle_t handle,
       int usage, int l, int t, int w, int h, void **vaddr) {
     return mModule->lock(mModule, handle, usage, l, t, w, h, vaddr);
@@ -29,16 +35,33 @@
     return mModule->unlock(mModule, handle);
   }
 
+  int alloc(int w, int h, int format, int usage, buffer_handle_t* handle) {
+      int stride;
+      return mAllocDev->alloc(mAllocDev, w, h, format, usage, handle, &stride);
+  }
+
+  int free(buffer_handle_t handle) {
+      return mAllocDev->free(mAllocDev, handle);
+  }
+
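+  // Offset of this buffer within host-backed memory; the host camera writes
+  // frame pixels directly at this offset (see CameraQemuClient::queryFrame).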
+  uint64_t getOffset(const buffer_handle_t handle) {
+      const cb_handle_t* cb_handle = cb_handle_t::from(handle);
+      return cb_handle->getMmapedOffset();
+  }
+
 private:
   GrallocModule() {
     const hw_module_t *module = NULL;
     int ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &module);
-    if (ret) {
-      ALOGE("%s: Failed to get gralloc module: %d", __FUNCTION__, ret);
-    }
+    assert(ret == 0 && "Failed to get gralloc module");
     mModule = reinterpret_cast<const gralloc_module_t*>(module);
+    ret = gralloc_open(module, &mAllocDev);
+    assert(ret == 0 && "Fail to open GPU device");
   }
+
   const gralloc_module_t *mModule;
+  alloc_device_t* mAllocDev;
 };
 
 #endif
diff --git a/camera/QemuClient.cpp b/camera/QemuClient.cpp
index 18beb69..bf40d61 100755
--- a/camera/QemuClient.cpp
+++ b/camera/QemuClient.cpp
@@ -470,6 +470,17 @@
     return res;
 }
 
+status_t CameraQemuClient::queryStart() {
+    ALOGV("%s", __FUNCTION__);
+    QemuQuery query(mQueryStart);
+    doQuery(&query);
+    const status_t res = query.getCompletionStatus();
+    ALOGE_IF(res != NO_ERROR, "%s: Query failed: %s",
+            __FUNCTION__, query.mReplyData ? query.mReplyData :
+                                             "No error message");
+    return res;
+}
+
 status_t CameraQemuClient::queryStart(uint32_t pixel_format,
                                       int width,
                                       int height)
@@ -566,4 +577,44 @@
     return NO_ERROR;
 }
 
+status_t CameraQemuClient::queryFrame(int width,
+                                      int height,
+                                      uint32_t pixel_format,
+                                      uint64_t offset,
+                                      float r_scale,
+                                      float g_scale,
+                                      float b_scale,
+                                      float exposure_comp,
+                                      int64_t* frame_time)
+{
+    ALOGV("%s: w %d h %d %.4s offset 0x%llx", __FUNCTION__, width, height,
+          (char*)(&pixel_format), offset);
+
+    char query_str[256];
+    snprintf(query_str, sizeof(query_str),
+             "%s dim=%dx%d pix=%d offset=%llu whiteb=%g,%g,%g expcomp=%g time=%d",
+             mQueryFrame, width, height, pixel_format, (unsigned long long)offset,
+             r_scale, g_scale, b_scale,
+             exposure_comp, frame_time != nullptr ? 1 : 0);
+    QemuQuery query(query_str);
+    doQuery(&query);
+    const status_t res = query.getCompletionStatus();
+    if (res != NO_ERROR) {
+        ALOGE("%s: Query failed: %s",
+             __FUNCTION__, query.mReplyData ? query.mReplyData :
+                                              "No error message");
+        return res;
+    }
+
+    /* With host-backed memory the host writes the pixels directly; the reply
+     * only carries the optional frame timestamp. */
+    const uint8_t* frame = reinterpret_cast<const uint8_t*>(query.mReplyData);
+    if (frame_time != nullptr) {
+        if (query.mReplyDataSize >= 8) {
+            *frame_time = *reinterpret_cast<const int64_t*>(frame);
+        } else {
+            *frame_time = 0L;
+        }
+    }
+
+    return NO_ERROR;
+}
 }; /* namespace android */
diff --git a/camera/QemuClient.h b/camera/QemuClient.h
index ff39608..f3f7b6d 100755
--- a/camera/QemuClient.h
+++ b/camera/QemuClient.h
@@ -385,6 +385,8 @@
      */
     status_t queryStart(uint32_t pixel_format, int width, int height);
 
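+    /* Queries the camera to start capturing without fixing frame dimensions
+     * up front (v1, host-backed memory protocol); dimensions are passed per
+     * frame through queryFrame() instead.
+     * Return:
+     *  NO_ERROR on success, or an appropriate error status on failure.
+     */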
+    status_t queryStart();
+
     /* Queries camera to stop capturing video.
      * Return:
      *  NO_ERROR on success, or an appropriate error status on failure.
@@ -419,6 +421,16 @@
      * Names of the queries available for the emulated camera.
      ***************************************************************************/
 
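+    /* Queries the camera for a frame written into host-backed memory (v1).
+     * Param:
+     *  width, height - Requested frame dimensions.
+     *  pixel_format - Requested V4L2 pixel format.
+     *  offset - Offset of the destination buffer within host-backed memory
+     *      (see GrallocModule::getOffset).
+     *  r_scale, g_scale, b_scale - White balance scales.
+     *  exposure_comp - Exposure compensation.
+     *  frame_time - Optional out parameter receiving the frame timestamp.
+     * Return:
+     *  NO_ERROR on success, or an appropriate error status on failure.
+     */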
+    status_t queryFrame(int width,
+                        int height,
+                        uint32_t pixel_format,
+                        uint64_t offset,
+                        float r_scale,
+                        float g_scale,
+                        float b_scale,
+                        float exposure_comp,
+                        int64_t* frame_time);
+
 private:
     /* Connect to the camera. */
     static const char mQueryConnect[];
diff --git a/camera/fake-pipeline2/JpegCompressor.cpp b/camera/fake-pipeline2/JpegCompressor.cpp
index 51de0c0..b7759ec 100644
--- a/camera/fake-pipeline2/JpegCompressor.cpp
+++ b/camera/fake-pipeline2/JpegCompressor.cpp
@@ -25,6 +25,7 @@
 #include "../EmulatedFakeCamera3.h"
 #include "../Exif.h"
 #include "../Thumbnail.h"
+#include "../GrallocModule.h"
 #include "hardware/camera3.h"
 
 namespace android {
@@ -227,7 +228,12 @@
 
     if (mFoundAux) {
         if (mAuxBuffer.streamId == 0) {
-            delete[] mAuxBuffer.img;
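+            // With the v1 protocol the aux buffer is a gralloc allocation
+            // made in QemuSensor, so release it instead of delete[].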
+            if (mAuxBuffer.buffer == nullptr) {
+                delete[] mAuxBuffer.img;
+            } else {
+                GrallocModule::getInstance().unlock(*mAuxBuffer.buffer);
+                GrallocModule::getInstance().free(*mAuxBuffer.buffer);
+                delete mAuxBuffer.buffer;
+            }
         } else if (!mSynchronous) {
             mListener->onJpegInputDone(mAuxBuffer);
         }
diff --git a/camera/qemu-pipeline3/QemuSensor.cpp b/camera/qemu-pipeline3/QemuSensor.cpp
index db2a66a..4f34864 100644
--- a/camera/qemu-pipeline3/QemuSensor.cpp
+++ b/camera/qemu-pipeline3/QemuSensor.cpp
@@ -29,6 +29,7 @@
 #define ALOGVV(...) ((void)0)
 #endif
 
+#include "GrallocModule.h"
 #include "qemu-pipeline3/QemuSensor.h"
 #include "system/camera_metadata.h"
 
@@ -36,6 +37,7 @@
 #include <cstdlib>
 #include <linux/videodev2.h>
 #include <log/log.h>
+#include <cutils/properties.h>
 #include <utils/Trace.h>
 
 namespace android {
@@ -49,6 +51,8 @@
 const int32_t QemuSensor::kSensitivityRange[2] = {100, 1600};
 const uint32_t QemuSensor::kDefaultSensitivity = 100;
 
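+// Boot property advertising the host camera protocol version; version 1
+// enables frame transfer through host-backed gralloc memory.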
+const char QemuSensor::kHostCameraVerString[] = "ro.kernel.qemu.camera_protocol_ver";
+
 QemuSensor::QemuSensor(const char *deviceName, uint32_t width, uint32_t height):
         Thread(false),
         mWidth(width),
@@ -64,6 +68,7 @@
         mFrameNumber(0),
         mCapturedBuffers(nullptr),
         mListener(nullptr) {
+    mHostCameraVer = property_get_int32(kHostCameraVerString, 0);
     ALOGV("QemuSensor created with pixel array %d x %d", width, height);
 }
 
@@ -283,7 +288,11 @@
                     captureRGB(b.img, b.width, b.height, b.stride, &timestamp);
                     break;
                 case HAL_PIXEL_FORMAT_RGBA_8888:
-                    captureRGBA(b.img, b.width, b.height, b.stride, &timestamp);
+                    if (mHostCameraVer == 1) {
+                        captureRGBA(b.width, b.height, b.stride, &timestamp, b.buffer);
+                    } else {
+                        captureRGBA(b.img, b.width, b.height, b.stride, &timestamp);
+                    }
                     break;
                 case HAL_PIXEL_FORMAT_BLOB:
                     if (b.dataSpace == HAL_DATASPACE_DEPTH) {
@@ -299,14 +308,36 @@
                         bAux.height = b.height;
                         bAux.format = HAL_PIXEL_FORMAT_YCbCr_420_888;
                         bAux.stride = b.width;
-                        bAux.buffer = nullptr;
-                        // TODO: Reuse these.
-                        bAux.img = new uint8_t[b.width * b.height * 3];
+                        if (mHostCameraVer == 1) {
+                            int grallocUsage = GRALLOC_USAGE_HW_CAMERA_WRITE |
+                               GRALLOC_USAGE_HW_CAMERA_READ |
+                               GRALLOC_USAGE_HW_TEXTURE;
+                            cb_handle_t* handle;
+                            android_ycbcr ycbcr;
+                            GrallocModule::getInstance().alloc(bAux.width, bAux.height, bAux.format,
+                                                               grallocUsage,
+                                                               (buffer_handle_t*)&handle);
+                            GrallocModule::getInstance().lock_ycbcr(handle,
+                                                                    GRALLOC_USAGE_HW_CAMERA_WRITE,
+                                                                    0, 0,
+                                                                    bAux.width, bAux.height,
+                                                                    &ycbcr);
+                            // Keep the handle pointer on the heap: taking the
+                            // address of the local 'handle' would leave
+                            // bAux.buffer dangling once this scope exits. It
+                            // is released in JpegCompressor.
+                            bAux.buffer = new buffer_handle_t((buffer_handle_t)handle);
+                            bAux.img = (uint8_t*)ycbcr.y;
+                        } else {
+                            bAux.buffer = nullptr;
+                            // TODO: Reuse these.
+                            bAux.img = new uint8_t[b.width * b.height * 3];
+                        }
                         mNextCapturedBuffers->push_back(bAux);
                     }
                     break;
                 case HAL_PIXEL_FORMAT_YCbCr_420_888:
-                    captureYU12(b.img, b.width, b.height, b.stride, &timestamp);
+                    if (mHostCameraVer == 1) {
+                        captureYU12(b.width, b.height, b.stride, &timestamp, b.buffer);
+                    } else {
+                        captureYU12(b.img, b.width, b.height, b.stride, &timestamp);
+                    }
                     break;
                 default:
                     ALOGE("%s: Unknown/unsupported format %x, no output",
@@ -399,14 +430,52 @@
     }
 
     // Since the format is V4L2_PIX_FMT_RGB32, we need 4 bytes per pixel.
-    size_t bufferSize = width * height * 4;
-    // Apply no white balance or exposure compensation.
+    size_t bufferSize = width * height * 4;
+    // Apply no white balance or exposure compensation.
+    float whiteBalance[] = {1.0f, 1.0f, 1.0f};
+    float exposureCompensation = 1.0f;
+    // Read from webcam.
+    mCameraQemuClient.queryFrame(nullptr, img, 0, bufferSize, whiteBalance[0],
+            whiteBalance[1], whiteBalance[2],
+            exposureCompensation, timestamp);
+
+    ALOGVV("RGBA sensor image captured");
+}
+
+void QemuSensor::captureRGBA(uint32_t width, uint32_t height,
+        uint32_t stride, int64_t *timestamp, buffer_handle_t* handle) {
+    ATRACE_CALL();
+    status_t res;
+    if (mLastRequestWidth == -1 || mLastRequestHeight == -1) {
+        uint32_t pixFmt = V4L2_PIX_FMT_RGB32;
+        res = mCameraQemuClient.queryStart();
+        if (res == NO_ERROR) {
+            mLastRequestWidth = width;
+            mLastRequestHeight = height;
+            ALOGV("%s: Qemu camera device '%s' is started for %.4s[%dx%d] frames",
+                    __FUNCTION__, (const char*) mDeviceName,
+                    reinterpret_cast<const char*>(&pixFmt),
+                    mWidth, mHeight);
+            mState = ECDS_STARTED;
+        } else {
+            ALOGE("%s: Unable to start device '%s' for %.4s[%dx%d] frames",
+                    __FUNCTION__, (const char*) mDeviceName,
+                    reinterpret_cast<const char*>(&pixFmt),
+                    mWidth, mHeight);
+            return;
+        }
+    }
+    if (width != stride) {
+        ALOGW("%s: expect stride (%d), actual stride (%d)", __FUNCTION__,
+              width, stride);
+    }
+
     float whiteBalance[] = {1.0f, 1.0f, 1.0f};
     float exposureCompensation = 1.0f;
-    // Read from webcam.
-    mCameraQemuClient.queryFrame(nullptr, img, 0, bufferSize, whiteBalance[0],
-            whiteBalance[1], whiteBalance[2],
-            exposureCompensation, timestamp);
+    uint64_t offset = GrallocModule::getInstance().getOffset(*handle);
+    mCameraQemuClient.queryFrame(width, height, V4L2_PIX_FMT_RGB32, offset,
+                                 whiteBalance[0], whiteBalance[1], whiteBalance[2],
+                                 exposureCompensation, timestamp);
 
     ALOGVV("RGBA sensor image captured");
 }
@@ -415,7 +484,8 @@
     ALOGE("%s: Not implemented", __FUNCTION__);
 }
 
-void QemuSensor::captureYU12(uint8_t *img, uint32_t width, uint32_t height, uint32_t stride, int64_t *timestamp) {
+void QemuSensor::captureYU12(uint8_t *img, uint32_t width, uint32_t height, uint32_t stride,
+                             int64_t *timestamp) {
     ATRACE_CALL();
     status_t res;
     if (width != (uint32_t)mLastRequestWidth ||
@@ -427,7 +497,6 @@
 
         if (mLastRequestWidth != -1 || mLastRequestHeight != -1) {
             // We only need to stop the camera if this isn't the first request.
-
             // Stop the camera device.
             res = mCameraQemuClient.queryStop();
             if (res == NO_ERROR) {
@@ -481,4 +550,41 @@
     ALOGVV("YUV420 sensor image captured");
 }
 
+void QemuSensor::captureYU12(uint32_t width, uint32_t height, uint32_t stride,
+                             int64_t *timestamp, buffer_handle_t* handle) {
+    ATRACE_CALL();
+    status_t res;
+    if (mLastRequestWidth == -1 || mLastRequestHeight == -1) {
+        uint32_t pixFmt = V4L2_PIX_FMT_YUV420;
+        res = mCameraQemuClient.queryStart();
+        if (res == NO_ERROR) {
+            mLastRequestWidth = width;
+            mLastRequestHeight = height;
+            ALOGV("%s: Qemu camera device '%s' is started for %.4s[%dx%d] frames",
+                    __FUNCTION__, (const char*) mDeviceName,
+                    reinterpret_cast<const char*>(&pixFmt),
+                    mWidth, mHeight);
+            mState = ECDS_STARTED;
+        } else {
+            ALOGE("%s: Unable to start device '%s' for %.4s[%dx%d] frames",
+                    __FUNCTION__, (const char*) mDeviceName,
+                    reinterpret_cast<const char*>(&pixFmt),
+                    mWidth, mHeight);
+            return;
+        }
+    }
+    if (width != stride) {
+        ALOGW("%s: expect stride (%d), actual stride (%d)", __FUNCTION__,
+              width, stride);
+    }
+
+    float whiteBalance[] = {1.0f, 1.0f, 1.0f};
+    float exposureCompensation = 1.0f;
+    uint64_t offset = GrallocModule::getInstance().getOffset(*handle);
+    mCameraQemuClient.queryFrame(width, height, V4L2_PIX_FMT_YUV420, offset,
+                                 whiteBalance[0], whiteBalance[1], whiteBalance[2],
+                                 exposureCompensation, timestamp);
+    ALOGVV("YUV420 sensor image captured");
+}
+
 }; // end of namespace android
diff --git a/camera/qemu-pipeline3/QemuSensor.h b/camera/qemu-pipeline3/QemuSensor.h
index a39d3e5..bb64648 100644
--- a/camera/qemu-pipeline3/QemuSensor.h
+++ b/camera/qemu-pipeline3/QemuSensor.h
@@ -123,6 +123,8 @@
     static const int32_t kSensitivityRange[2];
     static const uint32_t kDefaultSensitivity;
 
+    static const char kHostCameraVerString[];
+
   private:
     int32_t mLastRequestWidth, mLastRequestHeight;
 
@@ -169,6 +171,7 @@
 
     // Time of sensor startup (used for simulation zero-time point).
     nsecs_t mStartupTime;
+    int32_t mHostCameraVer;
 
   private:
     /*
@@ -186,12 +189,16 @@
     nsecs_t mNextCaptureTime;
     Buffers *mNextCapturedBuffers;
 
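+    // v1 protocol: the host writes frames directly into the gralloc buffer
+    // identified by its host-backed memory offset.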
+    void captureRGBA(uint32_t width, uint32_t height, uint32_t stride,
+                     int64_t *timestamp, buffer_handle_t* handle);
+    void captureYU12(uint32_t width, uint32_t height, uint32_t stride,
+                     int64_t *timestamp, buffer_handle_t* handle);
     void captureRGBA(uint8_t *img, uint32_t width, uint32_t height,
-            uint32_t stride, int64_t *timestamp);
-    void captureRGB(uint8_t *img, uint32_t width, uint32_t height,
-            uint32_t stride, int64_t *timestamp);
+                     uint32_t stride, int64_t *timestamp);
     void captureYU12(uint8_t *img, uint32_t width, uint32_t height,
-            uint32_t stride, int64_t *timestamp);
+                     uint32_t stride, int64_t *timestamp);
+    void captureRGB(uint8_t *img, uint32_t width, uint32_t height,
+                    uint32_t stride, int64_t *timestamp);
 };
 
 }; // end of namespace android