EmulatedFakeCamera2: Add opaque stream output support

- Enables basic preview through opaque-format output streams
- Opaque format maps to RGBA_8888 on the emulator (see the conversion sketch below)
- Still limited to a single output stream at a time
- Lays groundwork for YUV output support
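
A minimal standalone sketch of the electron-to-RGBA scaling added in
Sensor::captureRGBA, for reviewers; the constant values and the clamp
below are assumptions for illustration only and are not part of this
change:

    #include <algorithm>
    #include <cstdint>

    // Assumed values for the sketch; the patch reuses Sensor's existing
    // kMaxRawValue and kBaseGainFactor constants instead.
    static const uint32_t kMaxRawValue = 4000;
    static const float kBaseGainFactor = 1.0f;

    // Scale one pixel's R/G/B electron counts to 8-bit RGBA, mirroring the
    // per-pixel math in captureRGBA (the std::min clamp is added here only).
    static void electronsToRGBA(const uint32_t electrons[3], uint32_t gain,
                                uint8_t out[4]) {
        float totalGain = gain / 100.0f * kBaseGainFactor;
        for (int c = 0; c < 3; c++) {
            uint32_t count = static_cast<uint32_t>(electrons[c] * totalGain);
            out[c] = static_cast<uint8_t>(
                    std::min<uint32_t>(count / (kMaxRawValue / 255), 255));
        }
        out[3] = 255;  // Opaque alpha, as written by captureRGBA
    }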

Bug: 6243944
Change-Id: I11cb494d72c7c2ec5f542c79f1aa15d9a3ce00bf
diff --git a/tools/emulator/system/camera/EmulatedFakeCamera2.cpp b/tools/emulator/system/camera/EmulatedFakeCamera2.cpp
index 2e4259e..633ad83 100644
--- a/tools/emulator/system/camera/EmulatedFakeCamera2.cpp
+++ b/tools/emulator/system/camera/EmulatedFakeCamera2.cpp
@@ -236,6 +236,9 @@
             ALOGE("%s: Format 0x%x is not supported", __FUNCTION__, format);
             return BAD_VALUE;
         }
+    } else {
+        // Emulator's opaque format is RGBA
+        format = HAL_PIXEL_FORMAT_RGBA_8888;
     }
 
     const uint32_t *availableSizes;
@@ -263,13 +266,12 @@
     // TODO: Generalize below to work for variable types of streams, etc.
     // Currently only correct for raw sensor format, sensor resolution.
 
-    ALOG_ASSERT(format == HAL_PIXEL_FORMAT_RAW_SENSOR,
-            "%s: TODO: Only supporting raw sensor format right now", __FUNCTION__);
     ALOG_ASSERT(width == Sensor::kResolution[0],
             "%s: TODO: Only supporting raw sensor size right now", __FUNCTION__);
     ALOG_ASSERT(height == Sensor::kResolution[1],
             "%s: TODO: Only supporting raw sensor size right now", __FUNCTION__);
 
+    mStreamFormat = format;
     mRawStreamOps = stream_ops;
 
     *stream_id = mNextStreamId;
@@ -496,7 +498,8 @@
                 ANDROID_REQUEST_FRAME_COUNT,
                 &e);
         if (res != NO_ERROR) {
-            ALOGE("%s: error reading frame count tag", __FUNCTION__);
+            ALOGE("%s: error reading frame count tag: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
             mParent->signalError();
             return false;
         }
@@ -506,7 +509,8 @@
                 ANDROID_SENSOR_EXPOSURE_TIME,
                 &e);
         if (res != NO_ERROR) {
-            ALOGE("%s: error reading exposure time tag", __FUNCTION__);
+            ALOGE("%s: error reading exposure time tag: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
             mParent->signalError();
             return false;
         }
@@ -587,7 +591,8 @@
             mParent->signalError();
             return false;
         }
-        mParent->mSensor->setDestinationBuffer(img, mNextBufferStride);
+        mParent->mSensor->setDestinationBuffer(img, mParent->mStreamFormat,
+                mNextBufferStride);
         mParent->mReadoutThread->setNextCapture(mRequest, mNextBuffer);
 
         mRequest = NULL;
@@ -1089,8 +1094,15 @@
     static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_NONE;
     ADD_OR_SIZE(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);
 
+    static const int32_t id = 0;
+    ADD_OR_SIZE(ANDROID_REQUEST_ID, &id, 1);
+
+    static const int32_t frameCount = 0;
+    ADD_OR_SIZE(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);
+
     // OUTPUT_STREAMS set by user
-    // FRAME_COUNT set by user
+    entryCount += 1;
+    dataCount += 5; // TODO: Should be maximum stream number
 
     /** android.lens */
 
diff --git a/tools/emulator/system/camera/EmulatedFakeCamera2.h b/tools/emulator/system/camera/EmulatedFakeCamera2.h
index 0005687..eba1a8e 100644
--- a/tools/emulator/system/camera/EmulatedFakeCamera2.h
+++ b/tools/emulator/system/camera/EmulatedFakeCamera2.h
@@ -246,6 +246,7 @@
     /** Stream manipulation */
     uint32_t mNextStreamId;
     const camera2_stream_ops_t *mRawStreamOps;
+    uint32_t mStreamFormat;
 
     /** Simulated hardware interfaces */
     sp<Sensor> mSensor;
diff --git a/tools/emulator/system/camera/fake-pipeline2/Scene.cpp b/tools/emulator/system/camera/fake-pipeline2/Scene.cpp
index aae57f7..ca50350 100644
--- a/tools/emulator/system/camera/fake-pipeline2/Scene.cpp
+++ b/tools/emulator/system/camera/fake-pipeline2/Scene.cpp
@@ -17,6 +17,7 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "EmulatedCamera_Scene"
 #include <utils/Log.h>
+#include <stdlib.h>
 
 #include "Scene.h"
 
@@ -27,17 +28,17 @@
 
 // Define single-letter shortcuts for scene definition, for directly indexing
 // mCurrentColors
-#define G Scene::GRASS*4
-#define S Scene::GRASS_SHADOW*4
-#define H Scene::HILL*4
-#define W Scene::WALL*4
-#define R Scene::ROOF*4
-#define D Scene::DOOR*4
-#define C Scene::CHIMNEY*4
-#define I Scene::WINDOW*4
-#define U Scene::SUN*4
-#define K Scene::SKY*4
-#define M Scene::MOON*4
+#define G (Scene::GRASS * Scene::NUM_CHANNELS)
+#define S (Scene::GRASS_SHADOW * Scene::NUM_CHANNELS)
+#define H (Scene::HILL * Scene::NUM_CHANNELS)
+#define W (Scene::WALL * Scene::NUM_CHANNELS)
+#define R (Scene::ROOF * Scene::NUM_CHANNELS)
+#define D (Scene::DOOR * Scene::NUM_CHANNELS)
+#define C (Scene::CHIMNEY * Scene::NUM_CHANNELS)
+#define I (Scene::WINDOW * Scene::NUM_CHANNELS)
+#define U (Scene::SUN * Scene::NUM_CHANNELS)
+#define K (Scene::SKY * Scene::NUM_CHANNELS)
+#define M (Scene::MOON * Scene::NUM_CHANNELS)
 
 const int Scene::kSceneWidth = 20;
 const int Scene::kSceneHeight = 20;
@@ -91,9 +92,9 @@
 {
     // Map scene to sensor pixels
     if (mSensorWidth > mSensorHeight) {
-        mMapDiv = (mSensorWidth / kSceneWidth) + 1;
+        mMapDiv = (mSensorWidth / (kSceneWidth + 1) ) + 1;
     } else {
-        mMapDiv = (mSensorHeight / kSceneHeight) + 1;
+        mMapDiv = (mSensorHeight / (kSceneHeight + 1) ) + 1;
     }
     mOffsetX = (kSceneWidth * mMapDiv - mSensorWidth) / 2;
     mOffsetY = (kSceneHeight * mMapDiv - mSensorHeight) / 2;
@@ -103,6 +104,8 @@
     mFilterGr[0] = -0.9689f; mFilterGr[1] =  1.8758f; mFilterGr[2] =  0.0415f;
     mFilterGb[0] = -0.9689f; mFilterGb[1] =  1.8758f; mFilterGb[2] =  0.0415f;
     mFilterB[0]  =  0.0557f; mFilterB[1]  = -0.2040f; mFilterB[2]  =  1.0570f;
+
+
 }
 
 Scene::~Scene() {
@@ -290,48 +293,53 @@
         ALOGV("Mat %d XYZ: %f, %f, %f", i, matXYZ[0], matXYZ[1], matXYZ[2]);
         float luxToElectrons = mSensorSensitivity * mExposureDuration /
                 (kAperture * kAperture);
-        mCurrentColors[i*4 + 0] =
+        mCurrentColors[i*NUM_CHANNELS + 0] =
                 (mFilterR[0] * matXYZ[0] +
                  mFilterR[1] * matXYZ[1] +
                  mFilterR[2] * matXYZ[2])
                 * luxToElectrons;
-        mCurrentColors[i*4 + 1] =
+        mCurrentColors[i*NUM_CHANNELS + 1] =
                 (mFilterGr[0] * matXYZ[0] +
                  mFilterGr[1] * matXYZ[1] +
                  mFilterGr[2] * matXYZ[2])
                 * luxToElectrons;
-        mCurrentColors[i*4 + 2] =
+        mCurrentColors[i*NUM_CHANNELS + 2] =
                 (mFilterGb[0] * matXYZ[0] +
                  mFilterGb[1] * matXYZ[1] +
                  mFilterGb[2] * matXYZ[2])
                 * luxToElectrons;
-        mCurrentColors[i*4 + 3] =
+        mCurrentColors[i*NUM_CHANNELS + 3] =
                 (mFilterB[0] * matXYZ[0] +
                  mFilterB[1] * matXYZ[1] +
                  mFilterB[2] * matXYZ[2])
                 * luxToElectrons;
+
         ALOGV("Color %d RGGB: %d, %d, %d, %d", i,
-                mCurrentColors[i*4 + 0],
-                mCurrentColors[i*4 + 1],
-                mCurrentColors[i*4 + 2],
-                mCurrentColors[i*4 + 3]);
+                mCurrentColors[i*NUM_CHANNELS + 0],
+                mCurrentColors[i*NUM_CHANNELS + 1],
+                mCurrentColors[i*NUM_CHANNELS + 2],
+                mCurrentColors[i*NUM_CHANNELS + 3]);
     }
+    // Shake viewpoint
+    mHandshakeX = rand() % mMapDiv/4 - mMapDiv/8;
+    mHandshakeY = rand() % mMapDiv/4 - mMapDiv/8;
+    // Set starting pixel
     setReadoutPixel(0,0);
 }
 
 void Scene::setReadoutPixel(int x, int y) {
     mCurrentX = x;
     mCurrentY = y;
-    mSubX = (x + mOffsetY) % mMapDiv;
-    mSubY = (y + mOffsetX) % mMapDiv;
-    mSceneX = (x + mOffsetX) / mMapDiv;
-    mSceneY = (y + mOffsetY) / mMapDiv;
+    mSubX = (x + mOffsetX + mHandshakeX) % mMapDiv;
+    mSubY = (y + mOffsetY + mHandshakeY) % mMapDiv;
+    mSceneX = (x + mOffsetX + mHandshakeX) / mMapDiv;
+    mSceneY = (y + mOffsetY + mHandshakeY) / mMapDiv;
     mSceneIdx = mSceneY * kSceneWidth + mSceneX;
     mCurrentSceneMaterial = &(mCurrentColors[kScene[mSceneIdx]]);
 }
 
-uint32_t Scene::getPixelElectrons(int x, int y, int c) {
-    uint32_t e = mCurrentSceneMaterial[c];
+const uint32_t* Scene::getPixelElectrons() {
+    const uint32_t *pixel = mCurrentSceneMaterial;
     mCurrentX++;
     mSubX++;
     if (mCurrentX >= mSensorWidth) {
@@ -345,9 +353,16 @@
         mCurrentSceneMaterial = &(mCurrentColors[kScene[mSceneIdx]]);
         mSubX = 0;
     }
-    return e;
+    return pixel;
 }
 
+// RGB->YUV, Jpeg standard
+const float Scene::kRgb2Yuv[12] = {
+       0.299f,    0.587f,    0.114f,    0.f,
+    -0.16874f, -0.33126f,      0.5f, -128.f,
+         0.5f, -0.41869f, -0.08131f, -128.f,
+};
+
 // Aperture of imaging lens
 const float Scene::kAperture = 2.8;
 
diff --git a/tools/emulator/system/camera/fake-pipeline2/Scene.h b/tools/emulator/system/camera/fake-pipeline2/Scene.h
index 89edaed..687e427 100644
--- a/tools/emulator/system/camera/fake-pipeline2/Scene.h
+++ b/tools/emulator/system/camera/fake-pipeline2/Scene.h
@@ -67,10 +67,21 @@
     void setReadoutPixel(int x, int y);
 
     // Get sensor response in physical units (electrons) for light hitting the
-    // current readout pixel, after passing through color filters. The color
-    // channels are 0=R, 1=Gr, 2=Gb, 3=B. The readout pixel will be
-    // auto-incremented.
-    uint32_t getPixelElectrons(int x, int y, int c);
+    // current readout pixel, after passing through color filters. The readout
+    // pixel will be auto-incremented. The returned array can be indexed with
+    // ColorChannels.
+    const uint32_t* getPixelElectrons();
+
+    enum ColorChannels {
+        R = 0,
+        Gr,
+        Gb,
+        B,
+        Y,
+        Cb,
+        Cr,
+        NUM_CHANNELS
+    };
 
   private:
     // Sensor color filtering coefficients in XYZ
@@ -82,6 +93,8 @@
     int mOffsetX, mOffsetY;
     int mMapDiv;
 
+    int mHandshakeX, mHandshakeY;
+
     int mSensorWidth;
     int mSensorHeight;
     int mCurrentX;
@@ -112,12 +125,15 @@
         NUM_MATERIALS
     };
 
-    uint32_t mCurrentColors[NUM_MATERIALS*4];
+    uint32_t mCurrentColors[NUM_MATERIALS*NUM_CHANNELS];
 
     /**
      * Constants for scene definition. These are various degrees of approximate.
      */
 
+    // RGB->YUV conversion
+    static const float kRgb2Yuv[12];
+
     // Aperture of imaging lens
     static const float kAperture;
 
diff --git a/tools/emulator/system/camera/fake-pipeline2/Sensor.cpp b/tools/emulator/system/camera/fake-pipeline2/Sensor.cpp
index 7f81c85..7ce6dab 100644
--- a/tools/emulator/system/camera/fake-pipeline2/Sensor.cpp
+++ b/tools/emulator/system/camera/fake-pipeline2/Sensor.cpp
@@ -15,7 +15,15 @@
  */
 
 //#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0
 #define LOG_TAG "EmulatedCamera2_Sensor"
+
+#ifdef LOG_NNDEBUG
+#define ALOGVV(...) ALOGV(__VA_ARGS__)
+#else
+#define ALOGVV(...) ((void)0)
+#endif
+
 #include <utils/Log.h>
 
 #include "Sensor.h"
@@ -108,9 +116,10 @@
 }
 
 status_t Sensor::startUp() {
+    ALOGV("%s: E", __FUNCTION__);
+
     int res;
     mCapturedBuffer = NULL;
-
     res = readyToRun();
     if (res != OK) {
         ALOGE("Unable to prepare sensor capture thread to run: %d", res);
@@ -126,6 +135,8 @@
 }
 
 status_t Sensor::shutDown() {
+    ALOGV("%s: E", __FUNCTION__);
+
     int res;
     res = requestExitAndWait();
     if (res != OK) {
@@ -140,25 +151,27 @@
 
 void Sensor::setExposureTime(uint64_t ns) {
     Mutex::Autolock lock(mControlMutex);
-    ALOGV("Exposure set to %f", ns/1000000.f);
+    ALOGVV("Exposure set to %f", ns/1000000.f);
     mExposureTime = ns;
 }
 
 void Sensor::setFrameDuration(uint64_t ns) {
     Mutex::Autolock lock(mControlMutex);
-    ALOGV("Frame duration set to %f", ns/1000000.f);
+    ALOGVV("Frame duration set to %f", ns/1000000.f);
     mFrameDuration = ns;
 }
 
 void Sensor::setSensitivity(uint32_t gain) {
     Mutex::Autolock lock(mControlMutex);
-    ALOGV("Gain set to %d", gain);
+    ALOGVV("Gain set to %d", gain);
     mGainFactor = gain;
 }
 
-void Sensor::setDestinationBuffer(uint8_t *buffer, uint32_t stride) {
+void Sensor::setDestinationBuffer(uint8_t *buffer,
+        uint32_t format, uint32_t stride) {
     Mutex::Autolock lock(mControlMutex);
     mNextBuffer = buffer;
+    mNextBufferFmt = format;
     mNextStride = stride;
 }
 
@@ -217,6 +230,7 @@
     uint64_t frameDuration;
     uint32_t gain;
     uint8_t *nextBuffer;
+    uint32_t nextBufferFmt;
     uint32_t stride;
     {
         Mutex::Autolock lock(mControlMutex);
@@ -224,12 +238,13 @@
         frameDuration    = mFrameDuration;
         gain             = mGainFactor;
         nextBuffer       = mNextBuffer;
+        nextBufferFmt    = mNextBufferFmt;
         stride           = mNextStride;
         // Don't reuse a buffer
         mNextBuffer = NULL;
 
         // Signal VSync for start of readout
-        ALOGV("Sensor VSync");
+        ALOGVV("Sensor VSync");
         mGotVSync = true;
         mVSync.signal();
     }
@@ -248,7 +263,7 @@
             kRowReadoutTime * kResolution[1];
 
     if (mNextCapturedBuffer != NULL) {
-        ALOGV("Sensor starting readout");
+        ALOGVV("Sensor starting readout");
         // Pretend we're doing readout now; will signal once enough time has elapsed
         capturedBuffer = mNextCapturedBuffer;
         captureTime    = mNextCaptureTime;
@@ -263,66 +278,30 @@
     mNextCapturedBuffer = nextBuffer;
 
     if (mNextCapturedBuffer != NULL) {
-        ALOGV("Sensor capturing image (%d x %d) stride %d",
+        ALOGVV("Sensor capturing image (%d x %d) stride %d",
                 kResolution[0], kResolution[1], stride);
-        ALOGV("Exposure: %f ms, gain: %d", (float)exposureDuration/1e6, gain);
+        ALOGVV("Exposure: %f ms, gain: %d", (float)exposureDuration/1e6, gain);
         mScene.setExposureDuration((float)exposureDuration/1e9);
         mScene.calculateScene(mNextCaptureTime);
 
-        float totalGain = gain/100.0 * kBaseGainFactor;
-        float noiseVarGain =  totalGain * totalGain;
-        float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
-                + kReadNoiseVarAfterGain;
-
-        int bayerSelect[4] = {0, 1, 2, 3}; // RGGB
-
-        for (unsigned int y = 0; y < kResolution[1]; y++ ) {
-            int *bayerRow = bayerSelect + (y & 0x1) * 2;
-            uint16_t *px = (uint16_t*)mNextCapturedBuffer + y * stride;
-            for (unsigned int x = 0; x < kResolution[0]; x++) {
-                uint32_t electronCount;
-                electronCount = mScene.getPixelElectrons(x, y, bayerRow[x & 0x1]);
-
-                // TODO: Better pixel saturation curve?
-                electronCount = (electronCount < kSaturationElectrons) ?
-                        electronCount : kSaturationElectrons;
-
-                // TODO: Better A/D saturation curve?
-                uint16_t rawCount = electronCount * totalGain;
-                rawCount = (rawCount < kMaxRawValue) ? rawCount : kMaxRawValue;
-
-                // Calculate noise value
-                // TODO: Use more-correct Gaussian instead of uniform noise
-                float photonNoiseVar = electronCount * noiseVarGain;
-                float noiseStddev = sqrtf_approx(readNoiseVar + photonNoiseVar);
-                // Scaled to roughly match gaussian/uniform noise stddev
-                float noiseSample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25;
-
-                rawCount += kBlackLevel;
-                rawCount += noiseStddev * noiseSample;
-
-                *px++ = rawCount;
-            }
-            simulatedTime += kRowReadoutTime;
-
-            // If enough time has elapsed to complete readout, signal done frame
-            // Only check every so often, though
-            if ((capturedBuffer != NULL) &&
-                    ((y & 63) == 0) &&
-                    (systemTime() >= frameReadoutEndRealTime) ) {
-                ALOGV("Sensor readout complete");
-                Mutex::Autolock lock(mReadoutMutex);
-                mCapturedBuffer = capturedBuffer;
-                mCaptureTime = captureTime;
-                mReadoutComplete.signal();
-                capturedBuffer = NULL;
-            }
+        switch(nextBufferFmt) {
+            case HAL_PIXEL_FORMAT_RAW_SENSOR:
+                captureRaw(gain, stride, &capturedBuffer,
+                        captureTime, frameEndRealTime);
+                break;
+            case HAL_PIXEL_FORMAT_RGBA_8888:
+                captureRGBA(gain, stride, &capturedBuffer,
+                        captureTime, frameEndRealTime);
+                break;
+            default:
+                ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
+                        nextBufferFmt);
+                break;
         }
-        ALOGV("Sensor image captured");
     }
     // No capture done, or finished image generation before readout was completed
     if (capturedBuffer != NULL) {
-        ALOGV("Sensor readout complete");
+        ALOGVV("Sensor readout complete");
         Mutex::Autolock lock(mReadoutMutex);
         mCapturedBuffer = capturedBuffer;
         mCaptureTime = captureTime;
@@ -330,7 +309,7 @@
         capturedBuffer = NULL;
     }
 
-    ALOGV("Sensor vertical blanking interval");
+    ALOGVV("Sensor vertical blanking interval");
     nsecs_t workDoneRealTime = systemTime();
     const nsecs_t timeAccuracy = 2e6; // 2 ms of imprecision is ok
     if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
@@ -344,10 +323,106 @@
         } while (ret != 0);
     }
     nsecs_t endRealTime = systemTime();
-    ALOGV("Frame cycle took %d ms, target %d ms",
+    ALOGVV("Frame cycle took %d ms, target %d ms",
             (int)((endRealTime - startRealTime)/1000000),
             (int)(frameDuration / 1000000));
     return true;
 };
 
+void Sensor::captureRaw(uint32_t gain, uint32_t stride,
+        uint8_t **capturedBuffer, nsecs_t captureTime, nsecs_t frameReadoutTime) {
+    float totalGain = gain/100.0 * kBaseGainFactor;
+    float noiseVarGain =  totalGain * totalGain;
+    float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
+            + kReadNoiseVarAfterGain;
+
+    int bayerSelect[4] = {Scene::R, Scene::Gr, Scene::Gb, Scene::B}; // RGGB
+
+    for (unsigned int y = 0; y < kResolution[1]; y++ ) {
+        int *bayerRow = bayerSelect + (y & 0x1) * 2;
+        uint16_t *px = (uint16_t*)mNextCapturedBuffer + y * stride;
+        for (unsigned int x = 0; x < kResolution[0]; x++) {
+            uint32_t electronCount;
+            electronCount = mScene.getPixelElectrons()[bayerRow[x & 0x1]];
+
+            // TODO: Better pixel saturation curve?
+            electronCount = (electronCount < kSaturationElectrons) ?
+                    electronCount : kSaturationElectrons;
+
+            // TODO: Better A/D saturation curve?
+            uint16_t rawCount = electronCount * totalGain;
+            rawCount = (rawCount < kMaxRawValue) ? rawCount : kMaxRawValue;
+
+            // Calculate noise value
+            // TODO: Use more-correct Gaussian instead of uniform noise
+            float photonNoiseVar = electronCount * noiseVarGain;
+            float noiseStddev = sqrtf_approx(readNoiseVar + photonNoiseVar);
+            // Scaled to roughly match gaussian/uniform noise stddev
+            float noiseSample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25;
+
+            rawCount += kBlackLevel;
+            rawCount += noiseStddev * noiseSample;
+
+            *px++ = rawCount;
+        }
+        // TODO: Handle this better
+        //simulatedTime += kRowReadoutTime;
+
+        // If enough time has elapsed to complete readout, signal done frame
+        // Only check every so often, though
+        if ((*capturedBuffer != NULL) &&
+                ((y & 63) == 0) &&
+                (systemTime() >= frameReadoutTime) ) {
+            ALOGV("Sensor readout complete");
+            Mutex::Autolock lock(mReadoutMutex);
+            mCapturedBuffer = *capturedBuffer;
+            mCaptureTime = captureTime;
+            mReadoutComplete.signal();
+            *capturedBuffer = NULL;
+        }
+    }
+    ALOGVV("Raw sensor image captured");
+}
+
+void Sensor::captureRGBA(uint32_t gain, uint32_t stride,
+        uint8_t **capturedBuffer, nsecs_t captureTime, nsecs_t frameReadoutTime) {
+    float totalGain = gain/100.0 * kBaseGainFactor;
+    float noiseVarGain =  totalGain * totalGain;
+    float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
+            + kReadNoiseVarAfterGain;
+
+    for (unsigned int y = 0; y < kResolution[1]; y++ ) {
+        uint8_t *px = (uint8_t*)mNextCapturedBuffer + y * stride * 4;
+        for (unsigned int x = 0; x < kResolution[0]; x++) {
+            uint32_t rCount, gCount, bCount;
+            // TODO: Perfect demosaicing is a cheat
+            const uint32_t *pixel = mScene.getPixelElectrons();
+            rCount = pixel[Scene::R]  * totalGain;
+            gCount = pixel[Scene::Gr] * totalGain;
+            bCount = pixel[Scene::B]  * totalGain;
+
+            *px++ = rCount / (kMaxRawValue / 255);
+            *px++ = gCount / (kMaxRawValue / 255);
+            *px++ = bCount / (kMaxRawValue / 255);
+            *px++ = 255;
+        }
+        // TODO: Handle this better
+        //simulatedTime += kRowReadoutTime;
+
+        // If enough time has elapsed to complete readout, signal done frame
+        // Only check every so often, though
+        if ((*capturedBuffer != NULL) &&
+                ((y & 63) == 0) &&
+                (systemTime() >= frameReadoutTime) ) {
+            ALOGV("Sensor readout complete");
+            Mutex::Autolock lock(mReadoutMutex);
+            mCapturedBuffer = *capturedBuffer;
+            mCaptureTime = captureTime;
+            mReadoutComplete.signal();
+            *capturedBuffer = NULL;
+        }
+    }
+    ALOGVV("RGBA sensor image captured");
+}
+
 } // namespace android
diff --git a/tools/emulator/system/camera/fake-pipeline2/Sensor.h b/tools/emulator/system/camera/fake-pipeline2/Sensor.h
index 565d10a..50ec2b7 100644
--- a/tools/emulator/system/camera/fake-pipeline2/Sensor.h
+++ b/tools/emulator/system/camera/fake-pipeline2/Sensor.h
@@ -107,7 +107,7 @@
     void setFrameDuration(uint64_t ns);
     void setSensitivity(uint32_t gain);
     // Buffer must be at least stride*height*2 bytes in size
-    void setDestinationBuffer(uint8_t *buffer, uint32_t stride);
+    void setDestinationBuffer(uint8_t *buffer, uint32_t format, uint32_t stride);
 
     /*
      * Controls that cause reconfiguration delay
@@ -178,7 +178,9 @@
     uint64_t  mFrameDuration;
     uint32_t  mGainFactor;
     uint8_t  *mNextBuffer;
+    uint32_t  mNextBufferFmt;
     uint32_t  mNextStride;
+
     // End of control parameters
 
     Mutex mReadoutMutex; // Lock before accessing readout variables
@@ -204,6 +206,15 @@
     uint8_t *mNextCapturedBuffer;
 
     Scene mScene;
+
+    void captureRaw(uint32_t gain, uint32_t stride,
+            uint8_t **capturedBuffer, nsecs_t captureTime,
+            nsecs_t frameReadoutTime);
+
+    void captureRGBA(uint32_t gain, uint32_t stride,
+            uint8_t **capturedBuffer, nsecs_t captureTime,
+            nsecs_t frameReadoutTime);
+
 };
 
 }
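
For context, a minimal consumer sketch (not part of the patch) showing how
a capture loop uses the reworked Scene::getPixelElectrons(), which now
returns a per-pixel channel array indexed by Scene::ColorChannels instead
of taking (x, y, c); the helper function and loop bounds here are
hypothetical:

    #include <cstdint>
    #include "Scene.h"

    // Read out a Bayer frame from an already-configured Scene; `width` and
    // `height` are the sensor dimensions (hypothetical helper).
    void readBayerFrame(android::Scene &scene,
                        unsigned int width, unsigned int height) {
        // RGGB pattern, using the new ColorChannels indices
        const int bayerSelect[4] = {android::Scene::R, android::Scene::Gr,
                                    android::Scene::Gb, android::Scene::B};
        scene.setReadoutPixel(0, 0);
        for (unsigned int y = 0; y < height; y++) {
            const int *bayerRow = bayerSelect + (y & 0x1) * 2;
            for (unsigned int x = 0; x < width; x++) {
                // getPixelElectrons() auto-advances the readout pixel
                uint32_t electrons =
                        scene.getPixelElectrons()[bayerRow[x & 0x1]];
                (void)electrons;  // feed into the A/D and noise model here
            }
        }
    }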