EmulatedFakeCamera2: Add precapture trigger and reprocess support

- Manage the precapture protocol with trigger/AE notifications
- Support reprocessing from opaque format to JPEG
- Add a bit of exposure variation to scene

Bug: 6243944

Change-Id: Ic88f8f0ea641ddaf3e921849caf79dd3838665f8
diff --git a/tools/emulator/system/camera/EmulatedCamera2.cpp b/tools/emulator/system/camera/EmulatedCamera2.cpp
index fa7ee4d..bbc1740 100644
--- a/tools/emulator/system/camera/EmulatedCamera2.cpp
+++ b/tools/emulator/system/camera/EmulatedCamera2.cpp
@@ -162,6 +162,13 @@
     return INVALID_OPERATION;
 }
 
+int EmulatedCamera2::allocateReprocessStreamFromStream(
+        uint32_t output_stream_id,
+        const camera2_stream_in_ops_t *reprocess_stream_ops,
+        uint32_t *stream_id) {
+    return INVALID_OPERATION;
+}
+
 int EmulatedCamera2::releaseReprocessStream(uint32_t stream_id) {
     return INVALID_OPERATION;
 }
@@ -288,6 +295,17 @@
             reprocess_stream_ops, stream_id, consumer_usage, max_buffers);
 }
 
+int EmulatedCamera2::allocate_reprocess_stream_from_stream(
+            const camera2_device_t *d,
+            uint32_t output_stream_id,
+            const camera2_stream_in_ops_t *reprocess_stream_ops,
+            uint32_t *stream_id) {
+    EmulatedCamera2* ec = getInstance(d);
+    return ec->allocateReprocessStreamFromStream(output_stream_id,
+            reprocess_stream_ops, stream_id);
+}
+
+
 int EmulatedCamera2::release_reprocess_stream(const camera2_device_t *d,
         uint32_t stream_id) {
     EmulatedCamera2* ec = getInstance(d);
@@ -379,6 +397,7 @@
     EmulatedCamera2::register_stream_buffers,
     EmulatedCamera2::release_stream,
     EmulatedCamera2::allocate_reprocess_stream,
+    EmulatedCamera2::allocate_reprocess_stream_from_stream,
     EmulatedCamera2::release_reprocess_stream,
     EmulatedCamera2::trigger_action,
     EmulatedCamera2::set_notify_callback,
diff --git a/tools/emulator/system/camera/EmulatedCamera2.h b/tools/emulator/system/camera/EmulatedCamera2.h
index a294454..755ed0e 100644
--- a/tools/emulator/system/camera/EmulatedCamera2.h
+++ b/tools/emulator/system/camera/EmulatedCamera2.h
@@ -126,6 +126,11 @@
             uint32_t *consumer_usage,
             uint32_t *max_buffers);
 
+    virtual int allocateReprocessStreamFromStream(
+            uint32_t output_stream_id,
+            const camera2_stream_in_ops_t *reprocess_stream_ops,
+            uint32_t *stream_id);
+
     virtual int releaseReprocessStream(uint32_t stream_id);
 
     /** 3A action triggering */
@@ -197,6 +202,11 @@
             uint32_t *consumer_usage,
             uint32_t *max_buffers);
 
+    static int allocate_reprocess_stream_from_stream(const camera2_device_t *,
+            uint32_t output_stream_id,
+            const camera2_stream_in_ops_t *reprocess_stream_ops,
+            uint32_t *stream_id);
+
     static int release_reprocess_stream(const camera2_device_t *,
             uint32_t stream_id);
 
diff --git a/tools/emulator/system/camera/EmulatedFakeCamera2.cpp b/tools/emulator/system/camera/EmulatedFakeCamera2.cpp
index 08475b1..bc1517e 100644
--- a/tools/emulator/system/camera/EmulatedFakeCamera2.cpp
+++ b/tools/emulator/system/camera/EmulatedFakeCamera2.cpp
@@ -31,6 +31,10 @@
 
 namespace android {
 
+const int64_t USEC = 1000LL;
+const int64_t MSEC = USEC * 1000LL;
+const int64_t SEC = MSEC * 1000LL;
+
 const uint32_t EmulatedFakeCamera2::kAvailableFormats[4] = {
         HAL_PIXEL_FORMAT_RAW_SENSOR,
         HAL_PIXEL_FORMAT_BLOB,
@@ -118,10 +122,12 @@
     }
     if (res != OK) return res;
 
-    mNextStreamId = 0;
+    mNextStreamId = 1;
+    mNextReprocessStreamId = 1;
     mRawStreamCount = 0;
     mProcessedStreamCount = 0;
     mJpegStreamCount = 0;
+    mReprocessStreamCount = 0;
 
     return NO_ERROR;
 }
@@ -140,7 +146,8 @@
     mSensor = new Sensor(this);
     mJpegCompressor = new JpegCompressor(this);
 
-    mNextStreamId = 0;
+    mNextStreamId = 1;
+    mNextReprocessStreamId = 1;
 
     res = mSensor->startUp();
     if (res != NO_ERROR) return res;
@@ -435,6 +442,69 @@
     return NO_ERROR;
 }
 
+int EmulatedFakeCamera2::allocateReprocessStreamFromStream(
+        uint32_t output_stream_id,
+        const camera2_stream_in_ops_t *stream_ops,
+        uint32_t *stream_id) {
+    Mutex::Autolock l(mMutex);
+
+    ssize_t baseStreamIndex = mStreams.indexOfKey(output_stream_id);
+    if (baseStreamIndex < 0) {
+        ALOGE("%s: Unknown output stream id %d!", __FUNCTION__, output_stream_id);
+        return BAD_VALUE;
+    }
+
+    const Stream &baseStream = mStreams[baseStreamIndex];
+
+    // We'll reprocess anything we produced
+
+    if (mReprocessStreamCount >= kMaxReprocessStreamCount) {
+        ALOGE("%s: Cannot allocate another reprocess stream (%d already allocated)",
+                __FUNCTION__, mReprocessStreamCount);
+        return INVALID_OPERATION;
+    }
+    mReprocessStreamCount++;
+
+    ReprocessStream newStream;
+    newStream.ops = stream_ops;
+    newStream.width = baseStream.width;
+    newStream.height = baseStream.height;
+    newStream.format = baseStream.format;
+    newStream.stride = baseStream.stride;
+    newStream.sourceStreamId = output_stream_id;
+
+    *stream_id = mNextReprocessStreamId;
+    mReprocessStreams.add(mNextReprocessStreamId, newStream);
+
+    ALOGV("Reprocess stream allocated: %d: %d, %d, 0x%x. Parent stream: %d",
+            *stream_id, newStream.width, newStream.height, newStream.format,
+            output_stream_id);
+
+    mNextReprocessStreamId++;
+    return NO_ERROR;
+}
+
+int EmulatedFakeCamera2::releaseReprocessStream(uint32_t stream_id) {
+    Mutex::Autolock l(mMutex);
+
+    ssize_t streamIndex = mReprocessStreams.indexOfKey(stream_id);
+    if (streamIndex < 0) {
+        ALOGE("%s: Unknown reprocess stream id %d!", __FUNCTION__, stream_id);
+        return BAD_VALUE;
+    }
+
+    if (isReprocessStreamInUse(stream_id)) {
+        ALOGE("%s: Cannot release reprocessing stream %d; in use!", __FUNCTION__,
+                stream_id);
+        return BAD_VALUE;
+    }
+
+    mReprocessStreamCount--;
+    mReprocessStreams.removeItemsAt(streamIndex);
+
+    return NO_ERROR;
+}
+
 int EmulatedFakeCamera2::triggerAction(uint32_t trigger_id,
         int32_t ext1,
         int32_t ext2) {
@@ -603,7 +673,6 @@
 }
 
 bool EmulatedFakeCamera2::ConfigureThread::threadLoop() {
-    static const nsecs_t kWaitPerLoop = 10000000L; // 10 ms
     status_t res;
 
     // Check if we're currently processing or just waiting
@@ -645,105 +714,32 @@
             Mutex::Autolock lock(mInputMutex);
             mRequestCount++;
         }
-        // Get necessary parameters for sensor config
 
-        mParent->mControlThread->processRequest(mRequest);
-
-        camera_metadata_entry_t streams;
+        camera_metadata_entry_t type;
         res = find_camera_metadata_entry(mRequest,
-                ANDROID_REQUEST_OUTPUT_STREAMS,
-                &streams);
+                ANDROID_REQUEST_TYPE,
+                &type);
         if (res != NO_ERROR) {
-            ALOGE("%s: error reading output stream tag", __FUNCTION__);
+            ALOGE("%s: error reading request type", __FUNCTION__);
             mParent->signalError();
             return false;
         }
-
-        mNextBuffers = new Buffers;
-        mNextNeedsJpeg = false;
-        ALOGV("Configure: Setting up buffers for capture");
-        for (size_t i = 0; i < streams.count; i++) {
-            int streamId = streams.data.u8[i];
-            const Stream &s = mParent->getStreamInfo(streamId);
-            if (s.format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
-                ALOGE("%s: Stream %d does not have a concrete pixel format, but "
-                        "is included in a request!", __FUNCTION__, streamId);
+        bool success = false;
+        switch (type.data.u8[0]) {
+            case ANDROID_REQUEST_TYPE_CAPTURE:
+                success = setupCapture();
+                break;
+            case ANDROID_REQUEST_TYPE_REPROCESS:
+                success = setupReprocess();
+                break;
+            default:
+                ALOGE("%s: Unexpected request type %d",
+                        __FUNCTION__, type.data.u8[0]);
                 mParent->signalError();
-                return false;
-            }
-            StreamBuffer b;
-            b.streamId = streams.data.u8[i];
-            b.width  = s.width;
-            b.height = s.height;
-            b.format = s.format;
-            b.stride = s.stride;
-            mNextBuffers->push_back(b);
-            ALOGV("Configure:    Buffer %d: Stream %d, %d x %d, format 0x%x, "
-                    "stride %d",
-                    i, b.streamId, b.width, b.height, b.format, b.stride);
-            if (b.format == HAL_PIXEL_FORMAT_BLOB) {
-                mNextNeedsJpeg = true;
-            }
+                break;
         }
+        if (!success) return false;
 
-        camera_metadata_entry_t e;
-        res = find_camera_metadata_entry(mRequest,
-                ANDROID_REQUEST_FRAME_COUNT,
-                &e);
-        if (res != NO_ERROR) {
-            ALOGE("%s: error reading frame count tag: %s (%d)",
-                    __FUNCTION__, strerror(-res), res);
-            mParent->signalError();
-            return false;
-        }
-        mNextFrameNumber = *e.data.i32;
-
-        res = find_camera_metadata_entry(mRequest,
-                ANDROID_SENSOR_EXPOSURE_TIME,
-                &e);
-        if (res != NO_ERROR) {
-            ALOGE("%s: error reading exposure time tag: %s (%d)",
-                    __FUNCTION__, strerror(-res), res);
-            mParent->signalError();
-            return false;
-        }
-        mNextExposureTime = *e.data.i64;
-
-        res = find_camera_metadata_entry(mRequest,
-                ANDROID_SENSOR_FRAME_DURATION,
-                &e);
-        if (res != NO_ERROR) {
-            ALOGE("%s: error reading frame duration tag", __FUNCTION__);
-            mParent->signalError();
-            return false;
-        }
-        mNextFrameDuration = *e.data.i64;
-
-        if (mNextFrameDuration <
-                mNextExposureTime + Sensor::kMinVerticalBlank) {
-            mNextFrameDuration = mNextExposureTime + Sensor::kMinVerticalBlank;
-        }
-        res = find_camera_metadata_entry(mRequest,
-                ANDROID_SENSOR_SENSITIVITY,
-                &e);
-        if (res != NO_ERROR) {
-            ALOGE("%s: error reading sensitivity tag", __FUNCTION__);
-            mParent->signalError();
-            return false;
-        }
-        mNextSensitivity = *e.data.i32;
-
-        res = find_camera_metadata_entry(mRequest,
-                EMULATOR_SCENE_HOUROFDAY,
-                &e);
-        if (res == NO_ERROR) {
-            ALOGV("Setting hour: %d", *e.data.i32);
-            mParent->mSensor->getScene().setHour(*e.data.i32);
-        }
-
-        // Start waiting on readout thread
-        mWaitingForReadout = true;
-        ALOGV("Configure: Waiting for readout thread");
     }
 
     if (mWaitingForReadout) {
@@ -767,49 +763,134 @@
         ALOGV("Configure: Waiting for sensor");
         mNextNeedsJpeg = false;
     }
-    bool vsync = mParent->mSensor->waitForVSync(kWaitPerLoop);
 
+    if (mNextIsCapture) {
+        return configureNextCapture();
+    } else {
+        return configureNextReprocess();
+    }
+}
+
+bool EmulatedFakeCamera2::ConfigureThread::setupCapture() {
+    status_t res;
+
+    mNextIsCapture = true;
+    // Get necessary parameters for sensor config
+    mParent->mControlThread->processRequest(mRequest);
+
+    camera_metadata_entry_t streams;
+    res = find_camera_metadata_entry(mRequest,
+            ANDROID_REQUEST_OUTPUT_STREAMS,
+            &streams);
+    if (res != NO_ERROR) {
+        ALOGE("%s: error reading output stream tag", __FUNCTION__);
+        mParent->signalError();
+        return false;
+    }
+
+    mNextBuffers = new Buffers;
+    mNextNeedsJpeg = false;
+    ALOGV("Configure: Setting up buffers for capture");
+    for (size_t i = 0; i < streams.count; i++) {
+        int streamId = streams.data.u8[i];
+        const Stream &s = mParent->getStreamInfo(streamId);
+        if (s.format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+            ALOGE("%s: Stream %d does not have a concrete pixel format, but "
+                    "is included in a request!", __FUNCTION__, streamId);
+            mParent->signalError();
+            return false;
+        }
+        StreamBuffer b;
+        b.streamId = streams.data.u8[i];
+        b.width  = s.width;
+        b.height = s.height;
+        b.format = s.format;
+        b.stride = s.stride;
+        mNextBuffers->push_back(b);
+        ALOGV("Configure:    Buffer %d: Stream %d, %d x %d, format 0x%x, "
+                "stride %d",
+                i, b.streamId, b.width, b.height, b.format, b.stride);
+        if (b.format == HAL_PIXEL_FORMAT_BLOB) {
+            mNextNeedsJpeg = true;
+        }
+    }
+
+    camera_metadata_entry_t e;
+    res = find_camera_metadata_entry(mRequest,
+            ANDROID_REQUEST_FRAME_COUNT,
+            &e);
+    if (res != NO_ERROR) {
+        ALOGE("%s: error reading frame count tag: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        mParent->signalError();
+        return false;
+    }
+    mNextFrameNumber = *e.data.i32;
+
+    res = find_camera_metadata_entry(mRequest,
+            ANDROID_SENSOR_EXPOSURE_TIME,
+            &e);
+    if (res != NO_ERROR) {
+        ALOGE("%s: error reading exposure time tag: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        mParent->signalError();
+        return false;
+    }
+    mNextExposureTime = *e.data.i64;
+
+    res = find_camera_metadata_entry(mRequest,
+            ANDROID_SENSOR_FRAME_DURATION,
+            &e);
+    if (res != NO_ERROR) {
+        ALOGE("%s: error reading frame duration tag", __FUNCTION__);
+        mParent->signalError();
+        return false;
+    }
+    mNextFrameDuration = *e.data.i64;
+
+    if (mNextFrameDuration <
+            mNextExposureTime + Sensor::kMinVerticalBlank) {
+        mNextFrameDuration = mNextExposureTime + Sensor::kMinVerticalBlank;
+    }
+    res = find_camera_metadata_entry(mRequest,
+            ANDROID_SENSOR_SENSITIVITY,
+            &e);
+    if (res != NO_ERROR) {
+        ALOGE("%s: error reading sensitivity tag", __FUNCTION__);
+        mParent->signalError();
+        return false;
+    }
+    mNextSensitivity = *e.data.i32;
+
+    res = find_camera_metadata_entry(mRequest,
+            EMULATOR_SCENE_HOUROFDAY,
+            &e);
+    if (res == NO_ERROR) {
+        ALOGV("Setting hour: %d", *e.data.i32);
+        mParent->mSensor->getScene().setHour(*e.data.i32);
+    }
+
+    // Start waiting on readout thread
+    mWaitingForReadout = true;
+    ALOGV("Configure: Waiting for readout thread");
+
+    return true;
+}
+
+bool EmulatedFakeCamera2::ConfigureThread::configureNextCapture() {
+    bool vsync = mParent->mSensor->waitForVSync(kWaitPerLoop);
     if (!vsync) return true;
 
     Mutex::Autolock il(mInternalsMutex);
-    ALOGV("Configure: Configuring sensor for frame %d", mNextFrameNumber);
+    ALOGV("Configure: Configuring sensor for capture %d", mNextFrameNumber);
     mParent->mSensor->setExposureTime(mNextExposureTime);
     mParent->mSensor->setFrameDuration(mNextFrameDuration);
     mParent->mSensor->setSensitivity(mNextSensitivity);
 
-    /** Get buffers to fill for this frame */
-    for (size_t i = 0; i < mNextBuffers->size(); i++) {
-        StreamBuffer &b = mNextBuffers->editItemAt(i);
+    if (!getBuffers()) return false;
 
-        Stream s = mParent->getStreamInfo(b.streamId);
-        ALOGV("Configure: Dequeing buffer from stream %d", b.streamId);
-        res = s.ops->dequeue_buffer(s.ops, &(b.buffer) );
-        if (res != NO_ERROR || b.buffer == NULL) {
-            ALOGE("%s: Unable to dequeue buffer from stream %d: %s (%d)",
-                    __FUNCTION__, b.streamId, strerror(-res), res);
-            mParent->signalError();
-            return false;
-        }
-
-        /* Lock the buffer from the perspective of the graphics mapper */
-        uint8_t *img;
-        const Rect rect(s.width, s.height);
-
-        res = GraphicBufferMapper::get().lock(*(b.buffer),
-                GRALLOC_USAGE_HW_CAMERA_WRITE,
-                rect, (void**)&(b.img) );
-
-        if (res != NO_ERROR) {
-            ALOGE("%s: grbuffer_mapper.lock failure: %s (%d)",
-                    __FUNCTION__, strerror(-res), res);
-            s.ops->cancel_buffer(s.ops,
-                    b.buffer);
-            mParent->signalError();
-            return false;
-        }
-    }
-    ALOGV("Configure: Done configure for frame %d", mNextFrameNumber);
-    mParent->mReadoutThread->setNextCapture(mRequest, mNextBuffers);
+    ALOGV("Configure: Done configure for capture %d", mNextFrameNumber);
+    mParent->mReadoutThread->setNextOperation(true, mRequest, mNextBuffers);
     mParent->mSensor->setDestinationBuffers(mNextBuffers);
 
     mRequest = NULL;
@@ -821,6 +902,172 @@
     return true;
 }
 
+bool EmulatedFakeCamera2::ConfigureThread::setupReprocess() {
+    status_t res;
+
+    mNextNeedsJpeg = true;
+    mNextIsCapture = false;
+
+    camera_metadata_entry_t reprocessStreams;
+    res = find_camera_metadata_entry(mRequest,
+            ANDROID_REQUEST_INPUT_STREAMS,
+            &reprocessStreams);
+    if (res != NO_ERROR) {
+        ALOGE("%s: error reading input stream tag", __FUNCTION__);
+        mParent->signalError();
+        return false;
+    }
+
+    mNextBuffers = new Buffers;
+
+    ALOGV("Configure: Setting up input buffers for reprocess");
+    for (size_t i = 0; i < reprocessStreams.count; i++) {
+        int streamId = reprocessStreams.data.u8[i];
+        const ReprocessStream &s = mParent->getReprocessStreamInfo(streamId);
+        if (s.format != HAL_PIXEL_FORMAT_RGB_888) {
+            ALOGE("%s: Only ZSL reprocessing supported!",
+                    __FUNCTION__);
+            mParent->signalError();
+            return false;
+        }
+        StreamBuffer b;
+        b.streamId = -streamId;
+        b.width = s.width;
+        b.height = s.height;
+        b.format = s.format;
+        b.stride = s.stride;
+        mNextBuffers->push_back(b);
+    }
+
+    camera_metadata_entry_t streams;
+    res = find_camera_metadata_entry(mRequest,
+            ANDROID_REQUEST_OUTPUT_STREAMS,
+            &streams);
+    if (res != NO_ERROR) {
+        ALOGE("%s: error reading output stream tag", __FUNCTION__);
+        mParent->signalError();
+        return false;
+    }
+
+    ALOGV("Configure: Setting up output buffers for reprocess");
+    for (size_t i = 0; i < streams.count; i++) {
+        int streamId = streams.data.u8[i];
+        const Stream &s = mParent->getStreamInfo(streamId);
+        if (s.format != HAL_PIXEL_FORMAT_BLOB) {
+            // TODO: Support reprocess to YUV
+            ALOGE("%s: Non-JPEG output stream %d for reprocess not supported",
+                    __FUNCTION__, streamId);
+            mParent->signalError();
+            return false;
+        }
+        StreamBuffer b;
+        b.streamId = streams.data.u8[i];
+        b.width  = s.width;
+        b.height = s.height;
+        b.format = s.format;
+        b.stride = s.stride;
+        mNextBuffers->push_back(b);
+        ALOGV("Configure:    Buffer %d: Stream %d, %d x %d, format 0x%x, "
+                "stride %d",
+                i, b.streamId, b.width, b.height, b.format, b.stride);
+    }
+
+    camera_metadata_entry_t e;
+    res = find_camera_metadata_entry(mRequest,
+            ANDROID_REQUEST_FRAME_COUNT,
+            &e);
+    if (res != NO_ERROR) {
+        ALOGE("%s: error reading frame count tag: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        mParent->signalError();
+        return false;
+    }
+    mNextFrameNumber = *e.data.i32;
+
+    return true;
+}
+
+bool EmulatedFakeCamera2::ConfigureThread::configureNextReprocess() {
+    Mutex::Autolock il(mInternalsMutex);
+
+    if (!getBuffers()) return false;
+
+    ALOGV("Configure: Done configure for reprocess %d", mNextFrameNumber);
+    mParent->mReadoutThread->setNextOperation(false, mRequest, mNextBuffers);
+
+    mRequest = NULL;
+    mNextBuffers = NULL;
+
+    Mutex::Autolock lock(mInputMutex);
+    mRequestCount--;
+
+    return true;
+}
+
+bool EmulatedFakeCamera2::ConfigureThread::getBuffers() {
+    status_t res;
+    /** Get buffers to fill for this frame */
+    for (size_t i = 0; i < mNextBuffers->size(); i++) {
+        StreamBuffer &b = mNextBuffers->editItemAt(i);
+
+        if (b.streamId > 0) {
+            Stream s = mParent->getStreamInfo(b.streamId);
+            ALOGV("Configure: Dequeing buffer from stream %d", b.streamId);
+            res = s.ops->dequeue_buffer(s.ops, &(b.buffer) );
+            if (res != NO_ERROR || b.buffer == NULL) {
+                ALOGE("%s: Unable to dequeue buffer from stream %d: %s (%d)",
+                        __FUNCTION__, b.streamId, strerror(-res), res);
+                mParent->signalError();
+                return false;
+            }
+
+            /* Lock the buffer from the perspective of the graphics mapper */
+            const Rect rect(s.width, s.height);
+
+            res = GraphicBufferMapper::get().lock(*(b.buffer),
+                    GRALLOC_USAGE_HW_CAMERA_WRITE,
+                    rect, (void**)&(b.img) );
+
+            if (res != NO_ERROR) {
+                ALOGE("%s: grbuffer_mapper.lock failure: %s (%d)",
+                        __FUNCTION__, strerror(-res), res);
+                s.ops->cancel_buffer(s.ops,
+                        b.buffer);
+                mParent->signalError();
+                return false;
+            }
+        } else {
+            ReprocessStream s = mParent->getReprocessStreamInfo(-b.streamId);
+            ALOGV("Configure: Acquiring buffer from reprocess stream %d",
+                    -b.streamId);
+            res = s.ops->acquire_buffer(s.ops, &(b.buffer) );
+            if (res != NO_ERROR || b.buffer == NULL) {
+                ALOGE("%s: Unable to acquire buffer from reprocess stream %d: "
+                        "%s (%d)", __FUNCTION__, -b.streamId,
+                        strerror(-res), res);
+                mParent->signalError();
+                return false;
+            }
+
+            /* Lock the buffer from the perspective of the graphics mapper */
+            const Rect rect(s.width, s.height);
+
+            res = GraphicBufferMapper::get().lock(*(b.buffer),
+                    GRALLOC_USAGE_HW_CAMERA_READ,
+                    rect, (void**)&(b.img) );
+            if (res != NO_ERROR) {
+                ALOGE("%s: grbuffer_mapper.lock failure: %s (%d)",
+                        __FUNCTION__, strerror(-res), res);
+                s.ops->release_buffer(s.ops,
+                        b.buffer);
+                mParent->signalError();
+                return false;
+            }
+        }
+    }
+    return true;
+}
+
 EmulatedFakeCamera2::ReadoutThread::ReadoutThread(EmulatedFakeCamera2 *parent):
         Thread(false),
         mParent(parent),
@@ -874,7 +1121,8 @@
     return (mInFlightTail + 1) % kInFlightQueueSize != mInFlightHead;
 }
 
-void EmulatedFakeCamera2::ReadoutThread::setNextCapture(
+void EmulatedFakeCamera2::ReadoutThread::setNextOperation(
+        bool isCapture,
         camera_metadata_t *request,
         Buffers *buffers) {
     Mutex::Autolock lock(mInputMutex);
@@ -883,6 +1131,7 @@
         mParent->signalError();
         return;
     }
+    mInFlightQueue[mInFlightTail].isCapture = isCapture;
     mInFlightQueue[mInFlightTail].request = request;
     mInFlightQueue[mInFlightTail].buffers = buffers;
     mInFlightTail = (mInFlightTail + 1) % kInFlightQueueSize;
@@ -952,6 +1201,7 @@
             } else {
                 Mutex::Autolock iLock(mInternalsMutex);
                 mReadySignal.signal();
+                mIsCapture = mInFlightQueue[mInFlightHead].isCapture;
                 mRequest = mInFlightQueue[mInFlightHead].request;
                 mBuffers  = mInFlightQueue[mInFlightHead].buffers;
                 mInFlightQueue[mInFlightHead].request = NULL;
@@ -967,15 +1217,30 @@
 
     nsecs_t captureTime;
 
-    bool gotFrame;
-    gotFrame = mParent->mSensor->waitForNewFrame(kWaitPerLoop,
-            &captureTime);
+    if (mIsCapture) {
+        bool gotFrame;
+        gotFrame = mParent->mSensor->waitForNewFrame(kWaitPerLoop,
+                &captureTime);
 
-    if (!gotFrame) return true;
+        if (!gotFrame) return true;
+    }
 
     Mutex::Autolock iLock(mInternalsMutex);
 
     camera_metadata_entry_t entry;
+    if (!mIsCapture) {
+        res = find_camera_metadata_entry(mRequest,
+                ANDROID_SENSOR_TIMESTAMP,
+                &entry);
+        if (res != NO_ERROR) {
+            ALOGE("%s: error reading reprocessing timestamp: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            mParent->signalError();
+            return false;
+        }
+        captureTime = entry.data.i64[0];
+    }
+
     res = find_camera_metadata_entry(mRequest,
             ANDROID_REQUEST_FRAME_COUNT,
             &entry);
@@ -1027,31 +1292,34 @@
             ALOGE("Unable to append request metadata");
         }
 
-        add_camera_metadata_entry(frame,
-                ANDROID_SENSOR_TIMESTAMP,
-                &captureTime,
-                1);
+        if (mIsCapture) {
+            add_camera_metadata_entry(frame,
+                    ANDROID_SENSOR_TIMESTAMP,
+                    &captureTime,
+                    1);
 
-        int32_t hourOfDay = (int32_t)mParent->mSensor->getScene().getHour();
-        camera_metadata_entry_t requestedHour;
-        res = find_camera_metadata_entry(frame,
-                EMULATOR_SCENE_HOUROFDAY,
-                &requestedHour);
-        if (res == NAME_NOT_FOUND) {
-            res = add_camera_metadata_entry(frame,
+            int32_t hourOfDay = (int32_t)mParent->mSensor->getScene().getHour();
+            camera_metadata_entry_t requestedHour;
+            res = find_camera_metadata_entry(frame,
                     EMULATOR_SCENE_HOUROFDAY,
-                    &hourOfDay, 1);
-            if (res != NO_ERROR) {
-                ALOGE("Unable to add vendor tag");
+                    &requestedHour);
+            if (res == NAME_NOT_FOUND) {
+                res = add_camera_metadata_entry(frame,
+                        EMULATOR_SCENE_HOUROFDAY,
+                        &hourOfDay, 1);
+                if (res != NO_ERROR) {
+                    ALOGE("Unable to add vendor tag");
+                }
+            } else if (res == OK) {
+                *requestedHour.data.i32 = hourOfDay;
+            } else {
+                ALOGE("%s: Error looking up vendor tag", __FUNCTION__);
             }
-        } else if (res == OK) {
-            *requestedHour.data.i32 = hourOfDay;
-        } else {
-            ALOGE("%s: Error looking up vendor tag", __FUNCTION__);
+
+            collectStatisticsMetadata(frame);
+            // TODO: Collect all final values used from sensor in addition to timestamp
         }
 
-        collectStatisticsMetadata(frame);
-        // TODO: Collect all final values used from sensor in addition to timestamp
         ALOGV("Readout: Enqueue frame %d", frameNumber);
         mParent->mFrameQueueDst->enqueue_frame(mParent->mFrameQueueDst,
                 frame);
@@ -1072,13 +1340,13 @@
         const StreamBuffer &b = (*mBuffers)[i];
         ALOGV("Readout:    Buffer %d: Stream %d, %d x %d, format 0x%x, stride %d",
                 i, b.streamId, b.width, b.height, b.format, b.stride);
-        if (b.streamId >= 0) {
+        if (b.streamId > 0) {
             if (b.format == HAL_PIXEL_FORMAT_BLOB) {
                 // Assumes only one BLOB buffer type per capture
                 compressedBufferIndex = i;
             } else {
-                ALOGV("Readout:    Sending image buffer %d to output stream %d",
-                        i, b.streamId);
+                ALOGV("Readout:    Sending image buffer %d (%p) to output stream %d",
+                        i, (void*)*(b.buffer), b.streamId);
                 GraphicBufferMapper::get().unlock(*(b.buffer));
                 const Stream &s = mParent->getStreamInfo(b.streamId);
                 res = s.ops->enqueue_buffer(s.ops, captureTime, b.buffer);
@@ -1253,6 +1521,8 @@
     mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
     mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
 
+    mExposureTime = kNormalExposureTime;
+
     mInputSignal.signal();
     return NO_ERROR;
 }
@@ -1308,13 +1578,24 @@
             &mode);
     mAwbMode = mode.data.u8[0];
 
-    // TODO: Override control fields
+    // TODO: Override more control fields
+
+    if (mAeMode != ANDROID_CONTROL_AE_OFF) {
+        camera_metadata_entry_t exposureTime;
+        res = find_camera_metadata_entry(request,
+                ANDROID_SENSOR_EXPOSURE_TIME,
+                &exposureTime);
+        if (res == OK) {
+            exposureTime.data.i64[0] = mExposureTime;
+        }
+    }
 
     return OK;
 }
 
 status_t EmulatedFakeCamera2::ControlThread::triggerAction(uint32_t msgType,
         int32_t ext1, int32_t ext2) {
+    ALOGV("%s: Triggering %d (%d, %d)", __FUNCTION__, msgType, ext1, ext2);
     Mutex::Autolock lock(mInputMutex);
     switch (msgType) {
         case CAMERA2_TRIGGER_AUTOFOCUS:
@@ -1339,12 +1620,24 @@
     return OK;
 }
 
-const nsecs_t EmulatedFakeCamera2::ControlThread::kControlCycleDelay = 100000000;
-const nsecs_t EmulatedFakeCamera2::ControlThread::kMinAfDuration = 500000000;
-const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxAfDuration = 900000000;
+const nsecs_t EmulatedFakeCamera2::ControlThread::kControlCycleDelay = 100 * MSEC;
+const nsecs_t EmulatedFakeCamera2::ControlThread::kMinAfDuration = 500 * MSEC;
+const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxAfDuration = 900 * MSEC;
 const float EmulatedFakeCamera2::ControlThread::kAfSuccessRate = 0.9;
+ // Once every 5 seconds
 const float EmulatedFakeCamera2::ControlThread::kContinuousAfStartRate =
-    kControlCycleDelay / 5000000000.0; // Once every 5 seconds
+        kControlCycleDelay / (5.0 * SEC);
+const nsecs_t EmulatedFakeCamera2::ControlThread::kMinAeDuration = 500 * MSEC;
+const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxAeDuration = 2 * SEC;
+const nsecs_t EmulatedFakeCamera2::ControlThread::kMinPrecaptureAeDuration = 100 * MSEC;
+const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxPrecaptureAeDuration = 400 * MSEC;
+ // Once every 3 seconds
+const float EmulatedFakeCamera2::ControlThread::kAeScanStartRate =
+    kControlCycleDelay / (3.0 * SEC);
+
+const nsecs_t EmulatedFakeCamera2::ControlThread::kNormalExposureTime = 10 * MSEC;
+const nsecs_t EmulatedFakeCamera2::ControlThread::kExposureJump = 5 * MSEC;
+const nsecs_t EmulatedFakeCamera2::ControlThread::kMinExposureTime = 1 * MSEC;
 
 bool EmulatedFakeCamera2::ControlThread::threadLoop() {
     bool afModeChange = false;
@@ -1353,14 +1646,20 @@
     uint8_t afState;
     uint8_t afMode;
     int32_t afTriggerId;
+    bool precaptureTriggered = false;
+    uint8_t aeState;
+    uint8_t aeMode;
+    int32_t precaptureTriggerId;
     nsecs_t nextSleep = kControlCycleDelay;
 
     {
         Mutex::Autolock lock(mInputMutex);
         if (mStartAf) {
+            ALOGD("Starting AF trigger processing");
             afTriggered = true;
             mStartAf = false;
         } else if (mCancelAf) {
+            ALOGD("Starting cancel AF trigger processing");
             afCancelled = true;
             mCancelAf = false;
         }
@@ -1370,6 +1669,15 @@
         mAfModeChange = false;
 
         afTriggerId = mAfTriggerId;
+
+        if (mStartPrecapture) {
+            ALOGD("Starting precapture trigger processing");
+            precaptureTriggered = true;
+            mStartPrecapture = false;
+        }
+        aeState = mAeState;
+        aeMode = mAeMode;
+        precaptureTriggerId = mPrecaptureTriggerId;
     }
 
     if (afCancelled || afModeChange) {
@@ -1392,6 +1700,16 @@
 
     updateAfState(afState, afTriggerId);
 
+    if (precaptureTriggered) {
+        aeState = processPrecaptureTrigger(aeMode, aeState);
+    }
+
+    aeState = maybeStartAeScan(aeMode, aeState);
+
+    aeState = updateAeScan(aeMode, aeState, &nextSleep);
+
+    updateAeState(aeState, precaptureTriggerId);
+
     int ret;
     timespec t;
     t.tv_sec = 0;
@@ -1400,6 +1718,13 @@
         ret = nanosleep(&t, &t);
     } while (ret != 0);
 
+    if (mAfScanDuration > 0) {
+        mAfScanDuration -= nextSleep;
+    }
+    if (mAeScanDuration > 0) {
+        mAeScanDuration -= nextSleep;
+    }
+
     return true;
 }
 
@@ -1504,7 +1829,7 @@
         return afState;
     }
 
-    if (mAfScanDuration == 0) {
+    if (mAfScanDuration <= 0) {
         ALOGV("%s: AF scan done", __FUNCTION__);
         switch (afMode) {
             case ANDROID_CONTROL_AF_MACRO:
@@ -1534,9 +1859,6 @@
     } else {
         if (mAfScanDuration <= *maxSleep) {
             *maxSleep = mAfScanDuration;
-            mAfScanDuration = 0;
-        } else {
-            mAfScanDuration -= *maxSleep;
         }
     }
     return afState;
@@ -1554,6 +1876,97 @@
     }
 }
 
+int EmulatedFakeCamera2::ControlThread::processPrecaptureTrigger(uint8_t aeMode,
+        uint8_t aeState) {
+    switch (aeMode) {
+        case ANDROID_CONTROL_AE_OFF:
+        case ANDROID_CONTROL_AE_LOCKED:
+            // Don't do anything for these
+            return aeState;
+        case ANDROID_CONTROL_AE_ON:
+        case ANDROID_CONTROL_AE_ON_AUTO_FLASH:
+        case ANDROID_CONTROL_AE_ON_ALWAYS_FLASH:
+        case ANDROID_CONTROL_AE_ON_AUTO_FLASH_REDEYE:
+            // Trigger a precapture cycle
+            aeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
+            mAeScanDuration = ((double)rand() / RAND_MAX) *
+                    (kMaxPrecaptureAeDuration - kMinPrecaptureAeDuration) +
+                    kMinPrecaptureAeDuration;
+            ALOGD("%s: AE precapture scan start, duration %lld ms",
+                    __FUNCTION__, mAeScanDuration / 1000000);
+
+    }
+    return aeState;
+}
+
+int EmulatedFakeCamera2::ControlThread::maybeStartAeScan(uint8_t aeMode,
+        uint8_t aeState) {
+    switch (aeMode) {
+        case ANDROID_CONTROL_AE_OFF:
+        case ANDROID_CONTROL_AE_LOCKED:
+            // Don't do anything for these
+            break;
+        case ANDROID_CONTROL_AE_ON:
+        case ANDROID_CONTROL_AE_ON_AUTO_FLASH:
+        case ANDROID_CONTROL_AE_ON_ALWAYS_FLASH:
+        case ANDROID_CONTROL_AE_ON_AUTO_FLASH_REDEYE: {
+            if (aeState != ANDROID_CONTROL_AE_STATE_INACTIVE &&
+                    aeState != ANDROID_CONTROL_AE_STATE_CONVERGED) break;
+
+            bool startScan = ((double)rand() / RAND_MAX) < kAeScanStartRate;
+            if (startScan) {
+                mAeScanDuration = ((double)rand() / RAND_MAX) *
+                        (kMaxAeDuration - kMinAeDuration) + kMinAeDuration;
+                aeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
+                ALOGD("%s: AE scan start, duration %lld ms",
+                        __FUNCTION__, mAeScanDuration / 1000000);
+            }
+        }
+    }
+
+    return aeState;
+}
+
+int EmulatedFakeCamera2::ControlThread::updateAeScan(uint8_t aeMode,
+        uint8_t aeState, nsecs_t *maxSleep) {
+    if ((aeState == ANDROID_CONTROL_AE_STATE_SEARCHING) ||
+            (aeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE)) {
+        if (mAeScanDuration <= 0) {
+            ALOGD("%s: AE scan done", __FUNCTION__);
+            aeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
+
+            Mutex::Autolock lock(mInputMutex);
+            mExposureTime = kNormalExposureTime;
+        } else {
+            if (mAeScanDuration <= *maxSleep) {
+                *maxSleep = mAeScanDuration;
+            }
+
+            int64_t exposureDelta =
+                    ((double)rand() / RAND_MAX) * 2 * kExposureJump -
+                    kExposureJump;
+            Mutex::Autolock lock(mInputMutex);
+            mExposureTime = mExposureTime + exposureDelta;
+            if (mExposureTime < kMinExposureTime) mExposureTime = kMinExposureTime;
+        }
+    }
+
+    return aeState;
+}
+
+
+void EmulatedFakeCamera2::ControlThread::updateAeState(uint8_t newState,
+        int32_t triggerId) {
+    Mutex::Autolock lock(mInputMutex);
+    if (mAeState != newState) {
+        ALOGD("%s: Autoexposure state now %d, id %d", __FUNCTION__,
+                newState, triggerId);
+        mAeState = newState;
+        mParent->sendNotification(CAMERA2_MSG_AUTOEXPOSURE,
+                newState, triggerId, 0);
+    }
+}
+
 /** Private methods */
 
 status_t EmulatedFakeCamera2::constructStaticInfo(
@@ -1894,12 +2307,11 @@
     if ( ( ret = addOrSize(*request, sizeRequest, &entryCount, &dataCount, \
             tag, data, count) ) != OK ) return ret
 
-    static const int64_t USEC = 1000LL;
-    static const int64_t MSEC = USEC * 1000LL;
-    static const int64_t SEC = MSEC * 1000LL;
-
     /** android.request */
 
+    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
+    ADD_OR_SIZE(ANDROID_REQUEST_TYPE, &requestType, 1);
+
     static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_FULL;
     ADD_OR_SIZE(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);
 
@@ -2250,7 +2662,12 @@
         return true;
     }
     return false;
- }
+}
+
+bool EmulatedFakeCamera2::isReprocessStreamInUse(uint32_t id) {
+    // TODO: implement
+    return false;
+}
 
 const Stream& EmulatedFakeCamera2::getStreamInfo(uint32_t streamId) {
     Mutex::Autolock lock(mMutex);
@@ -2258,4 +2675,10 @@
     return mStreams.valueFor(streamId);
 }
 
+const ReprocessStream& EmulatedFakeCamera2::getReprocessStreamInfo(uint32_t streamId) {
+    Mutex::Autolock lock(mMutex);
+
+    return mReprocessStreams.valueFor(streamId);
+}
+
 };  /* namespace android */
diff --git a/tools/emulator/system/camera/EmulatedFakeCamera2.h b/tools/emulator/system/camera/EmulatedFakeCamera2.h
index 11016e8..d2420e8 100644
--- a/tools/emulator/system/camera/EmulatedFakeCamera2.h
+++ b/tools/emulator/system/camera/EmulatedFakeCamera2.h
@@ -110,7 +110,12 @@
     //         uint32_t *usage,
     //         uint32_t *max_buffers);
 
-    // virtual int releaseReprocessStream(uint32_t stream_id);
+    virtual int allocateReprocessStreamFromStream(
+            uint32_t output_stream_id,
+            const camera2_stream_in_ops_t *stream_ops,
+            uint32_t *stream_id);
+
+    virtual int releaseReprocessStream(uint32_t stream_id);
 
     virtual int triggerAction(uint32_t trigger_id,
             int32_t ext1,
@@ -132,6 +137,7 @@
 
     // Get information about a given stream. Will lock mMutex
     const Stream &getStreamInfo(uint32_t streamId);
+    const ReprocessStream &getReprocessStreamInfo(uint32_t streamId);
 
     // Notifies rest of camera subsystem of serious error
     void signalError();
@@ -163,6 +169,10 @@
      * requests. Assumes mMutex is locked */
     bool isStreamInUse(uint32_t streamId);
 
+    /** Determine if the reprocess stream id is listed in any
+     * currently-in-flight requests. Assumes mMutex is locked */
+    bool isReprocessStreamInUse(uint32_t streamId);
+
     /****************************************************************************
      * Pipeline controller threads
      ***************************************************************************/
@@ -180,10 +190,19 @@
         int getInProgressCount();
       private:
         EmulatedFakeCamera2 *mParent;
+        static const nsecs_t kWaitPerLoop = 10000000L; // 10 ms
 
         bool mRunning;
         bool threadLoop();
 
+        bool setupCapture();
+        bool setupReprocess();
+
+        bool configureNextCapture();
+        bool configureNextReprocess();
+
+        bool getBuffers();
+
         Mutex mInputMutex; // Protects mActive, mRequestCount
         Condition mInputSignal;
         bool mActive; // Whether we're waiting for input requests or actively
@@ -195,6 +214,7 @@
         Mutex mInternalsMutex; // Lock before accessing below members.
         bool    mWaitingForReadout;
         bool    mNextNeedsJpeg;
+        bool    mNextIsCapture;
         int32_t mNextFrameNumber;
         int64_t mNextExposureTime;
         int64_t mNextFrameDuration;
@@ -212,9 +232,9 @@
         // Input
         status_t waitUntilRunning();
         bool waitForReady(nsecs_t timeout);
-        void setNextCapture(camera_metadata_t *request,
+        void setNextOperation(bool isCapture,
+                camera_metadata_t *request,
                 Buffers *buffers);
-
         bool isStreamInUse(uint32_t id);
         int getInProgressCount();
       private:
@@ -235,6 +255,7 @@
 
         static const int kInFlightQueueSize = 4;
         struct InFlightQueue {
+            bool isCapture;
             camera_metadata_t *request;
             Buffers *buffers;
         } *mInFlightQueue;
@@ -246,6 +267,8 @@
 
         // Internals
         Mutex mInternalsMutex;
+
+        bool mIsCapture;
         camera_metadata_t *mRequest;
         Buffers *mBuffers;
 
@@ -278,6 +301,16 @@
         static const float kAfSuccessRate;
         static const float kContinuousAfStartRate;
 
+        static const float kAeScanStartRate;
+        static const nsecs_t kMinAeDuration;
+        static const nsecs_t kMaxAeDuration;
+        static const nsecs_t kMinPrecaptureAeDuration;
+        static const nsecs_t kMaxPrecaptureAeDuration;
+
+        static const nsecs_t kNormalExposureTime;
+        static const nsecs_t kExposureJump;
+        static const nsecs_t kMinExposureTime;
+
         EmulatedFakeCamera2 *mParent;
 
         bool mRunning;
@@ -312,17 +345,26 @@
         uint8_t mAeState;
         uint8_t mAwbState;
 
+        // Current control parameters
+        nsecs_t mExposureTime;
+
         // Private to threadLoop and its utility methods
 
         nsecs_t mAfScanDuration;
+        nsecs_t mAeScanDuration;
         bool mLockAfterPassiveScan;
 
-        // Utility methods
+        // Utility methods for AF
         int processAfTrigger(uint8_t afMode, uint8_t afState);
         int maybeStartAfScan(uint8_t afMode, uint8_t afState);
         int updateAfScan(uint8_t afMode, uint8_t afState, nsecs_t *maxSleep);
         void updateAfState(uint8_t newState, int32_t triggerId);
 
+        // Utility methods for precapture trigger
+        int processPrecaptureTrigger(uint8_t aeMode, uint8_t aeState);
+        int maybeStartAeScan(uint8_t aeMode, uint8_t aeState);
+        int updateAeScan(uint8_t aeMode, uint8_t aeState, nsecs_t *maxSleep);
+        void updateAeState(uint8_t newState, int32_t triggerId);
     };
 
     /****************************************************************************
@@ -332,6 +374,7 @@
     static const uint32_t kMaxRawStreamCount = 1;
     static const uint32_t kMaxProcessedStreamCount = 3;
     static const uint32_t kMaxJpegStreamCount = 1;
+    static const uint32_t kMaxReprocessStreamCount = 2;
     static const uint32_t kMaxBufferCount = 4;
     static const uint32_t kAvailableFormats[];
     static const uint32_t kAvailableRawSizes[];
@@ -358,7 +401,11 @@
     uint32_t mProcessedStreamCount;
     uint32_t mJpegStreamCount;
 
+    uint32_t mNextReprocessStreamId;
+    uint32_t mReprocessStreamCount;
+
     KeyedVector<uint32_t, Stream> mStreams;
+    KeyedVector<uint32_t, ReprocessStream> mReprocessStreams;
 
     /** Simulated hardware interfaces */
     sp<Sensor> mSensor;
diff --git a/tools/emulator/system/camera/fake-pipeline2/Base.h b/tools/emulator/system/camera/fake-pipeline2/Base.h
index f7ef9b1..057629b 100644
--- a/tools/emulator/system/camera/fake-pipeline2/Base.h
+++ b/tools/emulator/system/camera/fake-pipeline2/Base.h
@@ -31,6 +31,9 @@
 
 /* Internal structure for passing buffers across threads */
 struct StreamBuffer {
+    // Positive numbers are output streams
+    // Negative numbers are input reprocess streams
+    // Zero is an auxillary buffer
     int streamId;
     uint32_t width, height;
     uint32_t format;
@@ -47,6 +50,15 @@
     uint32_t stride;
 };
 
+struct ReprocessStream {
+    const camera2_stream_in_ops_t *ops;
+    uint32_t width, height;
+    int32_t format;
+    uint32_t stride;
+    // -1 if the reprocessing stream is independent
+    int32_t sourceStreamId;
+};
+
 } // namespace android;
 
 #endif
diff --git a/tools/emulator/system/camera/fake-pipeline2/JpegCompressor.cpp b/tools/emulator/system/camera/fake-pipeline2/JpegCompressor.cpp
index 76fbb94..20b9634 100644
--- a/tools/emulator/system/camera/fake-pipeline2/JpegCompressor.cpp
+++ b/tools/emulator/system/camera/fake-pipeline2/JpegCompressor.cpp
@@ -77,7 +77,8 @@
     Mutex::Autolock lock(mMutex);
     ALOGV("%s: Starting compression thread", __FUNCTION__);
 
-    // Find source and target buffers
+    // Find source and target buffers. Assumes only one buffer matches
+    // each condition!
 
     bool foundJpeg = false, mFoundAux = false;
     for (size_t i = 0; i < mBuffers->size(); i++) {
@@ -85,7 +86,7 @@
         if (b.format == HAL_PIXEL_FORMAT_BLOB) {
             mJpegBuffer = b;
             mFoundJpeg = true;
-        } else if (b.streamId == -1) {
+        } else if (b.streamId <= 0) {
             mAuxBuffer = b;
             mFoundAux = true;
         }
@@ -216,11 +217,24 @@
 }
 
 void JpegCompressor::cleanUp() {
+    status_t res;
     jpeg_destroy_compress(&mCInfo);
     Mutex::Autolock lock(mBusyMutex);
 
     if (mFoundAux) {
-        delete[] mAuxBuffer.img;
+        if (mAuxBuffer.streamId == 0) {
+            delete[] mAuxBuffer.img;
+        } else {
+            GraphicBufferMapper::get().unlock(*(mAuxBuffer.buffer));
+            const ReprocessStream &s =
+                    mParent->getReprocessStreamInfo(-mAuxBuffer.streamId);
+            res = s.ops->release_buffer(s.ops, mAuxBuffer.buffer);
+            if (res != OK) {
+                ALOGE("Error releasing reprocess buffer %p: %s (%d)",
+                        mAuxBuffer.buffer, strerror(-res), res);
+                mParent->signalError();
+            }
+        }
     }
     delete mBuffers;
     mBuffers = NULL;
diff --git a/tools/emulator/system/camera/fake-pipeline2/Sensor.cpp b/tools/emulator/system/camera/fake-pipeline2/Sensor.cpp
index d00b6ee..73f1fb5 100644
--- a/tools/emulator/system/camera/fake-pipeline2/Sensor.cpp
+++ b/tools/emulator/system/camera/fake-pipeline2/Sensor.cpp
@@ -293,6 +293,8 @@
                 (float)exposureDuration/1e6, gain);
         mScene.setExposureDuration((float)exposureDuration/1e9);
         mScene.calculateScene(mNextCaptureTime);
+
+        // Might be adding more buffers, so size isn't constant
         for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
             const StreamBuffer &b = (*mNextCapturedBuffers)[i];
             ALOGVV("Sensor capturing buffer %d: stream %d,"
@@ -303,6 +305,9 @@
                 case HAL_PIXEL_FORMAT_RAW_SENSOR:
                     captureRaw(b.img, gain, b.stride);
                     break;
+                case HAL_PIXEL_FORMAT_RGB_888:
+                    captureRGB(b.img, gain, b.stride);
+                    break;
                 case HAL_PIXEL_FORMAT_RGBA_8888:
                     captureRGBA(b.img, gain, b.stride);
                     break;
@@ -311,7 +316,7 @@
                     // Assumes only one BLOB (JPEG) buffer in
                     // mNextCapturedBuffers
                     StreamBuffer bAux;
-                    bAux.streamId = -1;
+                    bAux.streamId = 0;
                     bAux.width = b.width;
                     bAux.height = b.height;
                     bAux.format = HAL_PIXEL_FORMAT_RGB_888;
@@ -319,7 +324,6 @@
                     bAux.buffer = NULL;
                     // TODO: Reuse these
                     bAux.img = new uint8_t[b.width * b.height * 3];
-                    captureRGB(bAux.img, gain, b.stride);
                     mNextCapturedBuffers->push_back(bAux);
                     break;
                 case HAL_PIXEL_FORMAT_YCrCb_420_SP: