Revert "EmulatedFakeCamera2: Add precapture trigger and reprocess support"

Reverting because this change breaks more dependent projects than I realized.

This reverts commit 4573a7194d1e5d13d402ba2ca41ef0c25a120f67

Change-Id: I53cfbc5cf5ef4436f5fa0c8b1ceb6296797e32fe
diff --git a/tools/emulator/system/camera/EmulatedCamera2.cpp b/tools/emulator/system/camera/EmulatedCamera2.cpp
index bbc1740..fa7ee4d 100644
--- a/tools/emulator/system/camera/EmulatedCamera2.cpp
+++ b/tools/emulator/system/camera/EmulatedCamera2.cpp
@@ -162,13 +162,6 @@
     return INVALID_OPERATION;
 }
 
-int EmulatedCamera2::allocateReprocessStreamFromStream(
-        uint32_t output_stream_id,
-        const camera2_stream_in_ops_t *reprocess_stream_ops,
-        uint32_t *stream_id) {
-    return INVALID_OPERATION;
-}
-
 int EmulatedCamera2::releaseReprocessStream(uint32_t stream_id) {
     return INVALID_OPERATION;
 }
@@ -295,17 +288,6 @@
             reprocess_stream_ops, stream_id, consumer_usage, max_buffers);
 }
 
-int EmulatedCamera2::allocate_reprocess_stream_from_stream(
-            const camera2_device_t *d,
-            uint32_t output_stream_id,
-            const camera2_stream_in_ops_t *reprocess_stream_ops,
-            uint32_t *stream_id) {
-    EmulatedCamera2* ec = getInstance(d);
-    return ec->allocateReprocessStreamFromStream(output_stream_id,
-            reprocess_stream_ops, stream_id);
-}
-
-
 int EmulatedCamera2::release_reprocess_stream(const camera2_device_t *d,
         uint32_t stream_id) {
     EmulatedCamera2* ec = getInstance(d);
@@ -397,7 +379,6 @@
     EmulatedCamera2::register_stream_buffers,
     EmulatedCamera2::release_stream,
     EmulatedCamera2::allocate_reprocess_stream,
-    EmulatedCamera2::allocate_reprocess_stream_from_stream,
     EmulatedCamera2::release_reprocess_stream,
     EmulatedCamera2::trigger_action,
     EmulatedCamera2::set_notify_callback,
diff --git a/tools/emulator/system/camera/EmulatedCamera2.h b/tools/emulator/system/camera/EmulatedCamera2.h
index 755ed0e..a294454 100644
--- a/tools/emulator/system/camera/EmulatedCamera2.h
+++ b/tools/emulator/system/camera/EmulatedCamera2.h
@@ -126,11 +126,6 @@
             uint32_t *consumer_usage,
             uint32_t *max_buffers);
 
-    virtual int allocateReprocessStreamFromStream(
-            uint32_t output_stream_id,
-            const camera2_stream_in_ops_t *reprocess_stream_ops,
-            uint32_t *stream_id);
-
     virtual int releaseReprocessStream(uint32_t stream_id);
 
     /** 3A action triggering */
@@ -202,11 +197,6 @@
             uint32_t *consumer_usage,
             uint32_t *max_buffers);
 
-    static int allocate_reprocess_stream_from_stream(const camera2_device_t *,
-            uint32_t output_stream_id,
-            const camera2_stream_in_ops_t *reprocess_stream_ops,
-            uint32_t *stream_id);
-
     static int release_reprocess_stream(const camera2_device_t *,
             uint32_t stream_id);
 
diff --git a/tools/emulator/system/camera/EmulatedFakeCamera2.cpp b/tools/emulator/system/camera/EmulatedFakeCamera2.cpp
index bc1517e..08475b1 100644
--- a/tools/emulator/system/camera/EmulatedFakeCamera2.cpp
+++ b/tools/emulator/system/camera/EmulatedFakeCamera2.cpp
@@ -31,10 +31,6 @@
 
 namespace android {
 
-const int64_t USEC = 1000LL;
-const int64_t MSEC = USEC * 1000LL;
-const int64_t SEC = MSEC * 1000LL;
-
 const uint32_t EmulatedFakeCamera2::kAvailableFormats[4] = {
         HAL_PIXEL_FORMAT_RAW_SENSOR,
         HAL_PIXEL_FORMAT_BLOB,
@@ -122,12 +118,10 @@
     }
     if (res != OK) return res;
 
-    mNextStreamId = 1;
-    mNextReprocessStreamId = 1;
+    mNextStreamId = 0;
     mRawStreamCount = 0;
     mProcessedStreamCount = 0;
     mJpegStreamCount = 0;
-    mReprocessStreamCount = 0;
 
     return NO_ERROR;
 }
@@ -146,8 +140,7 @@
     mSensor = new Sensor(this);
     mJpegCompressor = new JpegCompressor(this);
 
-    mNextStreamId = 1;
-    mNextReprocessStreamId = 1;
+    mNextStreamId = 0;
 
     res = mSensor->startUp();
     if (res != NO_ERROR) return res;
@@ -442,69 +435,6 @@
     return NO_ERROR;
 }
 
-int EmulatedFakeCamera2::allocateReprocessStreamFromStream(
-        uint32_t output_stream_id,
-        const camera2_stream_in_ops_t *stream_ops,
-        uint32_t *stream_id) {
-    Mutex::Autolock l(mMutex);
-
-    ssize_t baseStreamIndex = mStreams.indexOfKey(output_stream_id);
-    if (baseStreamIndex < 0) {
-        ALOGE("%s: Unknown output stream id %d!", __FUNCTION__, output_stream_id);
-        return BAD_VALUE;
-    }
-
-    const Stream &baseStream = mStreams[baseStreamIndex];
-
-    // We'll reprocess anything we produced
-
-    if (mReprocessStreamCount >= kMaxReprocessStreamCount) {
-        ALOGE("%s: Cannot allocate another reprocess stream (%d already allocated)",
-                __FUNCTION__, mReprocessStreamCount);
-        return INVALID_OPERATION;
-    }
-    mReprocessStreamCount++;
-
-    ReprocessStream newStream;
-    newStream.ops = stream_ops;
-    newStream.width = baseStream.width;
-    newStream.height = baseStream.height;
-    newStream.format = baseStream.format;
-    newStream.stride = baseStream.stride;
-    newStream.sourceStreamId = output_stream_id;
-
-    *stream_id = mNextReprocessStreamId;
-    mReprocessStreams.add(mNextReprocessStreamId, newStream);
-
-    ALOGV("Reprocess stream allocated: %d: %d, %d, 0x%x. Parent stream: %d",
-            *stream_id, newStream.width, newStream.height, newStream.format,
-            output_stream_id);
-
-    mNextReprocessStreamId++;
-    return NO_ERROR;
-}
-
-int EmulatedFakeCamera2::releaseReprocessStream(uint32_t stream_id) {
-    Mutex::Autolock l(mMutex);
-
-    ssize_t streamIndex = mReprocessStreams.indexOfKey(stream_id);
-    if (streamIndex < 0) {
-        ALOGE("%s: Unknown reprocess stream id %d!", __FUNCTION__, stream_id);
-        return BAD_VALUE;
-    }
-
-    if (isReprocessStreamInUse(stream_id)) {
-        ALOGE("%s: Cannot release reprocessing stream %d; in use!", __FUNCTION__,
-                stream_id);
-        return BAD_VALUE;
-    }
-
-    mReprocessStreamCount--;
-    mReprocessStreams.removeItemsAt(streamIndex);
-
-    return NO_ERROR;
-}
-
 int EmulatedFakeCamera2::triggerAction(uint32_t trigger_id,
         int32_t ext1,
         int32_t ext2) {
@@ -673,6 +603,7 @@
 }
 
 bool EmulatedFakeCamera2::ConfigureThread::threadLoop() {
+    static const nsecs_t kWaitPerLoop = 10000000L; // 10 ms
     status_t res;
 
     // Check if we're currently processing or just waiting
@@ -714,32 +645,105 @@
             Mutex::Autolock lock(mInputMutex);
             mRequestCount++;
         }
+        // Get necessary parameters for sensor config
 
-        camera_metadata_entry_t type;
+        mParent->mControlThread->processRequest(mRequest);
+
+        camera_metadata_entry_t streams;
         res = find_camera_metadata_entry(mRequest,
-                ANDROID_REQUEST_TYPE,
-                &type);
+                ANDROID_REQUEST_OUTPUT_STREAMS,
+                &streams);
         if (res != NO_ERROR) {
-            ALOGE("%s: error reading request type", __FUNCTION__);
+            ALOGE("%s: error reading output stream tag", __FUNCTION__);
             mParent->signalError();
             return false;
         }
-        bool success = false;;
-        switch (type.data.u8[0]) {
-            case ANDROID_REQUEST_TYPE_CAPTURE:
-                success = setupCapture();
-                break;
-            case ANDROID_REQUEST_TYPE_REPROCESS:
-                success = setupReprocess();
-                break;
-            default:
-                ALOGE("%s: Unexpected request type %d",
-                        __FUNCTION__, type.data.u8[0]);
-                mParent->signalError();
-                break;
-        }
-        if (!success) return false;
 
+        mNextBuffers = new Buffers;
+        mNextNeedsJpeg = false;
+        ALOGV("Configure: Setting up buffers for capture");
+        for (size_t i = 0; i < streams.count; i++) {
+            int streamId = streams.data.u8[i];
+            const Stream &s = mParent->getStreamInfo(streamId);
+            if (s.format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+                ALOGE("%s: Stream %d does not have a concrete pixel format, but "
+                        "is included in a request!", __FUNCTION__, streamId);
+                mParent->signalError();
+                return false;
+            }
+            StreamBuffer b;
+            b.streamId = streams.data.u8[i];
+            b.width  = s.width;
+            b.height = s.height;
+            b.format = s.format;
+            b.stride = s.stride;
+            mNextBuffers->push_back(b);
+            ALOGV("Configure:    Buffer %d: Stream %d, %d x %d, format 0x%x, "
+                    "stride %d",
+                    i, b.streamId, b.width, b.height, b.format, b.stride);
+            if (b.format == HAL_PIXEL_FORMAT_BLOB) {
+                mNextNeedsJpeg = true;
+            }
+        }
+
+        camera_metadata_entry_t e;
+        res = find_camera_metadata_entry(mRequest,
+                ANDROID_REQUEST_FRAME_COUNT,
+                &e);
+        if (res != NO_ERROR) {
+            ALOGE("%s: error reading frame count tag: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            mParent->signalError();
+            return false;
+        }
+        mNextFrameNumber = *e.data.i32;
+
+        res = find_camera_metadata_entry(mRequest,
+                ANDROID_SENSOR_EXPOSURE_TIME,
+                &e);
+        if (res != NO_ERROR) {
+            ALOGE("%s: error reading exposure time tag: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            mParent->signalError();
+            return false;
+        }
+        mNextExposureTime = *e.data.i64;
+
+        res = find_camera_metadata_entry(mRequest,
+                ANDROID_SENSOR_FRAME_DURATION,
+                &e);
+        if (res != NO_ERROR) {
+            ALOGE("%s: error reading frame duration tag", __FUNCTION__);
+            mParent->signalError();
+            return false;
+        }
+        mNextFrameDuration = *e.data.i64;
+
+        if (mNextFrameDuration <
+                mNextExposureTime + Sensor::kMinVerticalBlank) {
+            mNextFrameDuration = mNextExposureTime + Sensor::kMinVerticalBlank;
+        }
+        res = find_camera_metadata_entry(mRequest,
+                ANDROID_SENSOR_SENSITIVITY,
+                &e);
+        if (res != NO_ERROR) {
+            ALOGE("%s: error reading sensitivity tag", __FUNCTION__);
+            mParent->signalError();
+            return false;
+        }
+        mNextSensitivity = *e.data.i32;
+
+        res = find_camera_metadata_entry(mRequest,
+                EMULATOR_SCENE_HOUROFDAY,
+                &e);
+        if (res == NO_ERROR) {
+            ALOGV("Setting hour: %d", *e.data.i32);
+            mParent->mSensor->getScene().setHour(*e.data.i32);
+        }
+
+        // Start waiting on readout thread
+        mWaitingForReadout = true;
+        ALOGV("Configure: Waiting for readout thread");
     }
 
     if (mWaitingForReadout) {
@@ -763,134 +767,49 @@
         ALOGV("Configure: Waiting for sensor");
         mNextNeedsJpeg = false;
     }
-
-    if (mNextIsCapture) {
-        return configureNextCapture();
-    } else {
-        return configureNextReprocess();
-    }
-}
-
-bool EmulatedFakeCamera2::ConfigureThread::setupCapture() {
-    status_t res;
-
-    mNextIsCapture = true;
-    // Get necessary parameters for sensor config
-    mParent->mControlThread->processRequest(mRequest);
-
-    camera_metadata_entry_t streams;
-    res = find_camera_metadata_entry(mRequest,
-            ANDROID_REQUEST_OUTPUT_STREAMS,
-            &streams);
-    if (res != NO_ERROR) {
-        ALOGE("%s: error reading output stream tag", __FUNCTION__);
-        mParent->signalError();
-        return false;
-    }
-
-    mNextBuffers = new Buffers;
-    mNextNeedsJpeg = false;
-    ALOGV("Configure: Setting up buffers for capture");
-    for (size_t i = 0; i < streams.count; i++) {
-        int streamId = streams.data.u8[i];
-        const Stream &s = mParent->getStreamInfo(streamId);
-        if (s.format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
-            ALOGE("%s: Stream %d does not have a concrete pixel format, but "
-                    "is included in a request!", __FUNCTION__, streamId);
-            mParent->signalError();
-            return false;
-        }
-        StreamBuffer b;
-        b.streamId = streams.data.u8[i];
-        b.width  = s.width;
-        b.height = s.height;
-        b.format = s.format;
-        b.stride = s.stride;
-        mNextBuffers->push_back(b);
-        ALOGV("Configure:    Buffer %d: Stream %d, %d x %d, format 0x%x, "
-                "stride %d",
-                i, b.streamId, b.width, b.height, b.format, b.stride);
-        if (b.format == HAL_PIXEL_FORMAT_BLOB) {
-            mNextNeedsJpeg = true;
-        }
-    }
-
-    camera_metadata_entry_t e;
-    res = find_camera_metadata_entry(mRequest,
-            ANDROID_REQUEST_FRAME_COUNT,
-            &e);
-    if (res != NO_ERROR) {
-        ALOGE("%s: error reading frame count tag: %s (%d)",
-                __FUNCTION__, strerror(-res), res);
-        mParent->signalError();
-        return false;
-    }
-    mNextFrameNumber = *e.data.i32;
-
-    res = find_camera_metadata_entry(mRequest,
-            ANDROID_SENSOR_EXPOSURE_TIME,
-            &e);
-    if (res != NO_ERROR) {
-        ALOGE("%s: error reading exposure time tag: %s (%d)",
-                __FUNCTION__, strerror(-res), res);
-        mParent->signalError();
-        return false;
-    }
-    mNextExposureTime = *e.data.i64;
-
-    res = find_camera_metadata_entry(mRequest,
-            ANDROID_SENSOR_FRAME_DURATION,
-            &e);
-    if (res != NO_ERROR) {
-        ALOGE("%s: error reading frame duration tag", __FUNCTION__);
-        mParent->signalError();
-        return false;
-    }
-    mNextFrameDuration = *e.data.i64;
-
-    if (mNextFrameDuration <
-            mNextExposureTime + Sensor::kMinVerticalBlank) {
-        mNextFrameDuration = mNextExposureTime + Sensor::kMinVerticalBlank;
-    }
-    res = find_camera_metadata_entry(mRequest,
-            ANDROID_SENSOR_SENSITIVITY,
-            &e);
-    if (res != NO_ERROR) {
-        ALOGE("%s: error reading sensitivity tag", __FUNCTION__);
-        mParent->signalError();
-        return false;
-    }
-    mNextSensitivity = *e.data.i32;
-
-    res = find_camera_metadata_entry(mRequest,
-            EMULATOR_SCENE_HOUROFDAY,
-            &e);
-    if (res == NO_ERROR) {
-        ALOGV("Setting hour: %d", *e.data.i32);
-        mParent->mSensor->getScene().setHour(*e.data.i32);
-    }
-
-    // Start waiting on readout thread
-    mWaitingForReadout = true;
-    ALOGV("Configure: Waiting for readout thread");
-
-    return true;
-}
-
-bool EmulatedFakeCamera2::ConfigureThread::configureNextCapture() {
     bool vsync = mParent->mSensor->waitForVSync(kWaitPerLoop);
+
     if (!vsync) return true;
 
     Mutex::Autolock il(mInternalsMutex);
-    ALOGV("Configure: Configuring sensor for capture %d", mNextFrameNumber);
+    ALOGV("Configure: Configuring sensor for frame %d", mNextFrameNumber);
     mParent->mSensor->setExposureTime(mNextExposureTime);
     mParent->mSensor->setFrameDuration(mNextFrameDuration);
     mParent->mSensor->setSensitivity(mNextSensitivity);
 
-    getBuffers();
+    /** Get buffers to fill for this frame */
+    for (size_t i = 0; i < mNextBuffers->size(); i++) {
+        StreamBuffer &b = mNextBuffers->editItemAt(i);
 
-    ALOGV("Configure: Done configure for capture %d", mNextFrameNumber);
-    mParent->mReadoutThread->setNextOperation(true, mRequest, mNextBuffers);
+        Stream s = mParent->getStreamInfo(b.streamId);
+        ALOGV("Configure: Dequeing buffer from stream %d", b.streamId);
+        res = s.ops->dequeue_buffer(s.ops, &(b.buffer) );
+        if (res != NO_ERROR || b.buffer == NULL) {
+            ALOGE("%s: Unable to dequeue buffer from stream %d: %s (%d)",
+                    __FUNCTION__, b.streamId, strerror(-res), res);
+            mParent->signalError();
+            return false;
+        }
+
+        /* Lock the buffer from the perspective of the graphics mapper */
+        uint8_t *img;
+        const Rect rect(s.width, s.height);
+
+        res = GraphicBufferMapper::get().lock(*(b.buffer),
+                GRALLOC_USAGE_HW_CAMERA_WRITE,
+                rect, (void**)&(b.img) );
+
+        if (res != NO_ERROR) {
+            ALOGE("%s: grbuffer_mapper.lock failure: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            s.ops->cancel_buffer(s.ops,
+                    b.buffer);
+            mParent->signalError();
+            return false;
+        }
+    }
+    ALOGV("Configure: Done configure for frame %d", mNextFrameNumber);
+    mParent->mReadoutThread->setNextCapture(mRequest, mNextBuffers);
     mParent->mSensor->setDestinationBuffers(mNextBuffers);
 
     mRequest = NULL;
@@ -902,172 +821,6 @@
     return true;
 }
 
-bool EmulatedFakeCamera2::ConfigureThread::setupReprocess() {
-    status_t res;
-
-    mNextNeedsJpeg = true;
-    mNextIsCapture = false;
-
-    camera_metadata_entry_t reprocessStreams;
-    res = find_camera_metadata_entry(mRequest,
-            ANDROID_REQUEST_INPUT_STREAMS,
-            &reprocessStreams);
-    if (res != NO_ERROR) {
-        ALOGE("%s: error reading output stream tag", __FUNCTION__);
-        mParent->signalError();
-        return false;
-    }
-
-    mNextBuffers = new Buffers;
-
-    ALOGV("Configure: Setting up input buffers for reprocess");
-    for (size_t i = 0; i < reprocessStreams.count; i++) {
-        int streamId = reprocessStreams.data.u8[i];
-        const ReprocessStream &s = mParent->getReprocessStreamInfo(streamId);
-        if (s.format != HAL_PIXEL_FORMAT_RGB_888) {
-            ALOGE("%s: Only ZSL reprocessing supported!",
-                    __FUNCTION__);
-            mParent->signalError();
-            return false;
-        }
-        StreamBuffer b;
-        b.streamId = -streamId;
-        b.width = s.width;
-        b.height = s.height;
-        b.format = s.format;
-        b.stride = s.stride;
-        mNextBuffers->push_back(b);
-    }
-
-    camera_metadata_entry_t streams;
-    res = find_camera_metadata_entry(mRequest,
-            ANDROID_REQUEST_OUTPUT_STREAMS,
-            &streams);
-    if (res != NO_ERROR) {
-        ALOGE("%s: error reading output stream tag", __FUNCTION__);
-        mParent->signalError();
-        return false;
-    }
-
-    ALOGV("Configure: Setting up output buffers for reprocess");
-    for (size_t i = 0; i < streams.count; i++) {
-        int streamId = streams.data.u8[i];
-        const Stream &s = mParent->getStreamInfo(streamId);
-        if (s.format != HAL_PIXEL_FORMAT_BLOB) {
-            // TODO: Support reprocess to YUV
-            ALOGE("%s: Non-JPEG output stream %d for reprocess not supported",
-                    __FUNCTION__, streamId);
-            mParent->signalError();
-            return false;
-        }
-        StreamBuffer b;
-        b.streamId = streams.data.u8[i];
-        b.width  = s.width;
-        b.height = s.height;
-        b.format = s.format;
-        b.stride = s.stride;
-        mNextBuffers->push_back(b);
-        ALOGV("Configure:    Buffer %d: Stream %d, %d x %d, format 0x%x, "
-                "stride %d",
-                i, b.streamId, b.width, b.height, b.format, b.stride);
-    }
-
-    camera_metadata_entry_t e;
-    res = find_camera_metadata_entry(mRequest,
-            ANDROID_REQUEST_FRAME_COUNT,
-            &e);
-    if (res != NO_ERROR) {
-        ALOGE("%s: error reading frame count tag: %s (%d)",
-                __FUNCTION__, strerror(-res), res);
-        mParent->signalError();
-        return false;
-    }
-    mNextFrameNumber = *e.data.i32;
-
-    return true;
-}
-
-bool EmulatedFakeCamera2::ConfigureThread::configureNextReprocess() {
-    Mutex::Autolock il(mInternalsMutex);
-
-    getBuffers();
-
-    ALOGV("Configure: Done configure for reprocess %d", mNextFrameNumber);
-    mParent->mReadoutThread->setNextOperation(false, mRequest, mNextBuffers);
-
-    mRequest = NULL;
-    mNextBuffers = NULL;
-
-    Mutex::Autolock lock(mInputMutex);
-    mRequestCount--;
-
-    return true;
-}
-
-bool EmulatedFakeCamera2::ConfigureThread::getBuffers() {
-    status_t res;
-    /** Get buffers to fill for this frame */
-    for (size_t i = 0; i < mNextBuffers->size(); i++) {
-        StreamBuffer &b = mNextBuffers->editItemAt(i);
-
-        if (b.streamId > 0) {
-            Stream s = mParent->getStreamInfo(b.streamId);
-            ALOGV("Configure: Dequeing buffer from stream %d", b.streamId);
-            res = s.ops->dequeue_buffer(s.ops, &(b.buffer) );
-            if (res != NO_ERROR || b.buffer == NULL) {
-                ALOGE("%s: Unable to dequeue buffer from stream %d: %s (%d)",
-                        __FUNCTION__, b.streamId, strerror(-res), res);
-                mParent->signalError();
-                return false;
-            }
-
-            /* Lock the buffer from the perspective of the graphics mapper */
-            const Rect rect(s.width, s.height);
-
-            res = GraphicBufferMapper::get().lock(*(b.buffer),
-                    GRALLOC_USAGE_HW_CAMERA_WRITE,
-                    rect, (void**)&(b.img) );
-
-            if (res != NO_ERROR) {
-                ALOGE("%s: grbuffer_mapper.lock failure: %s (%d)",
-                        __FUNCTION__, strerror(-res), res);
-                s.ops->cancel_buffer(s.ops,
-                        b.buffer);
-                mParent->signalError();
-                return false;
-            }
-        } else {
-            ReprocessStream s = mParent->getReprocessStreamInfo(-b.streamId);
-            ALOGV("Configure: Acquiring buffer from reprocess stream %d",
-                    -b.streamId);
-            res = s.ops->acquire_buffer(s.ops, &(b.buffer) );
-            if (res != NO_ERROR || b.buffer == NULL) {
-                ALOGE("%s: Unable to acquire buffer from reprocess stream %d: "
-                        "%s (%d)", __FUNCTION__, -b.streamId,
-                        strerror(-res), res);
-                mParent->signalError();
-                return false;
-            }
-
-            /* Lock the buffer from the perspective of the graphics mapper */
-            const Rect rect(s.width, s.height);
-
-            res = GraphicBufferMapper::get().lock(*(b.buffer),
-                    GRALLOC_USAGE_HW_CAMERA_READ,
-                    rect, (void**)&(b.img) );
-            if (res != NO_ERROR) {
-                ALOGE("%s: grbuffer_mapper.lock failure: %s (%d)",
-                        __FUNCTION__, strerror(-res), res);
-                s.ops->release_buffer(s.ops,
-                        b.buffer);
-                mParent->signalError();
-                return false;
-            }
-        }
-    }
-    return true;
-}
-
 EmulatedFakeCamera2::ReadoutThread::ReadoutThread(EmulatedFakeCamera2 *parent):
         Thread(false),
         mParent(parent),
@@ -1121,8 +874,7 @@
     return (mInFlightTail + 1) % kInFlightQueueSize != mInFlightHead;
 }
 
-void EmulatedFakeCamera2::ReadoutThread::setNextOperation(
-        bool isCapture,
+void EmulatedFakeCamera2::ReadoutThread::setNextCapture(
         camera_metadata_t *request,
         Buffers *buffers) {
     Mutex::Autolock lock(mInputMutex);
@@ -1131,7 +883,6 @@
         mParent->signalError();
         return;
     }
-    mInFlightQueue[mInFlightTail].isCapture = isCapture;
     mInFlightQueue[mInFlightTail].request = request;
     mInFlightQueue[mInFlightTail].buffers = buffers;
     mInFlightTail = (mInFlightTail + 1) % kInFlightQueueSize;
@@ -1201,7 +952,6 @@
             } else {
                 Mutex::Autolock iLock(mInternalsMutex);
                 mReadySignal.signal();
-                mIsCapture = mInFlightQueue[mInFlightHead].isCapture;
                 mRequest = mInFlightQueue[mInFlightHead].request;
                 mBuffers  = mInFlightQueue[mInFlightHead].buffers;
                 mInFlightQueue[mInFlightHead].request = NULL;
@@ -1217,30 +967,15 @@
 
     nsecs_t captureTime;
 
-    if (mIsCapture) {
-        bool gotFrame;
-        gotFrame = mParent->mSensor->waitForNewFrame(kWaitPerLoop,
-                &captureTime);
+    bool gotFrame;
+    gotFrame = mParent->mSensor->waitForNewFrame(kWaitPerLoop,
+            &captureTime);
 
-        if (!gotFrame) return true;
-    }
+    if (!gotFrame) return true;
 
     Mutex::Autolock iLock(mInternalsMutex);
 
     camera_metadata_entry_t entry;
-    if (!mIsCapture) {
-        res = find_camera_metadata_entry(mRequest,
-                ANDROID_SENSOR_TIMESTAMP,
-            &entry);
-        if (res != NO_ERROR) {
-            ALOGE("%s: error reading reprocessing timestamp: %s (%d)",
-                    __FUNCTION__, strerror(-res), res);
-            mParent->signalError();
-            return false;
-        }
-        captureTime = entry.data.i64[0];
-    }
-
     res = find_camera_metadata_entry(mRequest,
             ANDROID_REQUEST_FRAME_COUNT,
             &entry);
@@ -1292,34 +1027,31 @@
             ALOGE("Unable to append request metadata");
         }
 
-        if (mIsCapture) {
-            add_camera_metadata_entry(frame,
-                    ANDROID_SENSOR_TIMESTAMP,
-                    &captureTime,
-                    1);
+        add_camera_metadata_entry(frame,
+                ANDROID_SENSOR_TIMESTAMP,
+                &captureTime,
+                1);
 
-            int32_t hourOfDay = (int32_t)mParent->mSensor->getScene().getHour();
-            camera_metadata_entry_t requestedHour;
-            res = find_camera_metadata_entry(frame,
+        int32_t hourOfDay = (int32_t)mParent->mSensor->getScene().getHour();
+        camera_metadata_entry_t requestedHour;
+        res = find_camera_metadata_entry(frame,
+                EMULATOR_SCENE_HOUROFDAY,
+                &requestedHour);
+        if (res == NAME_NOT_FOUND) {
+            res = add_camera_metadata_entry(frame,
                     EMULATOR_SCENE_HOUROFDAY,
-                    &requestedHour);
-            if (res == NAME_NOT_FOUND) {
-                res = add_camera_metadata_entry(frame,
-                        EMULATOR_SCENE_HOUROFDAY,
-                        &hourOfDay, 1);
-                if (res != NO_ERROR) {
-                    ALOGE("Unable to add vendor tag");
-                }
-            } else if (res == OK) {
-                *requestedHour.data.i32 = hourOfDay;
-            } else {
-                ALOGE("%s: Error looking up vendor tag", __FUNCTION__);
+                    &hourOfDay, 1);
+            if (res != NO_ERROR) {
+                ALOGE("Unable to add vendor tag");
             }
-
-            collectStatisticsMetadata(frame);
-            // TODO: Collect all final values used from sensor in addition to timestamp
+        } else if (res == OK) {
+            *requestedHour.data.i32 = hourOfDay;
+        } else {
+            ALOGE("%s: Error looking up vendor tag", __FUNCTION__);
         }
 
+        collectStatisticsMetadata(frame);
+        // TODO: Collect all final values used from sensor in addition to timestamp
         ALOGV("Readout: Enqueue frame %d", frameNumber);
         mParent->mFrameQueueDst->enqueue_frame(mParent->mFrameQueueDst,
                 frame);
@@ -1340,13 +1072,13 @@
         const StreamBuffer &b = (*mBuffers)[i];
         ALOGV("Readout:    Buffer %d: Stream %d, %d x %d, format 0x%x, stride %d",
                 i, b.streamId, b.width, b.height, b.format, b.stride);
-        if (b.streamId > 0) {
+        if (b.streamId >= 0) {
             if (b.format == HAL_PIXEL_FORMAT_BLOB) {
                 // Assumes only one BLOB buffer type per capture
                 compressedBufferIndex = i;
             } else {
-                ALOGV("Readout:    Sending image buffer %d (%p) to output stream %d",
-                        i, (void*)*(b.buffer), b.streamId);
+                ALOGV("Readout:    Sending image buffer %d to output stream %d",
+                        i, b.streamId);
                 GraphicBufferMapper::get().unlock(*(b.buffer));
                 const Stream &s = mParent->getStreamInfo(b.streamId);
                 res = s.ops->enqueue_buffer(s.ops, captureTime, b.buffer);
@@ -1521,8 +1253,6 @@
     mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
     mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
 
-    mExposureTime = kNormalExposureTime;
-
     mInputSignal.signal();
     return NO_ERROR;
 }
@@ -1578,24 +1308,13 @@
             &mode);
     mAwbMode = mode.data.u8[0];
 
-    // TODO: Override more control fields
-
-    if (mAeMode != ANDROID_CONTROL_AE_OFF) {
-        camera_metadata_entry_t exposureTime;
-        res = find_camera_metadata_entry(request,
-                ANDROID_SENSOR_EXPOSURE_TIME,
-                &exposureTime);
-        if (res == OK) {
-            exposureTime.data.i64[0] = mExposureTime;
-        }
-    }
+    // TODO: Override control fields
 
     return OK;
 }
 
 status_t EmulatedFakeCamera2::ControlThread::triggerAction(uint32_t msgType,
         int32_t ext1, int32_t ext2) {
-    ALOGV("%s: Triggering %d (%d, %d)", __FUNCTION__, msgType, ext1, ext2);
     Mutex::Autolock lock(mInputMutex);
     switch (msgType) {
         case CAMERA2_TRIGGER_AUTOFOCUS:
@@ -1620,24 +1339,12 @@
     return OK;
 }
 
-const nsecs_t EmulatedFakeCamera2::ControlThread::kControlCycleDelay = 100 * MSEC;
-const nsecs_t EmulatedFakeCamera2::ControlThread::kMinAfDuration = 500 * MSEC;
-const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxAfDuration = 900 * MSEC;
+const nsecs_t EmulatedFakeCamera2::ControlThread::kControlCycleDelay = 100000000;
+const nsecs_t EmulatedFakeCamera2::ControlThread::kMinAfDuration = 500000000;
+const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxAfDuration = 900000000;
 const float EmulatedFakeCamera2::ControlThread::kAfSuccessRate = 0.9;
- // Once every 5 seconds
 const float EmulatedFakeCamera2::ControlThread::kContinuousAfStartRate =
-        kControlCycleDelay / 5.0 * SEC;
-const nsecs_t EmulatedFakeCamera2::ControlThread::kMinAeDuration = 500 * MSEC;
-const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxAeDuration = 2 * SEC;
-const nsecs_t EmulatedFakeCamera2::ControlThread::kMinPrecaptureAeDuration = 100 * MSEC;
-const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxPrecaptureAeDuration = 400 * MSEC;
- // Once every 3 seconds
-const float EmulatedFakeCamera2::ControlThread::kAeScanStartRate =
-    kControlCycleDelay / 3000000000.0;
-
-const nsecs_t EmulatedFakeCamera2::ControlThread::kNormalExposureTime = 10 * MSEC;
-const nsecs_t EmulatedFakeCamera2::ControlThread::kExposureJump = 5 * MSEC;
-const nsecs_t EmulatedFakeCamera2::ControlThread::kMinExposureTime = 1 * MSEC;
+    kControlCycleDelay / 5000000000.0; // Once every 5 seconds
 
 bool EmulatedFakeCamera2::ControlThread::threadLoop() {
     bool afModeChange = false;
@@ -1646,20 +1353,14 @@
     uint8_t afState;
     uint8_t afMode;
     int32_t afTriggerId;
-    bool precaptureTriggered = false;
-    uint8_t aeState;
-    uint8_t aeMode;
-    int32_t precaptureTriggerId;
     nsecs_t nextSleep = kControlCycleDelay;
 
     {
         Mutex::Autolock lock(mInputMutex);
         if (mStartAf) {
-            ALOGD("Starting AF trigger processing");
             afTriggered = true;
             mStartAf = false;
         } else if (mCancelAf) {
-            ALOGD("Starting cancel AF trigger processing");
             afCancelled = true;
             mCancelAf = false;
         }
@@ -1669,15 +1370,6 @@
         mAfModeChange = false;
 
         afTriggerId = mAfTriggerId;
-
-        if(mStartPrecapture) {
-            ALOGD("Starting precapture trigger processing");
-            precaptureTriggered = true;
-            mStartPrecapture = false;
-        }
-        aeState = mAeState;
-        aeMode = mAeMode;
-        precaptureTriggerId = mPrecaptureTriggerId;
     }
 
     if (afCancelled || afModeChange) {
@@ -1700,16 +1392,6 @@
 
     updateAfState(afState, afTriggerId);
 
-    if (precaptureTriggered) {
-        aeState = processPrecaptureTrigger(aeMode, aeState);
-    }
-
-    aeState = maybeStartAeScan(aeMode, aeState);
-
-    aeState = updateAeScan(aeMode, aeState, &nextSleep);
-
-    updateAeState(aeState, precaptureTriggerId);
-
     int ret;
     timespec t;
     t.tv_sec = 0;
@@ -1718,13 +1400,6 @@
         ret = nanosleep(&t, &t);
     } while (ret != 0);
 
-    if (mAfScanDuration > 0) {
-        mAfScanDuration -= nextSleep;
-    }
-    if (mAeScanDuration > 0) {
-        mAeScanDuration -= nextSleep;
-    }
-
     return true;
 }
 
@@ -1829,7 +1504,7 @@
         return afState;
     }
 
-    if (mAfScanDuration <= 0) {
+    if (mAfScanDuration == 0) {
         ALOGV("%s: AF scan done", __FUNCTION__);
         switch (afMode) {
             case ANDROID_CONTROL_AF_MACRO:
@@ -1859,6 +1534,9 @@
     } else {
         if (mAfScanDuration <= *maxSleep) {
             *maxSleep = mAfScanDuration;
+            mAfScanDuration = 0;
+        } else {
+            mAfScanDuration -= *maxSleep;
         }
     }
     return afState;
@@ -1876,97 +1554,6 @@
     }
 }
 
-int EmulatedFakeCamera2::ControlThread::processPrecaptureTrigger(uint8_t aeMode,
-        uint8_t aeState) {
-    switch (aeMode) {
-        case ANDROID_CONTROL_AE_OFF:
-        case ANDROID_CONTROL_AE_LOCKED:
-            // Don't do anything for these
-            return aeState;
-        case ANDROID_CONTROL_AE_ON:
-        case ANDROID_CONTROL_AE_ON_AUTO_FLASH:
-        case ANDROID_CONTROL_AE_ON_ALWAYS_FLASH:
-        case ANDROID_CONTROL_AE_ON_AUTO_FLASH_REDEYE:
-            // Trigger a precapture cycle
-            aeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
-            mAeScanDuration = ((double)rand() / RAND_MAX) *
-                    (kMaxPrecaptureAeDuration - kMinPrecaptureAeDuration) +
-                    kMinPrecaptureAeDuration;
-            ALOGD("%s: AE precapture scan start, duration %lld ms",
-                    __FUNCTION__, mAeScanDuration / 1000000);
-
-    }
-    return aeState;
-}
-
-int EmulatedFakeCamera2::ControlThread::maybeStartAeScan(uint8_t aeMode,
-        uint8_t aeState) {
-    switch (aeMode) {
-        case ANDROID_CONTROL_AE_OFF:
-        case ANDROID_CONTROL_AE_LOCKED:
-            // Don't do anything for these
-            break;
-        case ANDROID_CONTROL_AE_ON:
-        case ANDROID_CONTROL_AE_ON_AUTO_FLASH:
-        case ANDROID_CONTROL_AE_ON_ALWAYS_FLASH:
-        case ANDROID_CONTROL_AE_ON_AUTO_FLASH_REDEYE: {
-            if (aeState != ANDROID_CONTROL_AE_STATE_INACTIVE &&
-                    aeState != ANDROID_CONTROL_AE_STATE_CONVERGED) break;
-
-            bool startScan = ((double)rand() / RAND_MAX) < kAeScanStartRate;
-            if (startScan) {
-                mAeScanDuration = ((double)rand() / RAND_MAX) *
-                (kMaxAeDuration - kMinAeDuration) + kMinAeDuration;
-                aeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
-                ALOGD("%s: AE scan start, duration %lld ms",
-                        __FUNCTION__, mAeScanDuration / 1000000);
-            }
-        }
-    }
-
-    return aeState;
-}
-
-int EmulatedFakeCamera2::ControlThread::updateAeScan(uint8_t aeMode,
-        uint8_t aeState, nsecs_t *maxSleep) {
-    if ((aeState == ANDROID_CONTROL_AE_STATE_SEARCHING) ||
-            (aeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE ) ) {
-        if (mAeScanDuration <= 0) {
-            ALOGD("%s: AE scan done", __FUNCTION__);
-            aeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
-
-            Mutex::Autolock lock(mInputMutex);
-            mExposureTime = kNormalExposureTime;
-        } else {
-            if (mAeScanDuration <= *maxSleep) {
-                *maxSleep = mAeScanDuration;
-            }
-
-            int64_t exposureDelta =
-                    ((double)rand() / RAND_MAX) * 2 * kExposureJump -
-                    kExposureJump;
-            Mutex::Autolock lock(mInputMutex);
-            mExposureTime = mExposureTime + exposureDelta;
-            if (mExposureTime < kMinExposureTime) mExposureTime = kMinExposureTime;
-        }
-    }
-
-    return aeState;
-}
-
-
-void EmulatedFakeCamera2::ControlThread::updateAeState(uint8_t newState,
-        int32_t triggerId) {
-    Mutex::Autolock lock(mInputMutex);
-    if (mAeState != newState) {
-        ALOGD("%s: Autoexposure state now %d, id %d", __FUNCTION__,
-                newState, triggerId);
-        mAeState = newState;
-        mParent->sendNotification(CAMERA2_MSG_AUTOEXPOSURE,
-                newState, triggerId, 0);
-    }
-}
-
 /** Private methods */
 
 status_t EmulatedFakeCamera2::constructStaticInfo(
@@ -2307,10 +1894,11 @@
     if ( ( ret = addOrSize(*request, sizeRequest, &entryCount, &dataCount, \
             tag, data, count) ) != OK ) return ret
 
-    /** android.request */
+    static const int64_t USEC = 1000LL;
+    static const int64_t MSEC = USEC * 1000LL;
+    static const int64_t SEC = MSEC * 1000LL;
 
-    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
-    ADD_OR_SIZE(ANDROID_REQUEST_TYPE, &requestType, 1);
+    /** android.request */
 
     static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_FULL;
     ADD_OR_SIZE(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);
@@ -2662,12 +2250,7 @@
         return true;
     }
     return false;
-}
-
-bool EmulatedFakeCamera2::isReprocessStreamInUse(uint32_t id) {
-    // TODO: implement
-    return false;
-}
+ }
 
 const Stream& EmulatedFakeCamera2::getStreamInfo(uint32_t streamId) {
     Mutex::Autolock lock(mMutex);
@@ -2675,10 +2258,4 @@
     return mStreams.valueFor(streamId);
 }
 
-const ReprocessStream& EmulatedFakeCamera2::getReprocessStreamInfo(uint32_t streamId) {
-    Mutex::Autolock lock(mMutex);
-
-    return mReprocessStreams.valueFor(streamId);
-}
-
 };  /* namespace android */
diff --git a/tools/emulator/system/camera/EmulatedFakeCamera2.h b/tools/emulator/system/camera/EmulatedFakeCamera2.h
index d2420e8..11016e8 100644
--- a/tools/emulator/system/camera/EmulatedFakeCamera2.h
+++ b/tools/emulator/system/camera/EmulatedFakeCamera2.h
@@ -110,12 +110,7 @@
     //         uint32_t *usage,
     //         uint32_t *max_buffers);
 
-    virtual int allocateReprocessStreamFromStream(
-            uint32_t output_stream_id,
-            const camera2_stream_in_ops_t *stream_ops,
-            uint32_t *stream_id);
-
-    virtual int releaseReprocessStream(uint32_t stream_id);
+    // virtual int releaseReprocessStream(uint32_t stream_id);
 
     virtual int triggerAction(uint32_t trigger_id,
             int32_t ext1,
@@ -137,7 +132,6 @@
 
     // Get information about a given stream. Will lock mMutex
     const Stream &getStreamInfo(uint32_t streamId);
-    const ReprocessStream &getReprocessStreamInfo(uint32_t streamId);
 
     // Notifies rest of camera subsystem of serious error
     void signalError();
@@ -169,10 +163,6 @@
      * requests. Assumes mMutex is locked */
     bool isStreamInUse(uint32_t streamId);
 
-    /** Determine if the reprocess stream id is listed in any
-     * currently-in-flight requests. Assumes mMutex is locked */
-    bool isReprocessStreamInUse(uint32_t streamId);
-
     /****************************************************************************
      * Pipeline controller threads
      ***************************************************************************/
@@ -190,19 +180,10 @@
         int getInProgressCount();
       private:
         EmulatedFakeCamera2 *mParent;
-        static const nsecs_t kWaitPerLoop = 10000000L; // 10 ms
 
         bool mRunning;
         bool threadLoop();
 
-        bool setupCapture();
-        bool setupReprocess();
-
-        bool configureNextCapture();
-        bool configureNextReprocess();
-
-        bool getBuffers();
-
         Mutex mInputMutex; // Protects mActive, mRequestCount
         Condition mInputSignal;
         bool mActive; // Whether we're waiting for input requests or actively
@@ -214,7 +195,6 @@
         Mutex mInternalsMutex; // Lock before accessing below members.
         bool    mWaitingForReadout;
         bool    mNextNeedsJpeg;
-        bool    mNextIsCapture;
         int32_t mNextFrameNumber;
         int64_t mNextExposureTime;
         int64_t mNextFrameDuration;
@@ -232,9 +212,9 @@
         // Input
         status_t waitUntilRunning();
         bool waitForReady(nsecs_t timeout);
-        void setNextOperation(bool isCapture,
-                camera_metadata_t *request,
+        void setNextCapture(camera_metadata_t *request,
                 Buffers *buffers);
+
         bool isStreamInUse(uint32_t id);
         int getInProgressCount();
       private:
@@ -255,7 +235,6 @@
 
         static const int kInFlightQueueSize = 4;
         struct InFlightQueue {
-            bool isCapture;
             camera_metadata_t *request;
             Buffers *buffers;
         } *mInFlightQueue;
@@ -267,8 +246,6 @@
 
         // Internals
         Mutex mInternalsMutex;
-
-        bool mIsCapture;
         camera_metadata_t *mRequest;
         Buffers *mBuffers;
 
@@ -301,16 +278,6 @@
         static const float kAfSuccessRate;
         static const float kContinuousAfStartRate;
 
-        static const float kAeScanStartRate;
-        static const nsecs_t kMinAeDuration;
-        static const nsecs_t kMaxAeDuration;
-        static const nsecs_t kMinPrecaptureAeDuration;
-        static const nsecs_t kMaxPrecaptureAeDuration;
-
-        static const nsecs_t kNormalExposureTime;
-        static const nsecs_t kExposureJump;
-        static const nsecs_t kMinExposureTime;
-
         EmulatedFakeCamera2 *mParent;
 
         bool mRunning;
@@ -345,26 +312,17 @@
         uint8_t mAeState;
         uint8_t mAwbState;
 
-        // Current control parameters
-        nsecs_t mExposureTime;
-
         // Private to threadLoop and its utility methods
 
         nsecs_t mAfScanDuration;
-        nsecs_t mAeScanDuration;
         bool mLockAfterPassiveScan;
 
-        // Utility methods for AF
+        // Utility methods
         int processAfTrigger(uint8_t afMode, uint8_t afState);
         int maybeStartAfScan(uint8_t afMode, uint8_t afState);
         int updateAfScan(uint8_t afMode, uint8_t afState, nsecs_t *maxSleep);
         void updateAfState(uint8_t newState, int32_t triggerId);
 
-        // Utility methods for precapture trigger
-        int processPrecaptureTrigger(uint8_t aeMode, uint8_t aeState);
-        int maybeStartAeScan(uint8_t aeMode, uint8_t aeState);
-        int updateAeScan(uint8_t aeMode, uint8_t aeState, nsecs_t *maxSleep);
-        void updateAeState(uint8_t newState, int32_t triggerId);
     };
 
     /****************************************************************************
@@ -374,7 +332,6 @@
     static const uint32_t kMaxRawStreamCount = 1;
     static const uint32_t kMaxProcessedStreamCount = 3;
     static const uint32_t kMaxJpegStreamCount = 1;
-    static const uint32_t kMaxReprocessStreamCount = 2;
     static const uint32_t kMaxBufferCount = 4;
     static const uint32_t kAvailableFormats[];
     static const uint32_t kAvailableRawSizes[];
@@ -401,11 +358,7 @@
     uint32_t mProcessedStreamCount;
     uint32_t mJpegStreamCount;
 
-    uint32_t mNextReprocessStreamId;
-    uint32_t mReprocessStreamCount;
-
     KeyedVector<uint32_t, Stream> mStreams;
-    KeyedVector<uint32_t, ReprocessStream> mReprocessStreams;
 
     /** Simulated hardware interfaces */
     sp<Sensor> mSensor;
diff --git a/tools/emulator/system/camera/fake-pipeline2/Base.h b/tools/emulator/system/camera/fake-pipeline2/Base.h
index 057629b..f7ef9b1 100644
--- a/tools/emulator/system/camera/fake-pipeline2/Base.h
+++ b/tools/emulator/system/camera/fake-pipeline2/Base.h
@@ -31,9 +31,6 @@
 
 /* Internal structure for passing buffers across threads */
 struct StreamBuffer {
-    // Positive numbers are output streams
-    // Negative numbers are input reprocess streams
-    // Zero is an auxillary buffer
     int streamId;
     uint32_t width, height;
     uint32_t format;
@@ -50,15 +47,6 @@
     uint32_t stride;
 };
 
-struct ReprocessStream {
-    const camera2_stream_in_ops_t *ops;
-    uint32_t width, height;
-    int32_t format;
-    uint32_t stride;
-    // -1 if the reprocessing stream is independent
-    int32_t sourceStreamId;
-};
-
 } // namespace android;
 
 #endif
diff --git a/tools/emulator/system/camera/fake-pipeline2/JpegCompressor.cpp b/tools/emulator/system/camera/fake-pipeline2/JpegCompressor.cpp
index 20b9634..76fbb94 100644
--- a/tools/emulator/system/camera/fake-pipeline2/JpegCompressor.cpp
+++ b/tools/emulator/system/camera/fake-pipeline2/JpegCompressor.cpp
@@ -77,8 +77,7 @@
     Mutex::Autolock lock(mMutex);
     ALOGV("%s: Starting compression thread", __FUNCTION__);
 
-    // Find source and target buffers. Assumes only one buffer matches
-    // each condition!
+    // Find source and target buffers
 
     bool foundJpeg = false, mFoundAux = false;
     for (size_t i = 0; i < mBuffers->size(); i++) {
@@ -86,7 +85,7 @@
         if (b.format == HAL_PIXEL_FORMAT_BLOB) {
             mJpegBuffer = b;
             mFoundJpeg = true;
-        } else if (b.streamId <= 0) {
+        } else if (b.streamId == -1) {
             mAuxBuffer = b;
             mFoundAux = true;
         }
@@ -217,24 +216,11 @@
 }
 
 void JpegCompressor::cleanUp() {
-    status_t res;
     jpeg_destroy_compress(&mCInfo);
     Mutex::Autolock lock(mBusyMutex);
 
     if (mFoundAux) {
-        if (mAuxBuffer.streamId == 0) {
-            delete[] mAuxBuffer.img;
-        } else {
-            GraphicBufferMapper::get().unlock(*(mAuxBuffer.buffer));
-            const ReprocessStream &s =
-                    mParent->getReprocessStreamInfo(-mAuxBuffer.streamId);
-            res = s.ops->release_buffer(s.ops, mAuxBuffer.buffer);
-            if (res != OK) {
-                ALOGE("Error releasing reprocess buffer %p: %s (%d)",
-                        mAuxBuffer.buffer, strerror(-res), res);
-                mParent->signalError();
-            }
-        }
+        delete[] mAuxBuffer.img;
     }
     delete mBuffers;
     mBuffers = NULL;
diff --git a/tools/emulator/system/camera/fake-pipeline2/Sensor.cpp b/tools/emulator/system/camera/fake-pipeline2/Sensor.cpp
index 73f1fb5..d00b6ee 100644
--- a/tools/emulator/system/camera/fake-pipeline2/Sensor.cpp
+++ b/tools/emulator/system/camera/fake-pipeline2/Sensor.cpp
@@ -293,8 +293,6 @@
                 (float)exposureDuration/1e6, gain);
         mScene.setExposureDuration((float)exposureDuration/1e9);
         mScene.calculateScene(mNextCaptureTime);
-
-        // Might be adding more buffers, so size isn't constant
         for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
             const StreamBuffer &b = (*mNextCapturedBuffers)[i];
             ALOGVV("Sensor capturing buffer %d: stream %d,"
@@ -305,9 +303,6 @@
                 case HAL_PIXEL_FORMAT_RAW_SENSOR:
                     captureRaw(b.img, gain, b.stride);
                     break;
-                case HAL_PIXEL_FORMAT_RGB_888:
-                    captureRGB(b.img, gain, b.stride);
-                    break;
                 case HAL_PIXEL_FORMAT_RGBA_8888:
                     captureRGBA(b.img, gain, b.stride);
                     break;
@@ -316,7 +311,7 @@
                     // Assumes only one BLOB (JPEG) buffer in
                     // mNextCapturedBuffers
                     StreamBuffer bAux;
-                    bAux.streamId = 0;
+                    bAux.streamId = -1;
                     bAux.width = b.width;
                     bAux.height = b.height;
                     bAux.format = HAL_PIXEL_FORMAT_RGB_888;
@@ -324,6 +319,7 @@
                     bAux.buffer = NULL;
                     // TODO: Reuse these
                     bAux.img = new uint8_t[b.width * b.height * 3];
+                    captureRGB(bAux.img, gain, b.stride);
                     mNextCapturedBuffers->push_back(bAux);
                     break;
                 case HAL_PIXEL_FORMAT_YCrCb_420_SP: