Camera3: Support for still capture

- Basic 3A routine with AE precapture support
- Synchronous JPEG compression for BLOB output streams

Change-Id: I329bb6d2c5315e13ed657b096b8bb573c14a3738
diff --git a/camera/EmulatedFakeCamera3.cpp b/camera/EmulatedFakeCamera3.cpp
index 8fd4df0..43ffc44 100644
--- a/camera/EmulatedFakeCamera3.cpp
+++ b/camera/EmulatedFakeCamera3.cpp
@@ -31,6 +31,7 @@
 #include "gralloc_cb.h"
 #include "fake-pipeline2/Sensor.h"
 #include "fake-pipeline2/JpegCompressor.h"
+#include <cmath>
 
 namespace android {
 
@@ -89,6 +90,21 @@
 };
 
 /**
+ * 3A constants
+ */
+
+// Default exposure and gain targets for different scenarios
+const nsecs_t EmulatedFakeCamera3::kNormalExposureTime       = 10 * MSEC;
+const nsecs_t EmulatedFakeCamera3::kFacePriorityExposureTime = 30 * MSEC;
+const int     EmulatedFakeCamera3::kNormalSensitivity        = 100;
+const int     EmulatedFakeCamera3::kFacePrioritySensitivity  = 400;
+const float   EmulatedFakeCamera3::kExposureTrackRate        = 0.1;
+const int     EmulatedFakeCamera3::kPrecaptureMinFrames      = 10;
+const int     EmulatedFakeCamera3::kStableAeMaxFrames        = 100;
+const float   EmulatedFakeCamera3::kExposureWanderMin        = -2;
+const float   EmulatedFakeCamera3::kExposureWanderMax        = 1;
+
+/**
  * Camera device lifecycle methods
  */
 
@@ -147,10 +163,26 @@
     if (res != NO_ERROR) return res;
 
     mReadoutThread = new ReadoutThread(this);
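+    // The HAL3 device only uses the compressor's synchronous interface, so
+    // there is no EmulatedFakeCamera2 parent to pass in.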
+    mJpegCompressor = new JpegCompressor(NULL);
 
     res = mReadoutThread->run("EmuCam3::readoutThread");
     if (res != NO_ERROR) return res;
 
+    // Initialize fake 3A
+
+    mControlMode  = ANDROID_CONTROL_MODE_AUTO;
+    mFacePriority = false;
+    mAeMode       = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
+    mAfMode       = ANDROID_CONTROL_AF_MODE_AUTO;
+    mAwbMode      = ANDROID_CONTROL_AWB_MODE_AUTO;
+    mAeState      = ANDROID_CONTROL_AE_STATE_INACTIVE;
+    mAfState      = ANDROID_CONTROL_AF_STATE_INACTIVE;
+    mAwbState     = ANDROID_CONTROL_AWB_STATE_INACTIVE;
+    mAfTriggerId  = 0;
+    mAeTriggerId  = 0;
+    mAeCurrentExposureTime = kNormalExposureTime;
+    mAeCurrentSensitivity  = kNormalSensitivity;
+
     return EmulatedCamera3::connectCamera(device);
 }
 
@@ -754,7 +786,10 @@
         settings = request->settings;
     }
 
-    // TODO: Apply 3A overrides
+    res = process3A(settings);
+    if (res != OK) {
+        return res;
+    }
 
     // TODO: Handle reprocessing
 
@@ -783,12 +818,12 @@
         const cb_handle_t *privBuffer =
                 static_cast<const cb_handle_t*>(*srcBuf.buffer);
         StreamBuffer destBuf;
-        destBuf.streamId = 0;
-        destBuf.width  = srcBuf.stream->width;
-        destBuf.height = srcBuf.stream->height;
-        destBuf.format = privBuffer->format; // Use real private format
-        destBuf.stride = srcBuf.stream->width; // TODO: query from gralloc
-        destBuf.buffer = srcBuf.buffer;
+        destBuf.streamId = kGenericStreamId;
+        destBuf.width    = srcBuf.stream->width;
+        destBuf.height   = srcBuf.stream->height;
+        destBuf.format   = privBuffer->format; // Use real private format
+        destBuf.stride   = srcBuf.stream->width; // TODO: query from gralloc
+        destBuf.buffer   = srcBuf.buffer;
 
         // Wait on fence
         sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence);
@@ -1198,6 +1233,268 @@
     return OK;
 }
 
+status_t EmulatedFakeCamera3::process3A(CameraMetadata &settings) {
+    /**
+     * Extract top-level 3A controls
+     */
+    status_t res;
+
+    camera_metadata_entry e;
+
+    e = settings.find(ANDROID_CONTROL_MODE);
+    if (e.count == 0) {
+        ALOGE("%s: No control mode entry!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+    uint8_t controlMode = e.data.u8[0];
+
+    e = settings.find(ANDROID_CONTROL_SCENE_MODE);
+    if (e.count == 0) {
+        ALOGE("%s: No scene mode entry!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+    uint8_t sceneMode = e.data.u8[0];
+
+    if (controlMode == ANDROID_CONTROL_MODE_OFF) {
+        mAeState  = ANDROID_CONTROL_AE_STATE_INACTIVE;
+        mAfState  = ANDROID_CONTROL_AF_STATE_INACTIVE;
+        mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
+        update3A(settings);
+        return OK;
+    } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
+        switch(sceneMode) {
+            case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
+                mFacePriority = true;
+                break;
+            default:
+                ALOGE("%s: Emulator doesn't support scene mode %d",
+                        __FUNCTION__, sceneMode);
+                return BAD_VALUE;
+        }
+    } else {
+        mFacePriority = false;
+    }
+
+    // controlMode == AUTO or sceneMode == FACE_PRIORITY
+    // Process individual 3A controls
+
+    res = doFakeAE(settings);
+    if (res != OK) return res;
+
+    res = doFakeAF(settings);
+    if (res != OK) return res;
+
+    res = doFakeAWB(settings);
+    if (res != OK) return res;
+
+    update3A(settings);
+    return OK;
+}
+
+status_t EmulatedFakeCamera3::doFakeAE(CameraMetadata &settings) {
+    camera_metadata_entry e;
+
+    e = settings.find(ANDROID_CONTROL_AE_MODE);
+    if (e.count == 0) {
+        ALOGE("%s: No AE mode entry!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+    uint8_t aeMode = e.data.u8[0];
+
+    switch (aeMode) {
+        case ANDROID_CONTROL_AE_MODE_OFF:
+            // AE is OFF
+            mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
+            return OK;
+        case ANDROID_CONTROL_AE_MODE_ON:
+            // OK for AUTO modes
+            break;
+        default:
+            ALOGE("%s: Emulator doesn't support AE mode %d",
+                    __FUNCTION__, aeMode);
+            return BAD_VALUE;
+    }
+
+    e = settings.find(ANDROID_CONTROL_AE_LOCK);
+    if (e.count == 0) {
+        ALOGE("%s: No AE lock entry!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+    bool aeLocked = (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON);
+
+    e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
+    bool precaptureTrigger = false;
+    if (e.count != 0) {
+        precaptureTrigger =
+                (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START);
+    }
+
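+    // Fake AE: each frame, move the current exposure toward the target by
+    // kExposureTrackRate of the remaining difference, and report AE state
+    // transitions that mimic a real precapture/scan sequence.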
+    if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
+        // Run precapture sequence
+        if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
+            mAeCounter = 0;
+            mAeTriggerId++;
+        }
+
+        if (mFacePriority) {
+            mAeTargetExposureTime = kFacePriorityExposureTime;
+        } else {
+            mAeTargetExposureTime = kNormalExposureTime;
+        }
+
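+        // Precapture completes once at least kPrecaptureMinFrames have
+        // elapsed and the exposure has roughly reached the target.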
+        if (mAeCounter > kPrecaptureMinFrames &&
+                (mAeTargetExposureTime - mAeCurrentExposureTime) <
+                mAeTargetExposureTime / 10) {
+            // Done with precapture
+            mAeCounter = 0;
+            mAeState = aeLocked ? ANDROID_CONTROL_AE_STATE_LOCKED :
+                    ANDROID_CONTROL_AE_STATE_CONVERGED;
+        } else {
+            // Converge some more
+            mAeCurrentExposureTime +=
+                    (mAeTargetExposureTime - mAeCurrentExposureTime) *
+                    kExposureTrackRate;
+            mAeCounter++;
+            mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
+        }
+
+    } else if (!aeLocked) {
+        // Run standard occasional AE scan
+        switch (mAeState) {
+            case ANDROID_CONTROL_AE_STATE_CONVERGED:
+            case ANDROID_CONTROL_AE_STATE_INACTIVE:
+                mAeCounter++;
+                if (mAeCounter > kStableAeMaxFrames) {
+                    mAeTargetExposureTime =
+                            mFacePriority ? kFacePriorityExposureTime :
+                            kNormalExposureTime;
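+                    // Wander to a new target a random number of stops away,
+                    // within [kExposureWanderMin, kExposureWanderMax].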
+                    float exposureStep = ((double)rand() / RAND_MAX) *
+                            (kExposureWanderMax - kExposureWanderMin) +
+                            kExposureWanderMin;
+                    mAeTargetExposureTime *= std::pow(2, exposureStep);
+                    mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
+                }
+                break;
+            case ANDROID_CONTROL_AE_STATE_SEARCHING:
+                mAeCurrentExposureTime +=
+                        (mAeTargetExposureTime - mAeCurrentExposureTime) *
+                        kExposureTrackRate;
+                if (abs(mAeTargetExposureTime - mAeCurrentExposureTime) <
+                        mAeTargetExposureTime / 10) {
+                    // Close enough
+                    mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
+                    mAeCounter = 0;
+                }
+                break;
+            case ANDROID_CONTROL_AE_STATE_LOCKED:
+                mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
+                mAeCounter = 0;
+                break;
+            default:
+                ALOGE("%s: Emulator in unexpected AE state %d",
+                        __FUNCTION__, mAeState);
+                return INVALID_OPERATION;
+        }
+    } else {
+        // AE is locked
+        mAeState = ANDROID_CONTROL_AE_STATE_LOCKED;
+    }
+
+    return OK;
+}
+
+status_t EmulatedFakeCamera3::doFakeAF(CameraMetadata &settings) {
+    camera_metadata_entry e;
+
+    e = settings.find(ANDROID_CONTROL_AF_MODE);
+    if (e.count == 0) {
+        ALOGE("%s: No AF mode entry!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+    uint8_t afMode = e.data.u8[0];
+
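+    // Only mode validation for now; no fake AF scan is simulated, so mAfState
+    // is left unchanged except when AF is switched off.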
+    switch (afMode) {
+        case ANDROID_CONTROL_AF_MODE_OFF:
+            mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
+            return OK;
+        case ANDROID_CONTROL_AF_MODE_AUTO:
+        case ANDROID_CONTROL_AF_MODE_MACRO:
+        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
+        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
+            if (!mFacingBack) {
+                ALOGE("%s: Front camera doesn't support AF mode %d",
+                        __FUNCTION__, afMode);
+                return BAD_VALUE;
+            }
+            // OK
+            break;
+        default:
+            ALOGE("%s: Emulator doesn't support AF mode %d",
+                    __FUNCTION__, afMode);
+            return BAD_VALUE;
+    }
+
+    return OK;
+}
+
+status_t EmulatedFakeCamera3::doFakeAWB(CameraMetadata &settings) {
+    camera_metadata_entry e;
+
+    e = settings.find(ANDROID_CONTROL_AWB_MODE);
+    if (e.count == 0) {
+        ALOGE("%s: No AWB mode entry!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+    uint8_t awbMode = e.data.u8[0];
+
+    // TODO: Add white balance simulation
+
+    switch (awbMode) {
+        case ANDROID_CONTROL_AWB_MODE_OFF:
+            mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
+            return OK;
+        case ANDROID_CONTROL_AWB_MODE_AUTO:
+        case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
+        case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
+        case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
+        case ANDROID_CONTROL_AWB_MODE_SHADE:
+            // OK
+            break;
+        default:
+            ALOGE("%s: Emulator doesn't support AWB mode %d",
+                    __FUNCTION__, awbMode);
+            return BAD_VALUE;
+    }
+
+    return OK;
+}
+
+void EmulatedFakeCamera3::update3A(CameraMetadata &settings) {
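+    // Publish the fake 3A state into the result settings; while AE is active,
+    // also override the requested sensor exposure and sensitivity.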
+    if (mAeState != ANDROID_CONTROL_AE_STATE_INACTIVE) {
+        settings.update(ANDROID_SENSOR_EXPOSURE_TIME,
+                &mAeCurrentExposureTime, 1);
+        settings.update(ANDROID_SENSOR_SENSITIVITY,
+                &mAeCurrentSensitivity, 1);
+    }
+
+    settings.update(ANDROID_CONTROL_AE_STATE,
+            &mAeState, 1);
+    settings.update(ANDROID_CONTROL_AF_STATE,
+            &mAfState, 1);
+    settings.update(ANDROID_CONTROL_AWB_STATE,
+            &mAwbState, 1);
+    /**
+     * TODO: Trigger IDs need a think-through
+     */
+    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
+            &mAeTriggerId, 1);
+    settings.update(ANDROID_CONTROL_AF_TRIGGER_ID,
+            &mAfTriggerId, 1);
+}
+
 void EmulatedFakeCamera3::signalReadoutIdle() {
     Mutex::Autolock l(mLock);
     // Need to check isIdle again because waiting on mLock may have allowed
@@ -1286,6 +1583,20 @@
             mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
     if (!gotFrame) return true;
 
+    // Check if we need to JPEG encode a buffer
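+    // Compression runs synchronously here so the BLOB buffer is filled before
+    // the capture result is constructed below.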
+
+    for (size_t i = 0; i < mCurrentRequest.buffers->size(); i++) {
+        if ((*mCurrentRequest.buffers)[i].stream->format ==
+                HAL_PIXEL_FORMAT_BLOB) {
+            res = mParent->mJpegCompressor->
+                compressSynchronous(mCurrentRequest.sensorBuffers);
+            if (res != OK) {
+                ALOGE("%s: Error compressing output buffer: %s (%d)",
+                        __FUNCTION__, strerror(-res), res);
+            }
+        }
+    }
+
     // Got everything, construct result
 
     camera3_capture_result result;
diff --git a/camera/EmulatedFakeCamera3.h b/camera/EmulatedFakeCamera3.h
index 74f8496..dd9e6a1 100644
--- a/camera/EmulatedFakeCamera3.h
+++ b/camera/EmulatedFakeCamera3.h
@@ -33,7 +33,8 @@
 
 namespace android {
 
-/* Encapsulates functionality common to all version 3.0 emulated camera devices
+/**
+ * Encapsulates functionality for a v3 HAL camera which produces synthetic data.
  *
  * Note that EmulatedCameraFactory instantiates an object of this class just
  * once, when EmulatedCameraFactory instance gets constructed. Connection to /
@@ -69,7 +70,7 @@
     virtual status_t getCameraInfo(struct camera_info *info);
 
     /****************************************************************************
-     * EmualtedCamera3 abstract API implementation
+     * EmulatedCamera3 abstract API implementation
      ***************************************************************************/
 
 protected:
@@ -98,8 +99,22 @@
 
 private:
 
+    /**
+     * Build the static info metadata buffer for this device
+     */
     status_t constructStaticInfo();
 
+    /**
+     * Run the fake 3A algorithms as needed. May override/modify settings
+     * values.
+     */
+    status_t process3A(CameraMetadata &settings);
+
+    status_t doFakeAE(CameraMetadata &settings);
+    status_t doFakeAF(CameraMetadata &settings);
+    status_t doFakeAWB(CameraMetadata &settings);
+    void     update3A(CameraMetadata &settings);
+
     /** Signal from readout thread that it doesn't have anything to do */
     void     signalReadoutIdle();
 
@@ -112,6 +127,10 @@
     static const uint32_t kMaxJpegStreamCount = 1;
     static const uint32_t kMaxReprocessStreamCount = 2;
     static const uint32_t kMaxBufferCount = 4;
+    // HAL3 itself has no concept of a stream ID, but the fake pipeline does.
+    // Use a positive ID here to distinguish buffers for external (framework)
+    // streams from sensor-generated buffers, which use a nonpositive ID.
+    static const uint32_t kGenericStreamId = 1;
     static const uint32_t kAvailableFormats[];
     static const uint32_t kAvailableRawSizes[];
     static const uint64_t kAvailableRawMinDurations[];
@@ -131,10 +150,10 @@
      ***************************************************************************/
 
     /* HAL interface serialization lock. */
-    Mutex mLock;
+    Mutex              mLock;
 
     /* Facing back (true) or front (false) switch. */
-    bool mFacingBack;
+    bool               mFacingBack;
 
     /**
      * Cache for default templates. Once one is requested, the pointer must be
@@ -151,18 +170,19 @@
     };
 
     // Shortcut to the input stream
-    camera3_stream_t* mInputStream;
+    camera3_stream_t*  mInputStream;
 
-    // All streams, including input stream
-    List<camera3_stream_t*> mStreams;
-
+    typedef List<camera3_stream_t*>           StreamList;
     typedef List<camera3_stream_t*>::iterator StreamIterator;
 
+    // All streams, including input stream
+    StreamList         mStreams;
+
     // Cached settings from latest submitted request
-    CameraMetadata mPrevSettings;
+    CameraMetadata     mPrevSettings;
 
     /** Fake hardware interfaces */
-    sp<Sensor> mSensor;
+    sp<Sensor>         mSensor;
     sp<JpegCompressor> mJpegCompressor;
 
     /** Processing thread for sending out results */
@@ -179,8 +199,18 @@
             Buffers *sensorBuffers;
         };
 
-        void queueCaptureRequest(const Request &r);
-        bool isIdle();
+        /**
+         * Interface to parent class
+         */
+
+        // Place request in the in-flight queue to wait for sensor capture
+        void     queueCaptureRequest(const Request &r);
+
+        // Test if the readout thread is idle (no in-flight requests, not
+        // currently reading out anything)
+        bool     isIdle();
+
+        // Wait until isIdle is true
         status_t waitForReadout();
 
       private:
@@ -202,8 +232,47 @@
         Request mCurrentRequest;
 
     };
-
     sp<ReadoutThread> mReadoutThread;
+
+    /** Fake 3A constants */
+
+    static const nsecs_t kNormalExposureTime;
+    static const nsecs_t kFacePriorityExposureTime;
+    static const int     kNormalSensitivity;
+    static const int     kFacePrioritySensitivity;
+    // Rate of converging AE to new target value, as fraction of difference
+    // between current and target value.
+    static const float   kExposureTrackRate;
+    // Minimum duration for precapture state. May be longer if slow to converge
+    // to target exposure
+    static const int     kPrecaptureMinFrames;
+    // How often to restart AE 'scanning'
+    static const int     kStableAeMaxFrames;
+    // Maximum stop below 'normal' exposure time that we'll wander to while
+    // pretending to converge AE. In powers of 2. (-2 == 1/4 as bright)
+    static const float   kExposureWanderMin;
+    // Maximum stop above 'normal' exposure time that we'll wander to while
+    // pretending to converge AE. In powers of 2. (2 == 4x as bright)
+    static const float   kExposureWanderMax;
+
+    /** Fake 3A state */
+
+    uint8_t mControlMode;
+    bool    mFacePriority;
+    uint8_t mAeState;
+    uint8_t mAfState;
+    uint8_t mAwbState;
+    uint8_t mAeMode;
+    uint8_t mAfMode;
+    uint8_t mAwbMode;
+    int     mAfTriggerId;
+    int     mAeTriggerId;
+
+    int     mAeCounter;
+    nsecs_t mAeCurrentExposureTime;
+    nsecs_t mAeTargetExposureTime;
+    int     mAeCurrentSensitivity;
+
 };
 
 } // namespace android
diff --git a/camera/fake-pipeline2/JpegCompressor.cpp b/camera/fake-pipeline2/JpegCompressor.cpp
index 20b9634..a193ce1 100644
--- a/camera/fake-pipeline2/JpegCompressor.cpp
+++ b/camera/fake-pipeline2/JpegCompressor.cpp
@@ -28,6 +28,7 @@
 JpegCompressor::JpegCompressor(EmulatedFakeCamera2 *parent):
         Thread(false),
         mIsBusy(false),
+        mSynchronous(false),
         mParent(parent),
         mBuffers(NULL),
         mCaptureTime(0) {
@@ -49,7 +50,7 @@
         }
 
         mIsBusy = true;
-
+        mSynchronous = false;
         mBuffers = buffers;
         mCaptureTime = captureTime;
     }
@@ -64,6 +65,30 @@
     return res;
 }
 
+status_t JpegCompressor::compressSynchronous(Buffers *buffers) {
+    status_t res;
+
+    Mutex::Autolock lock(mMutex);
+    {
+        Mutex::Autolock busyLock(mBusyMutex);
+
+        if (mIsBusy) {
+            ALOGE("%s: Already processing a buffer!", __FUNCTION__);
+            return INVALID_OPERATION;
+        }
+
+        mIsBusy = true;
+        mSynchronous = true;
+        mBuffers = buffers;
+    }
+
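+    // Run the encode inline on the caller's thread; cleanUp() will leave the
+    // caller-owned buffer list alone in synchronous mode.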
+    res = compress();
+    if (res == OK) {
+        cleanUp();
+    }
+    return res;
+}
+
 status_t JpegCompressor::cancel() {
     requestExitAndWait();
     return OK;
@@ -75,8 +100,34 @@
 
 bool JpegCompressor::threadLoop() {
     Mutex::Autolock lock(mMutex);
+    status_t res;
     ALOGV("%s: Starting compression thread", __FUNCTION__);
 
+    res = compress();
+
+    if (res == OK) {
+        // Write to JPEG output stream
+
+        ALOGV("%s: Compression complete, pushing to stream %d", __FUNCTION__,
+                mJpegBuffer.streamId);
+
+        GraphicBufferMapper::get().unlock(*(mJpegBuffer.buffer));
+        const Stream &s = mParent->getStreamInfo(mJpegBuffer.streamId);
+        res = s.ops->enqueue_buffer(s.ops, mCaptureTime, mJpegBuffer.buffer);
+        if (res != OK) {
+            ALOGE("%s: Error queueing compressed image buffer %p: %s (%d)",
+                    __FUNCTION__, mJpegBuffer.buffer, strerror(-res), res);
+            mParent->signalError();
+        }
+
+        cleanUp();
+    }
+
+    return false;
+}
+
+status_t JpegCompressor::compress() {
     // Find source and target buffers. Assumes only one buffer matches
     // each condition!
 
@@ -96,7 +147,7 @@
         ALOGE("%s: Unable to find buffers for JPEG source/destination",
                 __FUNCTION__);
         cleanUp();
-        return false;
+        return BAD_VALUE;
     }
 
     // Set up error management
@@ -109,7 +160,7 @@
     mCInfo.err->error_exit = jpegErrorHandler;
 
     jpeg_create_compress(&mCInfo);
-    if (checkError("Error initializing compression")) return false;
+    if (checkError("Error initializing compression")) return NO_INIT;
 
     // Route compressed data straight to output stream buffer
 
@@ -129,12 +180,12 @@
     mCInfo.in_color_space = JCS_RGB;
 
     jpeg_set_defaults(&mCInfo);
-    if (checkError("Error configuring defaults")) return false;
+    if (checkError("Error configuring defaults")) return NO_INIT;
 
     // Do compression
 
     jpeg_start_compress(&mCInfo, TRUE);
-    if (checkError("Error starting compression")) return false;
+    if (checkError("Error starting compression")) return NO_INIT;
 
     size_t rowStride = mAuxBuffer.stride * 3;
     const size_t kChunkSize = 32;
@@ -145,37 +196,20 @@
                     (mAuxBuffer.img + (i + mCInfo.next_scanline) * rowStride);
         }
         jpeg_write_scanlines(&mCInfo, chunk, kChunkSize);
-        if (checkError("Error while compressing")) return false;
+        if (checkError("Error while compressing")) return NO_INIT;
         if (exitPending()) {
             ALOGV("%s: Cancel called, exiting early", __FUNCTION__);
             cleanUp();
-            return false;
+            return TIMED_OUT;
         }
     }
 
     jpeg_finish_compress(&mCInfo);
-    if (checkError("Error while finishing compression")) return false;
-
-    // Write to JPEG output stream
-
-    ALOGV("%s: Compression complete, pushing to stream %d", __FUNCTION__,
-          mJpegBuffer.streamId);
-
-    GraphicBufferMapper::get().unlock(*(mJpegBuffer.buffer));
-    status_t res;
-    const Stream &s = mParent->getStreamInfo(mJpegBuffer.streamId);
-    res = s.ops->enqueue_buffer(s.ops, mCaptureTime, mJpegBuffer.buffer);
-    if (res != OK) {
-        ALOGE("%s: Error queueing compressed image buffer %p: %s (%d)",
-                __FUNCTION__, mJpegBuffer.buffer, strerror(-res), res);
-        mParent->signalError();
-    }
+    if (checkError("Error while finishing compression")) return NO_INIT;
 
     // All done
 
-    cleanUp();
-
-    return false;
+    return OK;
 }
 
 bool JpegCompressor::isBusy() {
@@ -224,7 +258,7 @@
     if (mFoundAux) {
         if (mAuxBuffer.streamId == 0) {
             delete[] mAuxBuffer.img;
-        } else {
+        } else if (!mSynchronous) {
             GraphicBufferMapper::get().unlock(*(mAuxBuffer.buffer));
             const ReprocessStream &s =
                     mParent->getReprocessStreamInfo(-mAuxBuffer.streamId);
@@ -236,7 +270,10 @@
             }
         }
     }
-    delete mBuffers;
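+    // In synchronous mode the buffer list is owned by the caller, so don't
+    // free it here.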
+    if (!mSynchronous) {
+        delete mBuffers;
+    }
+
     mBuffers = NULL;
 
     mIsBusy = false;
diff --git a/camera/fake-pipeline2/JpegCompressor.h b/camera/fake-pipeline2/JpegCompressor.h
index ea2a84f..56e420f 100644
--- a/camera/fake-pipeline2/JpegCompressor.h
+++ b/camera/fake-pipeline2/JpegCompressor.h
@@ -51,6 +51,9 @@
     status_t start(Buffers *buffers,
             nsecs_t captureTime);
 
+    // Compress and block until buffer is complete.
+    status_t compressSynchronous(Buffers *buffers);
+
     status_t cancel();
 
     bool isBusy();
@@ -65,6 +68,7 @@
     Mutex mBusyMutex;
     bool mIsBusy;
     Condition mDone;
+    bool mSynchronous;
 
     Mutex mMutex;
 
@@ -94,6 +98,8 @@
     static void jpegTermDestination(j_compress_ptr cinfo);
 
     bool checkError(const char *msg);
+    status_t compress();
+
     void cleanUp();
 
     /**