merge in nyc-release history after reset to nyc-dev
diff --git a/QCamera2/Android.mk b/QCamera2/Android.mk
index 4b6df30..8081fe7 100644
--- a/QCamera2/Android.mk
+++ b/QCamera2/Android.mk
@@ -79,11 +79,11 @@
 LOCAL_CFLAGS += -DTARGET_TS_MAKEUP
 LOCAL_C_INCLUDES += $(LOCAL_PATH)/HAL/tsMakeuplib/include
 endif
-ifneq (,$(filter msm8974 msm8916 msm8226 msm8610 msm8916 apq8084 msm8084 msm8994 msm8992 msm8952 msm8937 titanium msm8996,$(TARGET_BOARD_PLATFORM)))
+ifneq (,$(filter msm8974 msm8916 msm8226 msm8610 apq8084 msm8084 msm8994 msm8992 msm8952 msm8937 msm8953 msm8996 msmcobalt,$(TARGET_BOARD_PLATFORM)))
     LOCAL_CFLAGS += -DVENUS_PRESENT
 endif
 
-ifneq (,$(filter msm8996,$(TARGET_BOARD_PLATFORM)))
+ifneq (,$(filter msm8996 msmcobalt,$(TARGET_BOARD_PLATFORM)))
     LOCAL_CFLAGS += -DUBWC_PRESENT
 endif
 
diff --git a/QCamera2/HAL/QCamera2HWI.cpp b/QCamera2/HAL/QCamera2HWI.cpp
index e419595..b5719c6 100644
--- a/QCamera2/HAL/QCamera2HWI.cpp
+++ b/QCamera2/HAL/QCamera2HWI.cpp
@@ -72,6 +72,7 @@
 #define CAMERA_DEFERRED_MAP_BUF_TIMEOUT 2000000000 // 2 seconds
 #define CAMERA_MIN_METADATA_BUFFERS 10 // Need at least 10 for ZSL snapshot
 #define CAMERA_INITIAL_MAPPABLE_PREVIEW_BUFFERS 5
+#define CAMERA_MAX_PARAM_APPLY_DELAY 3
 
 namespace qcamera {
 
@@ -671,11 +672,6 @@
     hw->m_bRecordStarted = true;
     LOGI("[KPI Perf]: X ret = %d", ret);
 
-    if (ret == NO_ERROR) {
-        // Set power Hint for video encoding
-        hw->m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, true);
-    }
-
     return ret;
 }
 
@@ -701,9 +697,6 @@
     LOGI("[KPI Perf]: E PROFILE_STOP_RECORDING camera id %d",
              hw->getCameraId());
 
-    // Disable power hint for video encoding
-    hw->m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
-
     hw->lockAPI();
     qcamera_api_result_t apiResult;
     int32_t ret = hw->processAPI(QCAMERA_SM_EVT_STOP_RECORDING, NULL);
@@ -1651,6 +1644,7 @@
       mPLastFrameCount(0),
       mPLastFpsTime(0),
       mPFps(0),
+      mInstantAecFrameCount(0),
       m_bIntJpegEvtPending(false),
       m_bIntRawEvtPending(false),
       mReprocJob(0),
@@ -1668,8 +1662,7 @@
       mJpegHandleOwner(false),
       mMetadataMem(NULL),
       mCACDoneReceived(false),
-      m_bNeedRestart(false),
-      mIgnoredPreviewCount(0)
+      m_bNeedRestart(false)
 {
 #ifdef TARGET_TS_MAKEUP
     memset(&mFaceRect, -1, sizeof(mFaceRect));
@@ -1712,6 +1705,9 @@
 
     pthread_mutex_init(&mGrallocLock, NULL);
     mEnqueuedBuffers = 0;
+    mFrameSkipStart = 0;
+    mFrameSkipEnd = 0;
+    mLastPreviewFrameID = 0;
 
     //Load and read GPU library.
     lib_surface_utils = NULL;
@@ -2448,6 +2444,9 @@
             if (bufferCnt > CAMERA_ISP_PING_PONG_BUFFERS )
                 bufferCnt -= CAMERA_ISP_PING_PONG_BUFFERS;
 
+            if (mParameters.getRecordingHintValue() == true)
+                bufferCnt += EXTRA_ZSL_PREVIEW_STREAM_BUF;
+
             // Add the display minUndequeCount count on top of camera requirement
             bufferCnt += minUndequeCount;
 
@@ -3394,7 +3393,9 @@
     m_perfLock.lock_acq();
 
     updateThermalLevel((void *)&mThermalLevel);
-    mIgnoredPreviewCount = 0;
+
+    setDisplayFrameSkip();
+
     // start preview stream
     if (mParameters.isZSLMode() && mParameters.getRecordingHintValue() != true) {
         rc = startChannel(QCAMERA_CH_TYPE_ZSL);
@@ -3638,6 +3639,11 @@
         rc = pChannel->start();
     }
 
+    if (rc == NO_ERROR) {
+        // Set power Hint for video encoding
+        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, true);
+    }
+
     LOGI("X rc = %d", rc);
     return rc;
 }
@@ -3663,6 +3669,9 @@
     }
     int rc = stopChannel(QCAMERA_CH_TYPE_VIDEO);
 
+    // Disable power hint for video encoding
+    m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
+
     LOGI("X rc = %d", rc);
     return rc;
 }
@@ -3876,6 +3885,7 @@
             rc = configureAFBracketing(false);
         } else if (mParameters.isOptiZoomEnabled()) {
             rc = mParameters.setAndCommitZoom(mZoomLevel);
+            setDisplaySkip(FALSE, CAMERA_MAX_PARAM_APPLY_DELAY);
         } else if (mParameters.isStillMoreEnabled()) {
             cam_still_more_t stillmore_config = mParameters.getStillMoreSettings();
             stillmore_config.burst_count = 0;
@@ -3926,7 +3936,7 @@
     mInputCount = 0;
     mAdvancedCaptureConfigured = true;
     /* Display should be disabled for advanced modes */
-    bool bDisplay = false;
+    bool bSkipDisplay = true;
 
     if (getRelatedCamSyncInfo()->mode == CAM_MODE_SECONDARY) {
         // no Advance capture settings for Aux camera
@@ -3937,7 +3947,7 @@
     /* Do not stop display if in stillmore livesnapshot */
     if (mParameters.isStillMoreEnabled() &&
             mParameters.isSeeMoreEnabled()) {
-        bDisplay = true;
+        bSkipDisplay = false;
     }
     if (mParameters.isUbiFocusEnabled() || mParameters.isUbiRefocus()) {
         rc = configureAFBracketing();
@@ -3965,15 +3975,15 @@
         rc = mParameters.configFrameCapture(TRUE);
     } else if (mFlashNeeded && !mLongshotEnabled) {
         rc = mParameters.configFrameCapture(TRUE);
-        bDisplay = true;
+        bSkipDisplay = false;
     } else {
         LOGH("Advanced Capture feature not enabled!! ");
         mAdvancedCaptureConfigured = false;
-        bDisplay = true;
+        bSkipDisplay = false;
     }
 
     LOGH("Stop preview temporarily for advanced captures");
-    m_stateMachine.setDisplayFrame(bDisplay);
+    setDisplaySkip(bSkipDisplay);
 
     LOGH("X rc = %d", rc);
     return rc;
@@ -4462,7 +4472,11 @@
             mJpegJob = queueDeferredWork(CMD_DEF_CREATE_JPEG_SESSION,
                     args);
             if (mJpegJob == 0) {
-                LOGE("Failure: Unable to create jpeg session");
+                LOGE("Failed to queue CREATE_JPEG_SESSION");
+                if (NO_ERROR != waitDeferredWork(mReprocJob)) {
+                    LOGE("Reprocess Deferred work failed");
+                }
+                m_postprocessor.stop();
                 return -ENOMEM;
             }
 
@@ -4581,6 +4595,10 @@
                         args);
                 if (mJpegJob == 0) {
                     LOGE("Failed to queue CREATE_JPEG_SESSION");
+                    if (NO_ERROR != waitDeferredWork(mReprocJob)) {
+                        LOGE("Reprocess Deferred work failed");
+                    }
+                    m_postprocessor.stop();
                     return -ENOMEM;
                 }
 
@@ -4845,7 +4863,7 @@
 
     unconfigureAdvancedCapture();
     LOGH("Enable display frames again");
-    m_stateMachine.setDisplayFrame(TRUE);
+    setDisplaySkip(FALSE);
 
     if (!mLongshotEnabled) {
         m_perfLock.lock_rel();
@@ -5250,14 +5268,32 @@
         goto end;
     }
 
-    // start post processor
-    if (NO_ERROR != waitDeferredWork(mInitPProcJob)) {
-        LOGE("Init PProc Deferred work failed");
-        return UNKNOWN_ERROR;
+    DeferWorkArgs args;
+    memset(&args, 0, sizeof(DeferWorkArgs));
+
+    args.pprocArgs = pChannel;
+
+    // No need to wait for mInitPProcJob here, because it was
+    // queued in startPreview, and will definitely be processed before
+    // mReprocJob can begin.
+    mReprocJob = queueDeferredWork(CMD_DEF_PPROC_START,
+            args);
+    if (mReprocJob == 0) {
+        LOGE("Failed to queue CMD_DEF_PPROC_START");
+        rc = -ENOMEM;
+        goto end;
     }
-    rc = m_postprocessor.start(pChannel);
-    if (NO_ERROR != rc) {
-        LOGE("Post-processor start failed %d", rc);
+
+    // Create JPEG session
+    mJpegJob = queueDeferredWork(CMD_DEF_CREATE_JPEG_SESSION,
+            args);
+    if (mJpegJob == 0) {
+        LOGE("Failed to queue CREATE_JPEG_SESSION");
+        if (NO_ERROR != waitDeferredWork(mReprocJob)) {
+            LOGE("Reprocess Deferred work failed");
+        }
+        m_postprocessor.stop();
+        rc = -ENOMEM;
         goto end;
     }
 
@@ -5275,6 +5311,12 @@
         rc = configureOnlineRotation(*m_channels[QCAMERA_CH_TYPE_SNAPSHOT]);
         if (rc != NO_ERROR) {
             LOGE("online rotation failed");
+            if (NO_ERROR != waitDeferredWork(mReprocJob)) {
+                LOGE("Reprocess Deferred work failed");
+            }
+            if (NO_ERROR != waitDeferredWork(mJpegJob)) {
+                LOGE("Jpeg Deferred work failed");
+            }
             m_postprocessor.stop();
             return rc;
         }
@@ -5312,6 +5354,7 @@
             // Find and try to link a metadata stream from preview channel
             QCameraChannel *pMetaChannel = NULL;
             QCameraStream *pMetaStream = NULL;
+            QCameraStream *pPreviewStream = NULL;
 
             if (m_channels[QCAMERA_CH_TYPE_PREVIEW] != NULL) {
                 pMetaChannel = m_channels[QCAMERA_CH_TYPE_PREVIEW];
@@ -5319,10 +5362,12 @@
                 QCameraStream *pStream = NULL;
                 for (uint32_t i = 0 ; i < streamNum ; i++ ) {
                     pStream = pMetaChannel->getStreamByIndex(i);
-                    if ((NULL != pStream) &&
-                            (CAM_STREAM_TYPE_METADATA == pStream->getMyType())) {
-                        pMetaStream = pStream;
-                        break;
+                    if (NULL != pStream) {
+                        if (CAM_STREAM_TYPE_METADATA == pStream->getMyType()) {
+                            pMetaStream = pStream;
+                        } else if (CAM_STREAM_TYPE_PREVIEW == pStream->getMyType()) {
+                            pPreviewStream = pStream;
+                        }
                     }
                 }
             }
@@ -5333,6 +5378,12 @@
                     LOGE("Metadata stream link failed %d", rc);
                 }
             }
+            if ((NULL != pMetaChannel) && (NULL != pPreviewStream)) {
+                rc = pChannel->linkStream(pMetaChannel, pPreviewStream);
+                if (NO_ERROR != rc) {
+                    LOGE("Preview stream link failed %d", rc);
+                }
+            }
         }
         rc = pChannel->start();
     }
@@ -5362,7 +5413,7 @@
 
     unconfigureAdvancedCapture();
     LOGH("Enable display frames again");
-    m_stateMachine.setDisplayFrame(TRUE);
+    setDisplaySkip(FALSE);
 
     if (!mLongshotEnabled) {
         m_perfLock.lock_rel();
@@ -5925,6 +5976,32 @@
 int32_t QCamera2HardwareInterface::processAEInfo(cam_3a_params_t &ae_params)
 {
     mParameters.updateAEInfo(ae_params);
+    if (mParameters.isInstantAECEnabled()) {
+        // Reset Instant AEC info only if instant aec enabled.
+        bool bResetInstantAec = false;
+        if (ae_params.settled) {
+            // If AEC settled, reset instant AEC
+            bResetInstantAec = true;
+        } else if ((mParameters.isInstantCaptureEnabled()) &&
+                (mInstantAecFrameCount >= mParameters.getAecFrameBoundValue())) {
+            // If AEC has not settled and instant capture is enabled,
+            // reset instant AEC only when the frame count is
+            // greater than or equal to the AEC frame bound value.
+            bResetInstantAec = true;
+        } else if ((mParameters.isInstantAECEnabled()) &&
+                (mInstantAecFrameCount >= mParameters.getAecSkipDisplayFrameBound())) {
+            // If AEC has not settled and only instant AEC is enabled,
+            // reset instant AEC only when the frame count is
+            // greater than or equal to the AEC skip display frame bound value.
+            bResetInstantAec = true;
+        }
+
+        if (bResetInstantAec) {
+            LOGD("setting instant AEC to false");
+            mParameters.setInstantAEC(false, true);
+            mInstantAecFrameCount = 0;
+        }
+    }
     return NO_ERROR;
 }
 
@@ -6303,7 +6380,7 @@
  *              none-zero failure code
  *==========================================================================*/
 int32_t QCamera2HardwareInterface::processASDUpdate(
-        __unused cam_auto_scene_t scene)
+        __unused cam_asd_decision_t asd_decision)
 {
     size_t data_len = sizeof(cam_auto_scene_t);
     size_t buffer_len = 1 *sizeof(int)       //meta type
@@ -6327,7 +6404,7 @@
 #ifndef VANILLA_HAL
     pASDData[0] = CAMERA_META_DATA_ASD;
     pASDData[1] = (int)data_len;
-    pASDData[2] = scene;
+    pASDData[2] = asd_decision.detected_scene;
 
     qcamera_callback_argm_t cbArg;
     memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
@@ -6674,7 +6751,7 @@
         }
     }
 
-    if (((mParameters.isFDInVideoEnabled())
+    if (((mParameters.fdModeInVideo())
             || (mParameters.getDcrf() == true)
             || (mParameters.getRecordingHintValue() != true))
             && (!mParameters.isSecureMode())) {
@@ -6803,9 +6880,9 @@
     mm_camera_channel_attr_t attr;
     memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
     attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
-    attr.look_back = mParameters.getZSLBackLookCount();
+    attr.look_back = 0; //wait for future frame for liveshot
     attr.post_frame_skip = mParameters.getZSLBurstInterval();
-    attr.water_mark = mParameters.getZSLQueueDepth();
+    attr.water_mark = 1; //hold min buffers possible in Q
     attr.max_unmatched_frames = mParameters.getMaxUnmatchedFramesInQueue();
     attr.priority = MM_CAMERA_SUPER_BUF_PRIORITY_LOW;
     rc = pChannel->init(&attr, snapshot_channel_cb_routine, this);
@@ -6953,6 +7030,8 @@
     }
     attr.water_mark = mParameters.getZSLQueueDepth();
     attr.max_unmatched_frames = mParameters.getMaxUnmatchedFramesInQueue();
+    attr.user_expected_frame_id =
+        mParameters.isInstantCaptureEnabled() ? (uint8_t)mParameters.getAecFrameBoundValue() : 0;
 
     //Enabled matched queue
     if (getRelatedCamSyncInfo()->is_frame_sync_enabled) {
@@ -7883,7 +7962,7 @@
                 sendCommand(CAMERA_CMD_LONGSHOT_OFF, arg, arg);
             }
             if (mParameters.isFaceDetectionEnabled()
-                    && (!mParameters.isFDInVideoEnabled())) {
+                    && (!mParameters.fdModeInVideo())) {
                 sendCommand(CAMERA_CMD_STOP_FACE_DETECTION, arg, arg);
             }
             if (mParameters.isHistogramEnabled()) {
@@ -9108,16 +9187,18 @@
  * DESCRIPTION: returns whether preview frame need to be displayed
  *
  * PARAMETERS :
+ *   @frameID : frameID of frame to be processed
  *
  * RETURN     : int32_t type of status
  *              NO_ERROR  -- success
  *              none-zero failure code
  *==========================================================================*/
-bool QCamera2HardwareInterface::needProcessPreviewFrame()
+bool QCamera2HardwareInterface::needProcessPreviewFrame(uint32_t frameID)
 {
-    return m_stateMachine.isPreviewRunning()
-            && m_stateMachine.isDisplayFrameNeeded();
-};
+    return ((m_stateMachine.isPreviewRunning()) &&
+            (!isDisplayFrameToSkip(frameID)) &&
+            (!mParameters.isInstantAECEnabled()));
+}
 
 /*===========================================================================
  * FUNCTION   : needSendPreviewCallback
@@ -9138,6 +9219,74 @@
 };
 
 /*===========================================================================
+ * FUNCTION   : setDisplaySkip
+ *
+ * DESCRIPTION: set range of frames to skip for preview
+ *
+ * PARAMETERS :
+ *   @enabled : TRUE to start skipping frames to display
+ *              FALSE to stop skipping frames to display
+ *   @skipCnt : Number of frames to skip. 0 by default
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::setDisplaySkip(bool enabled, uint8_t skipCnt)
+{
+    pthread_mutex_lock(&mGrallocLock);
+    if (enabled) {
+        setDisplayFrameSkip();
+        setDisplayFrameSkip(mLastPreviewFrameID + skipCnt + 1);
+    } else {
+        setDisplayFrameSkip(mFrameSkipStart, (mLastPreviewFrameID + skipCnt + 1));
+    }
+    pthread_mutex_unlock(&mGrallocLock);
+}
+
+/*===========================================================================
+ * FUNCTION   : setDisplayFrameSkip
+ *
+ * DESCRIPTION: set range of frames to skip for preview
+ *
+ * PARAMETERS :
+ *   @start   : frameId to start skip
+ *   @end     : frameId to stop skip
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::setDisplayFrameSkip(uint32_t start,
+        uint32_t end)
+{
+    if (start == 0) {
+        mFrameSkipStart = 0;
+        mFrameSkipEnd = 0;
+        return;
+    }
+    if ((mFrameSkipStart == 0) || (mFrameSkipStart > start)) {
+        mFrameSkipStart = start;
+    }
+    if ((end == 0) || (end > mFrameSkipEnd)) {
+        mFrameSkipEnd = end;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : isDisplayFrameToSkip
+ *
+ * DESCRIPTION: function to determine if the input frame falls in the skip range
+ *
+ * PARAMETERS :
+ *   @frameId : frameId to verify
+ *
+ * RETURN     : true : need to skip
+ *              false: no need to skip
+ *==========================================================================*/
+bool QCamera2HardwareInterface::isDisplayFrameToSkip(uint32_t frameId)
+{
+    return ((mFrameSkipStart != 0) && (frameId >= mFrameSkipStart) &&
+            (frameId <= mFrameSkipEnd || mFrameSkipEnd == 0)) ? TRUE : FALSE;
+}
+
+/*===========================================================================
  * FUNCTION   : prepareHardwareForSnapshot
  *
  * DESCRIPTION: prepare hardware for snapshot, such as LED
@@ -9332,6 +9481,13 @@
                         QCameraChannel * pChannel = dw->args.pprocArgs;
                         assert(pChannel);
 
+                        int32_t ret = pme->getDefJobStatus(pme->mReprocJob);
+                        if (ret != NO_ERROR) {
+                            job_status = ret;
+                            LOGE("Jpeg create failed");
+                            break;
+                        }
+
                         if (pme->m_postprocessor.createJpegSession(pChannel)
                             != NO_ERROR) {
                             LOGE("cannot create JPEG session");
diff --git a/QCamera2/HAL/QCamera2HWI.h b/QCamera2/HAL/QCamera2HWI.h
index c1b7d2d..4caec59 100644
--- a/QCamera2/HAL/QCamera2HWI.h
+++ b/QCamera2/HAL/QCamera2HWI.h
@@ -386,7 +386,7 @@
     int32_t processAutoFocusEvent(cam_auto_focus_data_t &focus_data);
     int32_t processZoomEvent(cam_crop_data_t &crop_info);
     int32_t processPrepSnapshotDoneEvent(cam_prep_snapshot_state_t prep_snapshot_state);
-    int32_t processASDUpdate(cam_auto_scene_t scene);
+    int32_t processASDUpdate(cam_asd_decision_t asd_decision);
     int32_t processJpegNotify(qcamera_jpeg_evt_payload_t *jpeg_job);
     int32_t processHDRData(cam_asd_hdr_scene_data_t hdr_scene);
     int32_t processRetroAECUnlock();
@@ -444,7 +444,7 @@
     int32_t setHistogram(bool histogram_en);
     int32_t setFaceDetection(bool enabled);
     int32_t prepareHardwareForSnapshot(int32_t afNeeded);
-    bool needProcessPreviewFrame();
+    bool needProcessPreviewFrame(uint32_t frameID);
     bool needSendPreviewCallback();
     bool isNoDisplayMode() {return mParameters.isNoDisplayMode();};
     bool isZSLMode() {return mParameters.isZSLMode();};
@@ -554,6 +554,15 @@
     inline bool getNeedRestart() {return m_bNeedRestart;}
     inline void setNeedRestart(bool needRestart) {m_bNeedRestart = needRestart;}
 
+    /*Start display skip. Skip starts after
+    skipCnt number of frames from current frame*/
+    void setDisplaySkip(bool enabled, uint8_t skipCnt = 0);
+    /*Caller can specify a range of frameIDs to skip.
+    If end is 0, all the frames after start will be skipped*/
+    void setDisplayFrameSkip(uint32_t start = 0, uint32_t end = 0);
+    /*Verifies if frameId is valid to skip*/
+    bool isDisplayFrameToSkip(uint32_t frameId);
+
 private:
     camera_device_t   mCameraDevice;
     uint32_t          mCameraId;
@@ -633,6 +642,7 @@
     int mPLastFrameCount;
     nsecs_t mPLastFpsTime;
     double mPFps;
+    uint8_t mInstantAecFrameCount;
 
     //eztune variables for communication with eztune server at backend
     bool m_bIntJpegEvtPending;
@@ -764,8 +774,14 @@
     bool m_bNeedRestart;
     Mutex mMapLock;
     Condition mMapCond;
-    // Count to determine the number of preview frames ignored for displaying.
-    uint8_t mIgnoredPreviewCount;
+
+    //Used to decide the next frameID to be skipped
+    uint32_t mLastPreviewFrameID;
+    //FrameID to start frame skip.
+    uint32_t mFrameSkipStart;
+    /*FrameID to stop frameskip. If this is not set,
+    all frames are skipped till we set this*/
+    uint32_t mFrameSkipEnd;
 };
 
 }; // namespace qcamera
diff --git a/QCamera2/HAL/QCamera2HWICallbacks.cpp b/QCamera2/HAL/QCamera2HWICallbacks.cpp
index 6c1e4e1..c6312dd 100644
--- a/QCamera2/HAL/QCamera2HWICallbacks.cpp
+++ b/QCamera2/HAL/QCamera2HWICallbacks.cpp
@@ -264,13 +264,9 @@
     }
 
     // Wait on Postproc initialization if needed
-    if (NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) {
-        LOGE("Reprocess Deferred work failed");
-        return;
-    }
-
-    // send to postprocessor
-    if (NO_ERROR != pme->m_postprocessor.processData(frame)) {
+    // then send to postprocessor
+    if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
+            (NO_ERROR != pme->m_postprocessor.processData(frame))) {
         LOGE("Failed to trigger process data");
         pChannel->bufDone(recvd_frame);
         free(frame);
@@ -449,10 +445,9 @@
     }
 
     // Wait on Postproc initialization if needed
-    pme->waitDeferredWork(pme->mReprocJob);
-
-    // send to postprocessor
-    if (NO_ERROR != pme->m_postprocessor.processData(frame)) {
+    // then send to postprocessor
+    if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
+            (NO_ERROR != pme->m_postprocessor.processData(frame))) {
         LOGE("Failed to trigger process data");
         pChannel->bufDone(recvd_frame);
         free(frame);
@@ -726,11 +721,9 @@
         pme->m_bPreviewStarted = false;
     }
 
-    if (!pme->needProcessPreviewFrame()) {
+    if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
         pthread_mutex_lock(&pme->mGrallocLock);
-        // Increment the counter here to make sure,
-        // these many frames will be skipped in preview channel cb as well
-        pme->mIgnoredPreviewCount++;
+        pme->mLastPreviewFrameID = frame->frame_idx;
         pthread_mutex_unlock(&pme->mGrallocLock);
         LOGH("preview is not running, no need to process");
         return;
@@ -754,6 +747,7 @@
 
     if (err == NO_ERROR) {
         pthread_mutex_lock(&pme->mGrallocLock);
+        pme->mLastPreviewFrameID = frame->frame_idx;
         pme->mEnqueuedBuffers++;
         pthread_mutex_unlock(&pme->mGrallocLock);
     } else {
@@ -811,12 +805,24 @@
         free(super_frame);
         return;
     }
+
+    // For instant capture and for instant AEC, keep track of the frame counter.
+    // This count will be used to check against the corresponding bound values.
+    if (pme->mParameters.isInstantAECEnabled() ||
+            pme->mParameters.isInstantCaptureEnabled()) {
+        pme->mInstantAecFrameCount++;
+    }
+
     pthread_mutex_lock(&pme->mGrallocLock);
-    if (!pme->needProcessPreviewFrame() ||
-            pme->mIgnoredPreviewCount > 0) {
-        if (pme->mIgnoredPreviewCount > 0) {
-            pme->mIgnoredPreviewCount--;
-        }
+    if (!stream->isSyncCBEnabled()) {
+        pme->mLastPreviewFrameID = frame->frame_idx;
+    }
+    if (((!stream->isSyncCBEnabled()) &&
+            (!pme->needProcessPreviewFrame(frame->frame_idx))) ||
+            ((stream->isSyncCBEnabled()) &&
+            (memory->isBufOwnedByCamera(frame->buf_idx)))) {
+        // If the buffer is owned by the camera, it was not enqueued to display,
+        // so bufDone it back to the backend.
         pthread_mutex_unlock(&pme->mGrallocLock);
         LOGH("preview is not running, no need to process");
         stream->bufDone(frame->buf_idx);
@@ -974,6 +980,11 @@
     stream->getFrameDimension(preview_dim);
     stream->getFormat(previewFmt);
 
+    yStrideToApp = preview_dim.width;
+    yScanlineToApp = preview_dim.height;
+    uvStrideToApp = yStrideToApp;
+    uvScanlineToApp = yScanlineToApp / 2;
+
     /* The preview buffer size in the callback should be
      * (width*height*bytes_per_pixel). As all preview formats we support,
      * use 12 bits per pixel, buffer size = previewWidth * previewHeight * 3/2.
@@ -1001,11 +1012,6 @@
             uvStride = streamInfo->buf_planes.plane_info.mp[1].stride;
             uvScanline = streamInfo->buf_planes.plane_info.mp[1].scanline;
 
-            yStrideToApp = preview_dim.width;
-            yScanlineToApp = preview_dim.height;
-            uvStrideToApp = yStrideToApp;
-            uvScanlineToApp = yScanlineToApp / 2;
-
             previewBufSize = (size_t)
                     ((yStrideToApp * yScanlineToApp) + (uvStrideToApp * uvScanlineToApp));
 
@@ -1051,8 +1057,19 @@
             }
         }
     } else {
-        LOGE("Invalid preview format for preview callback");
-        return BAD_VALUE;
+        /* Invalid buffer content, but the callback can still be used as a
+        first preview frame trigger in the framework/app */
+        previewBufSize = (size_t)
+                    ((yStrideToApp * yScanlineToApp) +
+                    (uvStrideToApp * uvScanlineToApp));
+        previewBufSizeFromCallback = 0;
+        LOGW("Invalid preview format. Buffer content cannot be processed size = %zu",
+                previewBufSize);
+        dataToApp = mGetMemory(-1, previewBufSize, 1, mCallbackCookie);
+        if (!dataToApp || !dataToApp->data) {
+            LOGE("mGetMemory failed.\n");
+            return NO_MEMORY;
+        }
     }
     qcamera_callback_argm_t cbArg;
     memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
@@ -1124,7 +1141,7 @@
         return;
     }
 
-    if (!pme->needProcessPreviewFrame()) {
+    if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
         LOGH("preview is not running, no need to process");
         stream->bufDone(frame->buf_idx);
         free(super_frame);
@@ -1143,7 +1160,7 @@
     if (NULL != previewMemObj && NULL != preview_mem) {
         pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_PREVIEW);
 
-        if ((pme->needProcessPreviewFrame()) &&
+        if ((pme->needProcessPreviewFrame(frame->frame_idx)) &&
                 pme->needSendPreviewCallback() &&
                 (pme->getRelatedCamSyncInfo()->mode != CAM_MODE_SECONDARY)) {
             qcamera_callback_argm_t cbArg;
@@ -1203,7 +1220,7 @@
         LOGE("preview frame is NLUL");
         goto end;
     }
-    if (!pme->needProcessPreviewFrame()) {
+    if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
         LOGE("preview is not running, no need to process");
         stream->bufDone(frame->buf_idx);
         goto end;
@@ -1226,7 +1243,7 @@
             // Dump RAW frame
             pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_RAW);
             // Notify Preview callback frame
-            if (pme->needProcessPreviewFrame() &&
+            if (pme->needProcessPreviewFrame(frame->frame_idx) &&
                     pme->mDataCb != NULL &&
                     pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) {
                 qcamera_callback_argm_t cbArg;
@@ -1259,7 +1276,7 @@
 
         int fd = previewMemObj->getFd(frame->buf_idx);
         LOGD("Preview frame fd =%d for index = %d ", fd, frame->buf_idx);
-        if (pme->needProcessPreviewFrame() &&
+        if (pme->needProcessPreviewFrame(frame->frame_idx) &&
                 pme->mDataCb != NULL &&
                 pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) {
             // Prepare Callback structure
@@ -1643,7 +1660,9 @@
         LOGI("[KPI Perf]: superbuf frame_idx %d",
                 frame->bufs[0]->frame_idx);
     }
-    if (NO_ERROR != pme->m_postprocessor.processData(frame)) {
+
+    if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
+            (NO_ERROR != pme->m_postprocessor.processData(frame))) {
         LOGE("Failed to trigger process data");
         pChannel->bufDone(super_frame);
         free(frame);
@@ -1774,15 +1793,9 @@
     }
 
     // Wait on Postproc initialization if needed
-    if (NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) {
-        LOGE("Reprocess Deferred work failed");
-        pChannel->bufDone(super_frame);
-        free(frame);
-        frame = NULL;
-        return;
-    }
-
-    if (NO_ERROR != pme->m_postprocessor.processData(frame)) {
+    // then send to postprocessor
+    if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
+            (NO_ERROR != pme->m_postprocessor.processData(frame))) {
         LOGE("Failed to trigger process data");
         pChannel->bufDone(super_frame);
         free(frame);
@@ -1950,6 +1963,7 @@
     //rotation & device rotation
     uint32_t prmRotation = mParameters.getJpegRotation();
     cam_rotation_info_t rotation_info;
+    memset(&rotation_info, 0, sizeof(cam_rotation_info_t));
     if (prmRotation == 0) {
        rotation_info.rotation = ROTATE_0;
     } else if (prmRotation == 90) {
@@ -2074,12 +2088,12 @@
             payload->faces_data = faces_data;
             int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
             if (rc != NO_ERROR) {
-                LOGW("%s: processEvt face detection failed", __func__);
+                LOGW("processEvt face detection failed");
                 free(payload);
                 payload = NULL;
             }
         } else {
-            LOGE("%s: No memory for face detect qcamera_sm_internal_evt_payload_t", __func__);
+            LOGE("No memory for face detect qcamera_sm_internal_evt_payload_t");
         }
     }
 
@@ -2122,8 +2136,7 @@
                 //to focused/not focused state.
                 payload->focus_data.flush_info.needFlush =
                         ((prevFocusState == CAM_AF_STATE_PASSIVE_SCAN) ||
-                        (prevFocusState == CAM_AF_STATE_ACTIVE_SCAN) ||
-                        (prevFocusState == CAM_AF_STATE_INACTIVE)) &&
+                        (prevFocusState == CAM_AF_STATE_ACTIVE_SCAN)) &&
                         ((pme->m_currentFocusState == CAM_AF_STATE_FOCUSED_LOCKED) ||
                         (pme->m_currentFocusState == CAM_AF_STATE_NOT_FOCUSED_LOCKED));
                 payload->focus_data.flush_info.focused_frame_idx = frame->frame_idx;
@@ -2222,13 +2235,14 @@
         }
     }
 
-    IF_META_AVAILABLE(int32_t, scene, CAM_INTF_META_ASD_SCENE_TYPE, pMetaData) {
+    IF_META_AVAILABLE(cam_asd_decision_t, cam_asd_info,
+            CAM_INTF_META_ASD_SCENE_INFO, pMetaData) {
         qcamera_sm_internal_evt_payload_t *payload =
             (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
         if (NULL != payload) {
             memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
             payload->evt_type = QCAMERA_INTERNAL_EVT_ASD_UPDATE;
-            payload->asd_data = (cam_auto_scene_t)*scene;
+            payload->asd_data = (cam_asd_decision_t)*cam_asd_info;
             int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
             if (rc != NO_ERROR) {
                 LOGW("processEvt asd_update failed");
@@ -2489,7 +2503,7 @@
         return;
     }
 
-    if (!pme->needProcessPreviewFrame()) {
+    if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
         LOGH("preview is not running, no need to process");
         stream->bufDone(frame->buf_idx);
         free(super_frame);
@@ -2938,8 +2952,8 @@
             faces_data.detection_data.num_faces_detected = MAX_ROI;
         }
 
-        LOGH("[KPI Perf] %s: PROFILE_NUMBER_OF_FACES_DETECTED %d",
-                __func__,faces_data.detection_data.num_faces_detected);
+        LOGH("[KPI Perf] PROFILE_NUMBER_OF_FACES_DETECTED %d",
+                faces_data.detection_data.num_faces_detected);
 
         IF_META_AVAILABLE(cam_face_recog_data_t, p_recog_data,
                 CAM_INTF_META_FACE_RECOG, metadata) {
diff --git a/QCamera2/HAL/QCameraChannel.cpp b/QCamera2/HAL/QCameraChannel.cpp
index fe1b221..cff8df5 100644
--- a/QCamera2/HAL/QCameraChannel.cpp
+++ b/QCamera2/HAL/QCameraChannel.cpp
@@ -372,25 +372,23 @@
 int32_t QCameraChannel::stop()
 {
     int32_t rc = NO_ERROR;
-    ssize_t linkedIdx = -1;
+    size_t i = 0;
 
     if (!m_bIsActive) {
         return NO_INIT;
     }
 
-    for (size_t i = 0; i < mStreams.size(); i++) {
+    while(i < mStreams.size()) {
         if (mStreams[i] != NULL) {
-               if (m_handle == mStreams[i]->getChannelHandle()) {
-                   mStreams[i]->stop();
-               } else {
-                   // Remove linked stream from stream list
-                   linkedIdx = (ssize_t)i;
-               }
+            if (m_handle == mStreams[i]->getChannelHandle()) {
+                mStreams[i]->stop();
+                i++;
+            } else {
+                // Remove linked stream from stream list
+                mStreams.removeAt(i);
+            }
         }
     }
-    if (linkedIdx > 0) {
-        mStreams.removeAt((size_t)linkedIdx);
-    }
 
     rc = m_camOps->stop_channel(m_camHandle, m_handle);
 
diff --git a/QCamera2/HAL/QCameraMem.cpp b/QCamera2/HAL/QCameraMem.cpp
index 7094b7c..cdb1ae1 100644
--- a/QCamera2/HAL/QCameraMem.cpp
+++ b/QCamera2/HAL/QCameraMem.cpp
@@ -815,7 +815,7 @@
                     deallocOneBuffer(mMemInfo[j]);
                 }
                 // Deallocate remaining buffers that have already been allocated
-                for (int j = i; j < count; j --) {
+                for (int j = i; j < count; j++) {
                     deallocOneBuffer(mMemInfo[j]);
                 }
                 ATRACE_END();
diff --git a/QCamera2/HAL/QCameraMem.h b/QCamera2/HAL/QCameraMem.h
index 2b99668..c52cea1 100644
--- a/QCamera2/HAL/QCameraMem.h
+++ b/QCamera2/HAL/QCameraMem.h
@@ -270,6 +270,7 @@
     void setMaxFPS(int maxFPS);
     int32_t enqueueBuffer(uint32_t index, nsecs_t timeStamp = 0);
     int32_t dequeueBuffer();
+    inline bool isBufOwnedByCamera(uint32_t index) {return mLocalFlag[index] == BUFFER_OWNED;};
 
 private:
     buffer_handle_t *mBufferHandle[MM_CAMERA_MAX_NUM_FRAMES];
diff --git a/QCamera2/HAL/QCameraMuxer.cpp b/QCamera2/HAL/QCameraMuxer.cpp
index 411007f..1c0c9c1 100644
--- a/QCamera2/HAL/QCameraMuxer.cpp
+++ b/QCamera2/HAL/QCameraMuxer.cpp
@@ -2560,7 +2560,7 @@
                             !gMuxer->m_AuxJpegQ.isEmpty()) {
                         main_jpeg_node = (cam_compose_jpeg_info_t *)
                                 gMuxer->m_MainJpegQ.dequeue();
-                        if (main_jpeg_node) {
+                        if (main_jpeg_node != NULL) {
                             LOGD("main_jpeg_node found frame idx %d"
                                     "ptr %p buffer_ptr %p buffer_size %d",
                                      main_jpeg_node->frame_idx,
@@ -2571,7 +2571,7 @@
                             aux_jpeg_node =
                                     (cam_compose_jpeg_info_t *) gMuxer->
                                     m_AuxJpegQ.dequeue();
-                            if (aux_jpeg_node) {
+                            if (aux_jpeg_node != NULL) {
                                 LOGD("aux_jpeg_node found frame idx %d"
                                         "ptr %p buffer_ptr %p buffer_size %d",
                                          aux_jpeg_node->frame_idx,
@@ -2584,7 +2584,7 @@
                                         aux_jpeg_node);
                             }
                         }
-                        if (main_jpeg_node) {
+                        if (main_jpeg_node != NULL) {
                             if ( main_jpeg_node->release_cb ) {
                                 main_jpeg_node->release_cb(
                                         main_jpeg_node->release_data,
@@ -2592,17 +2592,19 @@
                                         NO_ERROR);
                             }
                             free(main_jpeg_node);
+                            main_jpeg_node = NULL;
                         } else {
                             LOGH("Mpo Match not found");
                         }
-                        if (aux_jpeg_node) {
-                            if ( aux_jpeg_node->release_cb ) {
+                        if (aux_jpeg_node != NULL) {
+                            if (aux_jpeg_node->release_cb) {
                                 aux_jpeg_node->release_cb(
                                         aux_jpeg_node->release_data,
                                         aux_jpeg_node->release_cookie,
                                         NO_ERROR);
                             }
                             free(aux_jpeg_node);
+                            aux_jpeg_node = NULL;
                         } else {
                             LOGH("Mpo Match not found");
                         }
diff --git a/QCamera2/HAL/QCameraParameters.cpp b/QCamera2/HAL/QCameraParameters.cpp
index bee68a7..60cd5f7 100644
--- a/QCamera2/HAL/QCameraParameters.cpp
+++ b/QCamera2/HAL/QCameraParameters.cpp
@@ -207,6 +207,10 @@
 
 const char QCameraParameters::KEY_QC_LONG_SHOT[] = "long-shot";
 const char QCameraParameters::KEY_QC_INITIAL_EXPOSURE_INDEX[] = "initial-exp-index";
+const char QCameraParameters::KEY_QC_INSTANT_AEC[] = "instant-aec";
+const char QCameraParameters::KEY_QC_INSTANT_CAPTURE[] = "instant-capture";
+const char QCameraParameters::KEY_QC_INSTANT_AEC_SUPPORTED_MODES[] = "instant-aec-values";
+const char QCameraParameters::KEY_QC_INSTANT_CAPTURE_SUPPORTED_MODES[] = "instant-capture-values";
 
 // Values for effect settings.
 const char QCameraParameters::EFFECT_EMBOSS[] = "emboss";
@@ -342,6 +346,16 @@
 const char QCameraParameters::AUTO_EXPOSURE_SPOT_METERING_ADV[] = "spot-metering-adv";
 const char QCameraParameters::AUTO_EXPOSURE_CENTER_WEIGHTED_ADV[] = "center-weighted-adv";
 
+// Values for instant AEC modes
+const char QCameraParameters::KEY_QC_INSTANT_AEC_DISABLE[] = "0";
+const char QCameraParameters::KEY_QC_INSTANT_AEC_AGGRESSIVE_AEC[] = "1";
+const char QCameraParameters::KEY_QC_INSTANT_AEC_FAST_AEC[] = "2";
+
+// Values for instant capture modes
+const char QCameraParameters::KEY_QC_INSTANT_CAPTURE_DISABLE[] = "0";
+const char QCameraParameters::KEY_QC_INSTANT_CAPTURE_AGGRESSIVE_AEC[] = "1";
+const char QCameraParameters::KEY_QC_INSTANT_CAPTURE_FAST_AEC[] = "2";
+
 const char QCameraParameters::KEY_QC_GPS_LATITUDE_REF[] = "gps-latitude-ref";
 const char QCameraParameters::KEY_QC_GPS_LONGITUDE_REF[] = "gps-longitude-ref";
 const char QCameraParameters::KEY_QC_GPS_ALTITUDE_REF[] = "gps-altitude-ref";
@@ -503,6 +517,20 @@
     { AUTO_EXPOSURE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
 };
 
+const QCameraParameters::QCameraMap<cam_aec_convergence_type>
+        QCameraParameters::INSTANT_AEC_MODES_MAP[] = {
+    { KEY_QC_INSTANT_AEC_DISABLE,        CAM_AEC_NORMAL_CONVERGENCE },
+    { KEY_QC_INSTANT_AEC_AGGRESSIVE_AEC, CAM_AEC_AGGRESSIVE_CONVERGENCE },
+    { KEY_QC_INSTANT_AEC_FAST_AEC,       CAM_AEC_FAST_CONVERGENCE },
+};
+
+const QCameraParameters::QCameraMap<cam_aec_convergence_type>
+        QCameraParameters::INSTANT_CAPTURE_MODES_MAP[] = {
+    { KEY_QC_INSTANT_CAPTURE_DISABLE,        CAM_AEC_NORMAL_CONVERGENCE },
+    { KEY_QC_INSTANT_CAPTURE_AGGRESSIVE_AEC, CAM_AEC_AGGRESSIVE_CONVERGENCE },
+    { KEY_QC_INSTANT_CAPTURE_FAST_AEC,       CAM_AEC_FAST_CONVERGENCE },
+};
+
 const QCameraParameters::QCameraMap<cam_format_t>
         QCameraParameters::PREVIEW_FORMATS_MAP[] = {
     {PIXEL_FORMAT_YUV420SP,        CAM_FORMAT_YUV_420_NV21},
@@ -893,7 +921,6 @@
       m_bTNRVideoOn(false),
       m_bTNRSnapshotOn(false),
       m_bInited(false),
-      m_nBurstNum(1),
       m_nRetroBurstNum(0),
       m_nBurstLEDOnPeriod(100),
       m_bUpdateEffects(false),
@@ -941,7 +968,11 @@
       m_expTime(0),
       m_isoValue(0),
       m_ManualCaptureMode(CAM_MANUAL_CAPTURE_TYPE_OFF),
-      m_dualLedCalibration(0)
+      m_dualLedCalibration(0),
+      m_bInstantAEC(false),
+      m_bInstantCapture(false),
+      mAecFrameBound(0),
+      mAecSkipDisplayFrameBound(0)
 {
     char value[PROPERTY_VALUE_MAX];
     // TODO: may move to parameter instead of sysprop
@@ -1022,7 +1053,6 @@
     m_bTNRVideoOn(false),
     m_bTNRSnapshotOn(false),
     m_bInited(false),
-    m_nBurstNum(1),
     m_nRetroBurstNum(0),
     m_nBurstLEDOnPeriod(100),
     m_bPreviewFlipChanged(false),
@@ -1067,7 +1097,11 @@
     m_expTime(0),
     m_isoValue(0),
     m_ManualCaptureMode(CAM_MANUAL_CAPTURE_TYPE_OFF),
-    m_dualLedCalibration(0)
+    m_dualLedCalibration(0),
+    m_bInstantAEC(false),
+    m_bInstantCapture(false),
+    mAecFrameBound(0),
+    mAecSkipDisplayFrameBound(0)
 {
     memset(&m_LiveSnapshotSize, 0, sizeof(m_LiveSnapshotSize));
     memset(&m_default_fps_range, 0, sizeof(m_default_fps_range));
@@ -4082,7 +4116,7 @@
  *==========================================================================*/
 int32_t QCameraParameters::setNumOfSnapshot()
 {
-    int nBurstNum = getBurstNum();
+    int nBurstNum = 1;
     int nExpnum = 0;
 
     const char *bracket_str = get(KEY_QC_AE_BRACKET_HDR);
@@ -4149,7 +4183,7 @@
                 updateParamEntry(KEY_RECORDING_HINT, str);
                 setRecordingHintValue(value);
                 if (getFaceDetectionOption() == true) {
-                    if (!isFDInVideoEnabled()) {
+                    if (!fdModeInVideo()) {
                         setFaceDetection(value > 0 ? false : true, false);
                     } else {
                         setFaceDetection(true, false);
@@ -4792,45 +4826,6 @@
 }
 
 /*===========================================================================
- * FUNCTION   : setBurstNum
- *
- * DESCRIPTION: set burst number of snapshot
- *
- * PARAMETERS :
- *   @params  : user setting parameters
- *
- * RETURN     : int32_t type of status
- *              NO_ERROR  -- success
- *              none-zero failure code
- *==========================================================================*/
-int32_t QCameraParameters::setBurstNum(const QCameraParameters& params)
-{
-    int nBurstNum = params.getInt(KEY_QC_SNAPSHOT_BURST_NUM);
-    if (isAdvCamFeaturesEnabled()) {
-        nBurstNum = 1;
-    }
-    if (nBurstNum <= 0) {
-        // if burst number is not set in parameters,
-        // read from sys prop
-        char prop[PROPERTY_VALUE_MAX];
-        memset(prop, 0, sizeof(prop));
-        property_get("persist.camera.snapshot.number", prop, "0");
-        nBurstNum = atoi(prop);
-        if (nBurstNum <= 0) {
-            nBurstNum = 1;
-        }
-    }
-    set(KEY_QC_SNAPSHOT_BURST_NUM, nBurstNum);
-    m_nBurstNum = (uint8_t)nBurstNum;
-    LOGH("m_nBurstNum = %d", m_nBurstNum);
-    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_BURST_NUM, (uint32_t)nBurstNum)) {
-        return BAD_VALUE;
-    }
-
-    return NO_ERROR;
-}
-
-/*===========================================================================
  * FUNCTION   : setSnapshotFDReq
  *
  * DESCRIPTION: set requirement of Face Detection Metadata in Snapshot mode.
@@ -5089,7 +5084,6 @@
     if ((rc = setChromaFlash(params)))                  final_rc = rc;
     if ((rc = setTruePortrait(params)))                 final_rc = rc;
     if ((rc = setOptiZoom(params)))                     final_rc = rc;
-    if ((rc = setBurstNum(params)))                     final_rc = rc;
     if ((rc = setBurstLEDOnPeriod(params)))             final_rc = rc;
     if ((rc = setRetroActiveBurstNum(params)))          final_rc = rc;
     if ((rc = setSnapshotFDReq(params)))                final_rc = rc;
@@ -5098,6 +5092,8 @@
     if ((rc = setTemporalDenoise(params)))              final_rc = rc;
     if ((rc = setCacheVideoBuffers(params)))            final_rc = rc;
     if ((rc = setInitialExposureIndex(params)))         final_rc = rc;
+    if ((rc = setInstantCapture(params)))               final_rc = rc;
+    if ((rc = setInstantAEC(params)))                   final_rc = rc;
 
     // update live snapshot size after all other parameters are set
     if ((rc = setLiveSnapshotSize(params)))             final_rc = rc;
@@ -5132,6 +5128,8 @@
         final_rc = rc;
     }
 #endif
+
+    if ((rc = setAdvancedCaptureMode()))                final_rc = rc;
 UPDATE_PARAM_DONE:
     needRestart = m_bNeedRestart;
     return final_rc;
@@ -5486,6 +5484,23 @@
     setFloat(KEY_EXPOSURE_COMPENSATION_STEP, m_pCapability->exposure_compensation_step); // 1/6
     setExposureCompensation(m_pCapability->exposure_compensation_default); // 0
 
+    // Set Instant AEC modes
+    String8 instantAECModes = createValuesString(
+            m_pCapability->supported_instant_aec_modes,
+            m_pCapability->supported_instant_aec_modes_cnt,
+            INSTANT_AEC_MODES_MAP,
+            PARAM_MAP_SIZE(INSTANT_AEC_MODES_MAP));
+    set(KEY_QC_INSTANT_AEC_SUPPORTED_MODES, instantAECModes.string());
+
+    // Set Instant Capture modes
+    String8 instantCaptureModes = createValuesString(
+            m_pCapability->supported_instant_aec_modes,
+            m_pCapability->supported_instant_aec_modes_cnt,
+            INSTANT_CAPTURE_MODES_MAP,
+            PARAM_MAP_SIZE(INSTANT_CAPTURE_MODES_MAP));
+    set(KEY_QC_INSTANT_CAPTURE_SUPPORTED_MODES, instantCaptureModes.string());
+
+
     // Set Antibanding
     String8 antibandingValues = createValuesString(
             m_pCapability->supported_antibandings,
@@ -6868,7 +6883,7 @@
                     value = CAM_SENSOR_HDR_OFF;
                 LOGH("%s: Overriding to sensor HDR Mode to:%d", __func__, value);
                 if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_SENSOR_HDR, (cam_sensor_hdr_type_t) value)) {
-                    ALOGE("%s: Override to sensor HDR mode for video HDR failed", __func__);
+                    LOGE("%s: Override to sensor HDR mode for video HDR failed", __func__);
                     return BAD_VALUE;
                 }
                 updateParamEntry(KEY_QC_VIDEO_HDR, videoHDR);
@@ -7976,6 +7991,154 @@
 }
 
 /*===========================================================================
+ * FUNCTION   : setInstantCapture
+ *
+ * DESCRIPTION: Set Instant Capture related params
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setInstantCapture(const QCameraParameters& params)
+{
+    int32_t rc = NO_ERROR;
+    int value = -1;
+    // Check for instant capture; this will enable instant AEC as well.
+    // This param triggers the instant AEC param to the backend
+    // and is also used for instant capture.
+    const char *str = params.get(KEY_QC_INSTANT_CAPTURE);
+    const char *prev_str = get(KEY_QC_INSTANT_CAPTURE);
+    if (str) {
+        if ((prev_str == NULL) || (strcmp(str, prev_str) != 0)) {
+            value = lookupAttr(INSTANT_CAPTURE_MODES_MAP,
+                    PARAM_MAP_SIZE(INSTANT_CAPTURE_MODES_MAP), str);
+            LOGD("Set instant Capture from param = %d", value);
+            if(value != NAME_NOT_FOUND) {
+                updateParamEntry(KEY_QC_INSTANT_CAPTURE, str);
+            }
+        }
+    } else {
+        char prop[PROPERTY_VALUE_MAX];
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.camera.instant.capture", prop, KEY_QC_INSTANT_CAPTURE_DISABLE);
+        if ((prev_str == NULL) || (strcmp(prop, prev_str) != 0)) {
+            value = lookupAttr(INSTANT_CAPTURE_MODES_MAP,
+                    PARAM_MAP_SIZE(INSTANT_CAPTURE_MODES_MAP), prop);
+            LOGD("Set instant capture from setprop = %d", value);
+            if (value != NAME_NOT_FOUND) {
+                updateParamEntry(KEY_QC_INSTANT_CAPTURE, prop);
+            }
+        }
+    }
+
+    // Set instant AEC param to the backend for either instant capture or instant AEC
+    // 0 - disable (normal AEC)
+    // 1 - Aggressive AEC (algo used in backend)
+    // 2 - Fast AEC (algo used in backend)
+    if (value != NAME_NOT_FOUND && value != -1) {
+        m_bInstantCapture = (value > 0)? true : false;
+        setInstantAEC((uint8_t)value, false);
+    }
+
+
+    // Get the AEC frame bound value from setprop.
+    // This value indicates the number of frames the camera interface
+    // will wait for the instant capture frame.
+    // The default value is 7.
+    // It also indicates the number of frames that the HAL
+    // will neither display nor send to the app as preview frames.
+    // This is applicable only if instant capture is set.
+    if (m_bInstantCapture) {
+        char prop[PROPERTY_VALUE_MAX];
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.camera.ae.capture.bound", prop, "7");
+        int32_t frame_bound = atoi(prop);
+        if (frame_bound >= 0) {
+            mAecFrameBound = (uint8_t)frame_bound;
+        } else {
+            LOGE("Invalid prop for aec frame bound %d", frame_bound);
+            rc = BAD_VALUE;
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setInstantAEC
+ *
+ * DESCRIPTION: Set Instant AEC related params
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setInstantAEC(const QCameraParameters& params)
+{
+    int32_t rc = NO_ERROR;
+    int value = -1;
+
+    // Check for instant AEC only when instant capture is not enabled.
+    // Instant capture already takes care of the instant AEC as well.
+    if (!m_bInstantCapture) {
+        // Check for instant AEC. Instant AEC will only enable fast AEC.
+        // It will not enable instant capture.
+        // This param will trigger the instant AEC param to backend
+        const char *str = params.get(KEY_QC_INSTANT_AEC);
+        const char *prev_str = get(KEY_QC_INSTANT_AEC);
+        if (str) {
+            if ((prev_str == NULL) || (strcmp(str, prev_str) != 0)) {
+                value = lookupAttr(INSTANT_AEC_MODES_MAP,
+                        PARAM_MAP_SIZE(INSTANT_AEC_MODES_MAP), str);
+                LOGD("Set instant AEC from param = %d", value);
+            }
+        } else {
+            char prop[PROPERTY_VALUE_MAX];
+            memset(prop, 0, sizeof(prop));
+            property_get("persist.camera.instant.aec", prop, KEY_QC_INSTANT_AEC_DISABLE);
+            if ((prev_str == NULL) || (strcmp(prop, prev_str) != 0)) {
+                value = lookupAttr(INSTANT_AEC_MODES_MAP,
+                        PARAM_MAP_SIZE(INSTANT_AEC_MODES_MAP), prop);
+                LOGD("Set instant AEC from setprop = %d", value);
+            }
+        }
+
+        // Set instant AEC param to the backend for either instant capture or instant AEC
+        // 0 - disable (normal AEC)
+        // 1 - Aggressive AEC (algo used in backend)
+        // 2 - Fast AEC (algo used in backend)
+        if (value != NAME_NOT_FOUND && value != -1) {
+            setInstantAEC((uint8_t)value, false);
+        }
+
+    }
+
+    // Get the AEC preview skip count from setprop.
+    // This value indicates the number of frames that the HAL
+    // will neither display nor send to the app as preview frames.
+    // The default value is 7.
+    // This is applicable only if instant AEC is set.
+    if (m_bInstantAEC) {
+        char prop[PROPERTY_VALUE_MAX];
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.camera.ae.instant.bound", prop, "7");
+        int32_t aec_frame_skip_cnt = atoi(prop);
+        if (aec_frame_skip_cnt >= 0) {
+            mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
+        } else {
+            LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
+            rc = BAD_VALUE;
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
  * FUNCTION   : setDISValue
  *
  * DESCRIPTION: set DIS value
@@ -9978,6 +10141,13 @@
         max_dim.width = m_pCapability->analysis_max_res.width;
         max_dim.height = m_pCapability->analysis_max_res.height;
 
+        if ((getRecordingHintValue() == true)
+                && fdModeInVideo()
+                && m_pCapability->hw_analysis_supported) {
+            max_dim.width /= 2;
+            max_dim.height /= 2;
+        }
+
         if (prv_dim.width > max_dim.width || prv_dim.height > max_dim.height) {
             double max_ratio, requested_ratio;
 
@@ -10242,7 +10412,7 @@
  *==========================================================================*/
 uint8_t QCameraParameters::getMaxUnmatchedFramesInQueue()
 {
-    return (uint8_t)(m_pCapability->min_num_pp_bufs + (m_nBurstNum / 10));
+    return (uint8_t)(m_pCapability->min_num_pp_bufs);
 }
 
 /*===========================================================================
@@ -10398,7 +10568,7 @@
         numOfBufs--; // Only additional buffers need to be returned
     }
 
-    return (uint8_t)(numOfBufs * getBurstNum());
+    return (uint8_t)(numOfBufs);
 }
 
 /*===========================================================================
@@ -10418,22 +10588,7 @@
         numOfBufs++;
     }
 
-    return (uint8_t)(numOfBufs * getBurstNum());
-}
-
-/*===========================================================================
- * FUNCTION   : getBurstNum
- *
- * DESCRIPTION: get burst number of snapshot
- *
- * PARAMETERS : none
- *
- * RETURN     : number of burst
- *==========================================================================*/
-uint8_t QCameraParameters::getBurstNum()
-{
-    LOGH("m_nBurstNum = %d", m_nBurstNum);
-    return m_nBurstNum;
+    return (uint8_t)(numOfBufs);
 }
 
 /*===========================================================================
@@ -10546,9 +10701,15 @@
     char exifRotation[PROPERTY_VALUE_MAX];
 
     property_get("persist.camera.exif.rotation", exifRotation, "off");
+
     if (!strcmp(exifRotation, "on")) {
         return true;
     }
+
+    if (!(m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
+        return true;
+    }
+
     return false;
 }
 
@@ -11127,6 +11288,9 @@
         if (getRecordingHintValue() > 0) {
             faceProcMask = 0;
             faceProcMask |= CAM_FACE_PROCESS_MASK_FOCUS;
+            if (fdModeInVideo() == CAM_FACE_PROCESS_MASK_DETECTION) {
+                faceProcMask |= CAM_FACE_PROCESS_MASK_DETECTION;
+            }
         } else {
             faceProcMask |= CAM_FACE_PROCESS_MASK_FOCUS;
             faceProcMask |= CAM_FACE_PROCESS_MASK_DETECTION;
@@ -12350,7 +12514,7 @@
  *==========================================================================*/
 bool QCameraParameters::isHDREnabled()
 {
-    return ((m_nBurstNum == 1) && (m_bHDREnabled || m_HDRSceneEnabled));
+    return ((m_bHDREnabled || m_HDRSceneEnabled));
 }
 
 /*===========================================================================
@@ -12589,7 +12753,7 @@
         /* Analysis stream is needed by DCRF regardless of recording hint */
         if ((getDcrf() == true) ||
                 (getRecordingHintValue() != true) ||
-                (isFDInVideoEnabled())) {
+                (fdModeInVideo())) {
             stream_config_info.type[stream_config_info.num_streams] =
                     CAM_STREAM_TYPE_ANALYSIS;
             getStreamDimension(CAM_STREAM_TYPE_ANALYSIS,
@@ -12863,7 +13027,7 @@
         numOfBufs += 1;
     }
 
-    return (uint8_t)(numOfBufs * getBurstNum());
+    return (uint8_t)(numOfBufs);
 }
 
 /*===========================================================================
@@ -13125,7 +13289,8 @@
 
     if (isHighQualityNoiseReductionMode() &&
             ((stream_type == CAM_STREAM_TYPE_VIDEO) ||
-            (stream_type == CAM_STREAM_TYPE_PREVIEW && getRecordingHintValue()))) {
+            (stream_type == CAM_STREAM_TYPE_PREVIEW && getRecordingHintValue() &&
+            isPreviewSeeMoreRequired()))) {
         feature_mask |= CAM_QTI_FEATURE_SW_TNR;
     }
 
@@ -13587,9 +13752,6 @@
         getNumOfExtraHDROutBufsIfNeeded());
     str += s;
 
-    snprintf(s, 128, "getBurstNum: %d\n", getBurstNum());
-    str += s;
-
     snprintf(s, 128, "getRecordingHintValue: %d\n", getRecordingHintValue());
     str += s;
 
@@ -13848,28 +14010,30 @@
 }
 
 /*===========================================================================
- * FUNCTION   : isFDInVideoEnabled
+ * FUNCTION   : fdModeInVideo
  *
  * DESCRIPTION: FD in Video change
  *
  * PARAMETERS : none
  *
- * RETURN     : TRUE  : If FD in Video enabled
- *              FALSE : If FD in Video disabled
+ * RETURN     : FD Mode in Video
+ *              0 : If FD in Video disabled
+ *              1 : If FD in Video enabled for Detection, focus
+ *              2 : If FD in Video enabled only for focus
  *==========================================================================*/
-bool QCameraParameters::isFDInVideoEnabled()
+uint8_t QCameraParameters::fdModeInVideo()
 {
     char value[PROPERTY_VALUE_MAX];
-    bool fdvideo = FALSE;
+    uint8_t fdvideo = 0;
 
     if (!m_pCapability->hw_analysis_supported) {
-        return FALSE;
+        return 0;
     }
 
     property_get("persist.camera.fdvideo", value, "0");
-    fdvideo = (atoi(value) > 0) ? TRUE : FALSE;
+    fdvideo = (atoi(value) > 0) ? atoi(value) : 0;
 
-    LOGD("FD in Video enabled : %d", fdvideo);
+    LOGD("FD mode in Video : %d", fdvideo);
     return fdvideo;
 }
 
@@ -14002,4 +14166,75 @@
     return NO_ERROR;
 }
 
+/*===========================================================================
+ * FUNCTION   : setInstantAEC
+ *
+ * DESCRIPTION: set instant AEC value to backend
+ *
+ * PARAMETERS :
+ *   @value : instant aec enabled or not.
+ *            0 - Disable
+ *            1 - Enable and set aggressive AEC algo to the backend
+ *            2 - Enable and set fast AEC algo to the backend
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setInstantAEC(uint8_t value, bool initCommit)
+{
+    if (initCommit) {
+        if (initBatchUpdate(m_pParamBuf) < 0) {
+            LOGE("Failed to initialize group update table");
+            return FAILED_TRANSACTION;
+        }
+    }
+
+    int32_t rc = NO_ERROR;
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_INSTANT_AEC, value)) {
+        LOGE("Failed to instant aec value");
+        return BAD_VALUE;
+    }
+
+    // set the new value
+    char val[8];
+    snprintf(val, sizeof(val), "%d", value);
+    updateParamEntry(KEY_QC_INSTANT_AEC, val);
+
+    if (initCommit) {
+        rc = commitSetBatch();
+        if (NO_ERROR != rc) {
+            LOGE("Failed to instant aec value");
+            return rc;
+        }
+    }
+
+    LOGD(" Instant AEC value set to backend %d", value);
+    m_bInstantAEC = value;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAdvancedCaptureMode
+ *
+ * DESCRIPTION: set advanced capture mode
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAdvancedCaptureMode()
+{
+    uint8_t value = isAdvCamFeaturesEnabled();
+    LOGD("updating advanced capture mode value to %d",value);
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+            CAM_INTF_PARM_ADV_CAPTURE_MODE, value)) {
+        LOGE("Failed to set advanced capture mode param");
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
 }; // namespace qcamera
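For reference, a standalone sketch (not code from this tree; the enum and helper names are hypothetical) of the value contract that setInstantAEC() documents above, i.e. 0 = disabled, 1 = aggressive AEC convergence, 2 = fast AEC convergence:

    // Standalone illustration, compiled separately from the HAL.
    #include <cstdint>
    #include <cstdio>

    enum class InstantAecMode : uint8_t {
        Disabled   = 0,  // instant AEC off
        Aggressive = 1,  // enable, aggressive AEC convergence in the backend
        Fast       = 2,  // enable, fast AEC convergence in the backend
    };

    // Hypothetical helper: clamp an arbitrary property value to a valid mode.
    static InstantAecMode toInstantAecMode(int value) {
        if (value == 1) return InstantAecMode::Aggressive;
        if (value == 2) return InstantAecMode::Fast;
        return InstantAecMode::Disabled;
    }

    int main() {
        const int vals[] = {0, 1, 2, 7};
        for (int v : vals) {
            printf("property value %d -> mode %d\n",
                   v, static_cast<int>(toInstantAecMode(v)));
        }
        return 0;
    }
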
diff --git a/QCamera2/HAL/QCameraParameters.h b/QCamera2/HAL/QCameraParameters.h
index 55109b9..931b26d 100644
--- a/QCamera2/HAL/QCameraParameters.h
+++ b/QCamera2/HAL/QCameraParameters.h
@@ -209,6 +209,10 @@
     static const char FOCUS_MODE_MANUAL_POSITION[];
     static const char KEY_QC_LONG_SHOT[];
     static const char KEY_QC_INITIAL_EXPOSURE_INDEX[];
+    static const char KEY_QC_INSTANT_AEC[];
+    static const char KEY_QC_INSTANT_CAPTURE[];
+    static const char KEY_QC_INSTANT_AEC_SUPPORTED_MODES[];
+    static const char KEY_QC_INSTANT_CAPTURE_SUPPORTED_MODES[];
 
     static const char KEY_QC_MANUAL_FOCUS_POSITION[];
     static const char KEY_QC_MANUAL_FOCUS_POS_TYPE[];
@@ -461,6 +465,16 @@
     static const char AUTO_EXPOSURE_SPOT_METERING_ADV[];
     static const char AUTO_EXPOSURE_CENTER_WEIGHTED_ADV[];
 
+    // Values for instant AEC modes
+    static const char KEY_QC_INSTANT_AEC_DISABLE[];
+    static const char KEY_QC_INSTANT_AEC_AGGRESSIVE_AEC[];
+    static const char KEY_QC_INSTANT_AEC_FAST_AEC[];
+
+    // Values for instant capture modes
+    static const char KEY_QC_INSTANT_CAPTURE_DISABLE[];
+    static const char KEY_QC_INSTANT_CAPTURE_AGGRESSIVE_AEC[];
+    static const char KEY_QC_INSTANT_CAPTURE_FAST_AEC[];
+
     static const char KEY_QC_SHARPNESS[];
     static const char KEY_QC_MIN_SHARPNESS[];
     static const char KEY_QC_MAX_SHARPNESS[];
@@ -664,6 +678,10 @@
     uint32_t getJpegExifRotation();
     bool useJpegExifRotation();
     int32_t getEffectValue();
+    bool isInstantAECEnabled() {return m_bInstantAEC;};
+    bool isInstantCaptureEnabled() {return m_bInstantCapture;};
+    uint8_t getAecFrameBoundValue() {return mAecFrameBound;};
+    uint8_t getAecSkipDisplayFrameBound() {return mAecSkipDisplayFrameBound;};
 
     int32_t getExifDateTime(String8 &dateTime, String8 &subsecTime);
     int32_t getExifFocalLength(rat_t *focalLenght);
@@ -831,7 +849,7 @@
     int32_t getRelatedCamCalibration(
             cam_related_system_calibration_data_t* calib);
     int32_t bundleRelatedCameras(bool sync, uint32_t sessionid);
-    bool isFDInVideoEnabled();
+    uint8_t fdModeInVideo();
     bool isOEMFeatEnabled() { return m_bOEMFeatEnabled; }
 
     int32_t setZslMode(bool value);
@@ -842,6 +860,7 @@
     int32_t getPicSizeFromAPK(int &width, int &height);
 
     int32_t checkFeatureConcurrency();
+    int32_t setInstantAEC(uint8_t enable, bool initCommit);
 private:
     int32_t setPreviewSize(const QCameraParameters& );
     int32_t setVideoSize(const QCameraParameters& );
@@ -910,7 +929,6 @@
     int32_t setSceneSelectionMode(const QCameraParameters& params);
     int32_t setFaceRecognition(const QCameraParameters& );
     int32_t setFlip(const QCameraParameters& );
-    int32_t setBurstNum(const QCameraParameters& params);
     int32_t setRetroActiveBurstNum(const QCameraParameters& params);
     int32_t setBurstLEDOnPeriod(const QCameraParameters& params);
     int32_t setSnapshotFDReq(const QCameraParameters& );
@@ -919,6 +937,8 @@
     int32_t setTintlessValue(const QCameraParameters& params);
     int32_t setCDSMode(const QCameraParameters& params);
     int32_t setInitialExposureIndex(const QCameraParameters& params);
+    int32_t setInstantCapture(const QCameraParameters& params);
+    int32_t setInstantAEC(const QCameraParameters& params);
     int32_t setMobicat(const QCameraParameters& params);
     int32_t setRdiMode(const QCameraParameters& );
     int32_t setSecureMode(const QCameraParameters& );
@@ -999,7 +1019,6 @@
 
     bool isTNRPreviewEnabled() {return m_bTNRPreviewOn;};
     bool isTNRVideoEnabled() {return m_bTNRVideoOn;};
-    uint8_t getBurstNum();
     bool getFaceDetectionOption() { return  m_bFaceDetectionOn;}
     bool isAVTimerEnabled();
     void getLiveSnapshotSize(cam_dimension_t &dim);
@@ -1028,6 +1047,7 @@
     String8 createFpsString(cam_fps_range_t &fps);
     String8 createZoomRatioValuesString(uint32_t *zoomRatios, size_t length);
     int32_t setDualLedCalibration(const QCameraParameters& params);
+    int32_t setAdvancedCaptureMode();
 
     // ops for batch set/get params with server
     int32_t initBatchUpdate(parm_buffer_t *p_table);
@@ -1042,6 +1062,8 @@
     // Map from strings to values
     static const cam_dimension_t THUMBNAIL_SIZES_MAP[];
     static const QCameraMap<cam_auto_exposure_mode_type> AUTO_EXPOSURE_MAP[];
+    static const QCameraMap<cam_aec_convergence_type> INSTANT_CAPTURE_MODES_MAP[];
+    static const QCameraMap<cam_aec_convergence_type> INSTANT_AEC_MODES_MAP[];
     static const QCameraMap<cam_format_t> PREVIEW_FORMATS_MAP[];
     static const QCameraMap<cam_format_t> PICTURE_TYPES_MAP[];
     static const QCameraMap<cam_focus_mode_type> FOCUS_MODES_MAP[];
@@ -1106,7 +1128,6 @@
     bool m_bTNRVideoOn;
     bool m_bTNRSnapshotOn;
     bool m_bInited;
-    uint8_t m_nBurstNum;
     int m_nRetroBurstNum;
     int m_nBurstLEDOnPeriod;
     cam_exp_bracketing_t m_AEBracketingClient;
@@ -1184,6 +1205,14 @@
     QCameraManualCaptureModes m_ManualCaptureMode;
     cam_dyn_img_data_t m_DynamicImgData;
     int32_t m_dualLedCalibration;
+    // Param to trigger instant AEC.
+    bool m_bInstantAEC;
+    // Param to trigger instant capture.
+    bool m_bInstantCapture;
+    // Number of frames the camera interface will wait to get the instant capture frame.
+    uint8_t mAecFrameBound;
+    // Number of preview frames that the HAL will hold without displaying, in instant AEC mode.
+    uint8_t mAecSkipDisplayFrameBound;
 };
 
 }; // namespace qcamera
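The two bounds added to QCameraParameters.h above are meant to be read by the preview path; a minimal sketch of how a preview loop could consume them (assuming illustrative names, not the HAL's actual state machine):

    // Standalone illustration: frames are withheld from display until either
    // AEC settles or the skip bound is reached.
    #include <cstdint>
    #include <cstdio>

    struct InstantAecState {
        bool    enabled;
        uint8_t skipDisplayFrameBound;  // e.g. value of getAecSkipDisplayFrameBound()
        uint8_t framesSeen;
        bool    aecSettled;
    };

    static bool shouldDisplayPreviewFrame(InstantAecState &s) {
        if (!s.enabled || s.aecSettled) return true;
        if (s.framesSeen >= s.skipDisplayFrameBound) return true;
        s.framesSeen++;
        return false;  // hold this frame back from the display
    }

    int main() {
        InstantAecState s{true, 3, 0, false};
        for (int f = 0; f < 6; ++f)
            printf("frame %d display=%d\n", f, shouldDisplayPreviewFrame(s));
        return 0;
    }
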
diff --git a/QCamera2/HAL/QCameraParametersIntf.cpp b/QCamera2/HAL/QCameraParametersIntf.cpp
index b970fc9..c2e0269 100644
--- a/QCamera2/HAL/QCameraParametersIntf.cpp
+++ b/QCamera2/HAL/QCameraParametersIntf.cpp
@@ -342,6 +342,34 @@
     return mImpl->getEffectValue();
 }
 
+bool QCameraParametersIntf::isInstantAECEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isInstantAECEnabled();
+}
+
+bool QCameraParametersIntf::isInstantCaptureEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isInstantCaptureEnabled();
+}
+
+uint8_t QCameraParametersIntf::getAecFrameBoundValue()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getAecFrameBoundValue();
+}
+
+uint8_t QCameraParametersIntf::getAecSkipDisplayFrameBound()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getAecSkipDisplayFrameBound();
+}
+
 int32_t QCameraParametersIntf::getExifDateTime(
         String8 &dateTime, String8 &subsecTime)
 {
@@ -1280,11 +1308,11 @@
     return mImpl->bundleRelatedCameras(sync, sessionid);
 }
 
-bool QCameraParametersIntf::isFDInVideoEnabled()
+uint8_t QCameraParametersIntf::fdModeInVideo()
 {
     Mutex::Autolock lock(mLock);
     CHECK_PARAM_INTF(mImpl);
-    return mImpl->isFDInVideoEnabled();
+    return mImpl->fdModeInVideo();
 }
 
 bool QCameraParametersIntf::isOEMFeatEnabled()
@@ -1336,4 +1364,11 @@
     return mImpl->checkFeatureConcurrency();
 }
 
+int32_t QCameraParametersIntf::setInstantAEC(uint8_t enable, bool initCommit)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->setInstantAEC(enable, initCommit);
+}
+
 }; // namespace qcamera
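Every method added to QCameraParametersIntf above follows the same lock, validate, forward shape; a self-contained sketch of that pattern (the classes below are stand-ins, not the HAL types):

    // Standalone illustration of the lock-and-delegate wrapper.
    #include <cstdio>
    #include <mutex>
    #include <stdexcept>

    class ParamsImpl {
    public:
        bool isInstantAECEnabled() const { return mInstantAec; }
        bool mInstantAec = false;
    };

    class ParamsIntf {
    public:
        explicit ParamsIntf(ParamsImpl *impl) : mImpl(impl) {}
        bool isInstantAECEnabled() {
            std::lock_guard<std::mutex> lock(mLock);   // serialize callers
            if (mImpl == nullptr) throw std::runtime_error("no impl");
            return mImpl->isInstantAECEnabled();       // forward to the implementation
        }
    private:
        ParamsImpl *mImpl;
        std::mutex  mLock;
    };

    int main() {
        ParamsImpl impl;
        ParamsIntf intf(&impl);
        printf("instant AEC enabled: %d\n", intf.isInstantAECEnabled());
        return 0;
    }
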
diff --git a/QCamera2/HAL/QCameraParametersIntf.h b/QCamera2/HAL/QCameraParametersIntf.h
index 69de623..b607f10 100644
--- a/QCamera2/HAL/QCameraParametersIntf.h
+++ b/QCamera2/HAL/QCameraParametersIntf.h
@@ -117,6 +117,10 @@
     uint32_t getJpegExifRotation();
     bool useJpegExifRotation();
     int32_t getEffectValue();
+    bool isInstantAECEnabled();
+    bool isInstantCaptureEnabled();
+    uint8_t getAecFrameBoundValue();
+    uint8_t getAecSkipDisplayFrameBound();
 
     int32_t getExifDateTime(String8 &dateTime, String8 &subsecTime);
     int32_t getExifFocalLength(rat_t *focalLenght);
@@ -273,7 +277,7 @@
     int32_t getRelatedCamCalibration(
             cam_related_system_calibration_data_t* calib);
     int32_t bundleRelatedCameras(bool sync, uint32_t sessionid);
-    bool isFDInVideoEnabled();
+    uint8_t fdModeInVideo();
     bool isOEMFeatEnabled();
 
     int32_t setZslMode(bool value);
@@ -284,6 +288,7 @@
     int32_t getPicSizeFromAPK(int &width, int &height);
 
     int32_t checkFeatureConcurrency();
+    int32_t setInstantAEC(uint8_t enable, bool initCommit);
 private:
     QCameraParameters *mImpl;
     mutable Mutex mLock;
diff --git a/QCamera2/HAL/QCameraPostProc.cpp b/QCamera2/HAL/QCameraPostProc.cpp
index 5cb1b83..22413a1 100644
--- a/QCamera2/HAL/QCameraPostProc.cpp
+++ b/QCamera2/HAL/QCameraPostProc.cpp
@@ -369,9 +369,7 @@
         return UNKNOWN_ERROR;
     }
 
-    if ( !m_parent->mParameters.getRecordingHintValue() &&
-            !m_parent->isLongshotEnabled() && (mPPChannelCount > 0)) {
-
+    if (mPPChannelCount > 0) {
         QCameraChannel *pChannel = NULL;
         int ppChannel_idx = mPPChannelCount - 1;
         pChannel = m_parent->needReprocess() ? mPPChannels[ppChannel_idx] :
@@ -644,6 +642,10 @@
             encode_parm.thumb_dim.dst_dim.height);
     }
 
+    if (m_parent->mParameters.useJpegExifRotation()){
+        encode_parm.thumb_rotation = m_parent->mParameters.getJpegExifRotation();
+    }
+
     encode_parm.num_dst_bufs = 1;
     if (mUseJpegBurst) {
         encode_parm.num_dst_bufs = MAX_JPEG_BURST;
@@ -661,6 +663,7 @@
         if (m_pJpegOutputMem[i] != NULL)
           free(m_pJpegOutputMem[i]);
         omx_jpeg_ouput_buf_t omx_out_buf;
+        memset(&omx_out_buf, 0, sizeof(omx_jpeg_ouput_buf_t));
         omx_out_buf.handle = this;
         // allocate output buf for jpeg encoding
         m_pJpegOutputMem[i] = malloc(out_size);
@@ -3043,12 +3046,15 @@
         ppInputFrame = src_frame;
     }
 
+    if (mPPChannelCount >= CAM_PP_CHANNEL_MAX) {
+        LOGE("invalid channel count");
+        return UNKNOWN_ERROR;
+    }
+
     // find meta data stream and index of meta data frame in the superbuf
     for (int8_t j = 0; j < mPPChannelCount; j++) {
-        uint32_t i;
-
         /*First search in src buffer for any offline metadata */
-        for (i = 0; i < src_frame->num_bufs; i++) {
+        for (uint32_t i = 0; i < src_frame->num_bufs; i++) {
             QCameraStream *pStream = mPPChannels[j]->getStreamByHandle(
                     src_frame->bufs[i]->stream_id);
             if (pStream != NULL && pStream->isOrignalTypeOf(CAM_STREAM_TYPE_METADATA)) {
@@ -3068,11 +3074,10 @@
 
     if ((pMetaStream == NULL) && (meta_buf == NULL)) {
         for (int8_t j = 0; j < mPPChannelCount; j++) {
-            uint32_t i;
             m_pSrcChannel = mPPChannels[j]->getSrcChannel();
             if (m_pSrcChannel == NULL)
                 continue;
-            for (i = 0; i < src_reproc_frame->num_bufs; i++) {
+            for (uint32_t i = 0; i < src_reproc_frame->num_bufs; i++) {
                 QCameraStream *pStream =
                         m_pSrcChannel->getStreamByHandle(
                         src_reproc_frame->bufs[i]->stream_id);
@@ -3527,13 +3532,14 @@
         break;
     case EXIF_SHORT:
         {
+            uint16_t *exif_data = (uint16_t *)data;
             if (count > 1) {
                 uint16_t *values = (uint16_t *)malloc(count * sizeof(uint16_t));
                 if (values == NULL) {
                     LOGE("No memory for short array");
                     rc = NO_MEMORY;
                 } else {
-                    memcpy(values, data, count * sizeof(uint16_t));
+                    memcpy(values, exif_data, count * sizeof(uint16_t));
                     m_Entries[m_nNumEntries].tag_entry.data._shorts = values;
                 }
             } else {
@@ -3543,13 +3549,14 @@
         break;
     case EXIF_LONG:
         {
+            uint32_t *exif_data = (uint32_t *)data;
             if (count > 1) {
                 uint32_t *values = (uint32_t *)malloc(count * sizeof(uint32_t));
                 if (values == NULL) {
                     LOGE("No memory for long array");
                     rc = NO_MEMORY;
                 } else {
-                    memcpy(values, data, count * sizeof(uint32_t));
+                    memcpy(values, exif_data, count * sizeof(uint32_t));
                     m_Entries[m_nNumEntries].tag_entry.data._longs = values;
                 }
             } else {
@@ -3559,13 +3566,14 @@
         break;
     case EXIF_RATIONAL:
         {
+            rat_t *exif_data = (rat_t *)data;
             if (count > 1) {
                 rat_t *values = (rat_t *)malloc(count * sizeof(rat_t));
                 if (values == NULL) {
                     LOGE("No memory for rational array");
                     rc = NO_MEMORY;
                 } else {
-                    memcpy(values, data, count * sizeof(rat_t));
+                    memcpy(values, exif_data, count * sizeof(rat_t));
                     m_Entries[m_nNumEntries].tag_entry.data._rats = values;
                 }
             } else {
@@ -3587,13 +3595,14 @@
         break;
     case EXIF_SLONG:
         {
+            uint32_t *exif_data = (uint32_t *)data;
             if (count > 1) {
                 int32_t *values = (int32_t *)malloc(count * sizeof(int32_t));
                 if (values == NULL) {
                     LOGE("No memory for signed long array");
                     rc = NO_MEMORY;
                 } else {
-                    memcpy(values, data, count * sizeof(int32_t));
+                    memcpy(values, exif_data, count * sizeof(int32_t));
                     m_Entries[m_nNumEntries].tag_entry.data._slongs = values;
                 }
             } else {
@@ -3603,13 +3612,14 @@
         break;
     case EXIF_SRATIONAL:
         {
+            srat_t *exif_data = (srat_t *)data;
             if (count > 1) {
                 srat_t *values = (srat_t *)malloc(count * sizeof(srat_t));
                 if (values == NULL) {
                     LOGE("No memory for signed rational array");
                     rc = NO_MEMORY;
                 } else {
-                    memcpy(values, data, count * sizeof(srat_t));
+                    memcpy(values, exif_data, count * sizeof(srat_t));
                     m_Entries[m_nNumEntries].tag_entry.data._srats = values;
                 }
             } else {
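The EXIF changes above introduce a typed view of the payload before copying it into a newly allocated array; a standalone sketch of that copy pattern for the EXIF_SHORT case (the helper name is illustrative):

    // Standalone illustration of the allocate-and-copy pattern used per EXIF type.
    #include <cstdint>
    #include <cstdio>
    #include <cstdlib>
    #include <cstring>

    static uint16_t *copyShortArray(const void *data, size_t count) {
        const uint16_t *exif_data = static_cast<const uint16_t *>(data);
        uint16_t *values = static_cast<uint16_t *>(malloc(count * sizeof(uint16_t)));
        if (values == NULL) return NULL;          // caller maps this to NO_MEMORY
        memcpy(values, exif_data, count * sizeof(uint16_t));
        return values;                            // entry takes ownership
    }

    int main() {
        uint16_t src[3] = {1, 2, 3};
        uint16_t *copy = copyShortArray(src, 3);
        if (copy) { printf("%u %u %u\n", copy[0], copy[1], copy[2]); free(copy); }
        return 0;
    }
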
diff --git a/QCamera2/HAL/QCameraStateMachine.cpp b/QCamera2/HAL/QCameraStateMachine.cpp
index abc9f8d..a177010 100644
--- a/QCamera2/HAL/QCameraStateMachine.cpp
+++ b/QCamera2/HAL/QCameraStateMachine.cpp
@@ -131,8 +131,6 @@
     m_bDelayPreviewMsgs = false;
     m_DelayedMsgs = 0;
     m_RestoreZSL = TRUE;
-
-    m_bDisplayFrame = TRUE;
     m_bPreviewCallbackNeeded = TRUE;
 }
 
@@ -723,7 +721,7 @@
     case QCAMERA_SM_EVT_SNAPSHOT_DONE:
         {
             // No ops, but need to notify
-            LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+            LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
             result.status = rc;
             result.request_api = evt;
             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
@@ -744,14 +742,14 @@
                }
                break;
            default:
-               LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+               LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
                break;
            }
        }
        break;
     case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
     default:
-        LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+        LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
         break;
     }
 
@@ -1091,7 +1089,7 @@
     case QCAMERA_SM_EVT_SNAPSHOT_DONE:
         {
             // No ops, but need to notify
-            LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+            LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
             result.status = rc;
             result.request_api = evt;
             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
@@ -1107,7 +1105,7 @@
                rc = m_parent->mParameters.updateFlashMode(internal_evt->led_data);
                break;
            default:
-               LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+               LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
                break;
            }
        }
@@ -1115,7 +1113,7 @@
     case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
     case QCAMERA_SM_EVT_THERMAL_NOTIFY:
     default:
-        LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+        LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
         break;
     }
 
@@ -1671,7 +1669,7 @@
     case QCAMERA_SM_EVT_SNAPSHOT_DONE:
         {
             // No ops, but need to notify
-            LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+            LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
             result.status = rc;
             result.request_api = evt;
             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
@@ -1701,7 +1699,7 @@
        break;
     case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
     default:
-        LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+        LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
         break;
     }
 
@@ -1803,7 +1801,7 @@
                 rc = m_parent->processASDUpdate(internal_evt->asd_data);
                 break;
             case QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE:
-                LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+                LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
                 break;
             case QCAMERA_INTERNAL_EVT_AWB_UPDATE:
                 rc = m_parent->transAwbMetaToParams(internal_evt->awb_data);
@@ -1851,7 +1849,7 @@
     case QCAMERA_SM_EVT_SNAPSHOT_DONE:
         {
             // No ops, but need to notify
-            LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+            LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
             result.status = rc;
             result.request_api = evt;
             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
@@ -1865,7 +1863,7 @@
         break;
     case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
     default:
-        LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+        LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
         break;
     }
 
@@ -2188,7 +2186,7 @@
                 rc = m_parent->processASDUpdate(internal_evt->asd_data);
                 break;
             case QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE:
-                LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+                LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
                 break;
             case QCAMERA_INTERNAL_EVT_AWB_UPDATE:
                 rc = m_parent->transAwbMetaToParams(internal_evt->awb_data);
@@ -2316,7 +2314,7 @@
         }
         break;
     default:
-        LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+        LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
         break;
     }
 
@@ -2661,7 +2659,7 @@
                 rc = m_parent->processASDUpdate(internal_evt->asd_data);
                 break;
             case QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE:
-                LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+                LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
                 break;
             case QCAMERA_INTERNAL_EVT_AWB_UPDATE:
                 rc = m_parent->transAwbMetaToParams(internal_evt->awb_data);
@@ -2712,7 +2710,7 @@
     case QCAMERA_SM_EVT_SNAPSHOT_DONE:
         {
             // No ops, but need to notify
-            LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+            LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
             result.status = rc;
             result.request_api = evt;
             result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
@@ -2721,7 +2719,7 @@
        break;
     case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
     default:
-        LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+        LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
         break;
     }
 
@@ -3041,7 +3039,7 @@
                 rc = m_parent->processASDUpdate(internal_evt->asd_data);
                 break;
             case QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE:
-                LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+                LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
                 break;
             case QCAMERA_INTERNAL_EVT_AWB_UPDATE:
                 rc = m_parent->transAwbMetaToParams(internal_evt->awb_data);
@@ -3107,7 +3105,7 @@
         }
         break;
     default:
-        LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+        LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
         break;
     }
 
@@ -3550,7 +3548,7 @@
                 rc = m_parent->processASDUpdate(internal_evt->asd_data);
                 break;
             case QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE:
-                LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+                LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
                 break;
             case QCAMERA_INTERNAL_EVT_AWB_UPDATE:
                 rc = m_parent->transAwbMetaToParams(internal_evt->awb_data);
@@ -3663,7 +3661,7 @@
         }
        break;
     default:
-        LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+        LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
         break;
     }
 
diff --git a/QCamera2/HAL/QCameraStateMachine.h b/QCamera2/HAL/QCameraStateMachine.h
index 20c3145..b02ba06 100644
--- a/QCamera2/HAL/QCameraStateMachine.h
+++ b/QCamera2/HAL/QCameraStateMachine.h
@@ -174,7 +174,7 @@
         cam_faces_data_t faces_data;
         cam_hist_stats_t stats_data;
         cam_crop_data_t crop_data;
-        cam_auto_scene_t asd_data;
+        cam_asd_decision_t asd_data;
         cam_flash_mode_t led_data;
         cam_awb_params_t awb_data;
         cam_3a_params_t ae_data;
@@ -200,8 +200,6 @@
     bool isRecording();
     void releaseThread();
 
-    bool isDisplayFrameNeeded() { return m_bDisplayFrame; };
-    int32_t setDisplayFrame(bool enabled) {m_bDisplayFrame=enabled; return 0;};
     bool isPreviewCallbackNeeded() { return m_bPreviewCallbackNeeded; };
     int32_t setPreviewCallbackNeeded(bool enabled) {m_bPreviewCallbackNeeded=enabled; return 0;};
 private:
@@ -257,8 +255,6 @@
     bool m_bPreviewDelayedRestart;        // Preview delayed restart
     int32_t m_DelayedMsgs;
     bool m_RestoreZSL;
-
-    bool m_bDisplayFrame;
     bool m_bPreviewCallbackNeeded;
 };
 
diff --git a/QCamera2/HAL/test/Android.mk b/QCamera2/HAL/test/Android.mk
index 9ead9bd..50a186e 100644
--- a/QCamera2/HAL/test/Android.mk
+++ b/QCamera2/HAL/test/Android.mk
@@ -38,6 +38,7 @@
     external/skia/include/images \
     $(TARGET_OUT_HEADERS)/qcom/display \
     hardware/qcom/camera/QCamera2/stack/common \
+    hardware/qcom/camera/QCamera2/stack/mm-camera-interface/inc \
     frameworks/av/include/media/stagefright \
     frameworks/native/include/media/openmax \
     $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr/include
diff --git a/QCamera2/HAL/test/qcamera_test.cpp b/QCamera2/HAL/test/qcamera_test.cpp
index 714bab2..dd06c67 100644
--- a/QCamera2/HAL/test/qcamera_test.cpp
+++ b/QCamera2/HAL/test/qcamera_test.cpp
@@ -70,7 +70,9 @@
 #include <gralloc_priv.h>
 #include <math.h>
 
+#include "qcamera_test.h"
 #include "cam_types.h"
+#include "mm_camera_dbg.h"
 
 #define VIDEO_BUF_ALLIGN(size, allign) \
   (((size) + (allign-1)) & (typeof(size))(~(allign-1)))
@@ -197,7 +199,7 @@
         return INVALID_OPERATION;
     }
 
-    printf(" buffer=%p, size=%lld stored at %s\n",
+    printf("%s: buffer=%p, size=%lld stored at %s\n",
             __FUNCTION__, buff, (long long int) size, path.string());
 
     if (fd >= 0)
@@ -502,7 +504,7 @@
     fseek(fh, 0, SEEK_END);
     len = (size_t)ftell(fh);
     rewind(fh);
-    printf(" buffer=%p, size=%zu stored at %s\n",
+    printf("%s: buffer=%p, size=%zu stored at %s\n",
             __FUNCTION__, bitmap->getPixels(), len, path.string());
 
     free(mJEXIFSection.Data);
@@ -986,7 +988,7 @@
 
                     if (encodeJPEG(wStream, skBMDec, jpegPath) != false) {
                         printf("%s():%d:: Failed during jpeg encode\n",
-                                __FUNCTION__);
+                                __FUNCTION__,__LINE__);
                         mInterpr->PiPUnlock();
                         return;
                     }
@@ -1442,65 +1444,57 @@
  *==========================================================================*/
 void CameraContext::printSupportedParams()
 {
+    const char *camera_ids = mParams.get("camera-indexes");
+    const char *pic_sizes = mParams.get(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES);
+    const char *pic_formats = mParams.get(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS);
+    const char *preview_sizes = mParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES);
+    const char *video_sizes = mParams.get(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES);
+    const char *preview_formats = mParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS);
+    const char *frame_rates = mParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
+    const char *thumb_sizes = mParams.get(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES);
+    const char *wb_modes = mParams.get(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE);
+    const char *effects = mParams.get(CameraParameters::KEY_SUPPORTED_EFFECTS);
+    const char *scene_modes = mParams.get(CameraParameters::KEY_SUPPORTED_SCENE_MODES);
+    const char *focus_modes = mParams.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
+    const char *antibanding_modes = mParams.get(CameraParameters::KEY_SUPPORTED_ANTIBANDING);
+    const char *flash_modes = mParams.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES);
+    int focus_areas = mParams.getInt(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS);
+    const char *fps_ranges = mParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE);
+    const char *focus_distances = mParams.get(CameraParameters::KEY_FOCUS_DISTANCES);
+
     printf("\n\r\tSupported Cameras: %s",
-           mParams.get("camera-indexes")?
-               mParams.get("camera-indexes") : "NULL");
+           (camera_ids != NULL)? camera_ids : "NULL");
     printf("\n\r\tSupported Picture Sizes: %s",
-           mParams.get(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES)?
-           mParams.get(
-               CameraParameters::KEY_SUPPORTED_PICTURE_SIZES) : "NULL");
+           (pic_sizes != NULL)? pic_sizes : "NULL");
     printf("\n\r\tSupported Picture Formats: %s",
-           mParams.get(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS)?
-           mParams.get(
-               CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS) : "NULL");
+           (pic_formats != NULL)? pic_formats : "NULL");
     printf("\n\r\tSupported Preview Sizes: %s",
-           mParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES)?
-           mParams.get(
-               CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES) : "NULL");
+           (preview_sizes != NULL)? preview_sizes : "NULL");
     printf("\n\r\tSupported Video Sizes: %s",
-            mParams.get(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES)?
-            mParams.get(
-               CameraParameters::KEY_SUPPORTED_VIDEO_SIZES) : "NULL");
+            (video_sizes != NULL)? video_sizes : "NULL");
     printf("\n\r\tSupported Preview Formats: %s",
-           mParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS)?
-           mParams.get(
-               CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS) : "NULL");
+           (preview_formats != NULL)? preview_formats : "NULL");
     printf("\n\r\tSupported Preview Frame Rates: %s",
-           mParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES)?
-           mParams.get(
-               CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES) : "NULL");
+           (frame_rates != NULL)? frame_rates : "NULL");
     printf("\n\r\tSupported Thumbnail Sizes: %s",
-           mParams.get(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES)?
-           mParams.get(
-               CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES) : "NULL");
+           (thumb_sizes != NULL)? thumb_sizes : "NULL");
     printf("\n\r\tSupported Whitebalance Modes: %s",
-           mParams.get(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE)?
-           mParams.get(
-               CameraParameters::KEY_SUPPORTED_WHITE_BALANCE) : "NULL");
+           (wb_modes != NULL)? wb_modes : "NULL");
     printf("\n\r\tSupported Effects: %s",
-           mParams.get(CameraParameters::KEY_SUPPORTED_EFFECTS)?
-           mParams.get(CameraParameters::KEY_SUPPORTED_EFFECTS) : "NULL");
+           (effects != NULL)? effects : "NULL");
     printf("\n\r\tSupported Scene Modes: %s",
-           mParams.get(CameraParameters::KEY_SUPPORTED_SCENE_MODES)?
-           mParams.get(CameraParameters::KEY_SUPPORTED_SCENE_MODES) : "NULL");
+           (scene_modes != NULL)? scene_modes : "NULL");
     printf("\n\r\tSupported Focus Modes: %s",
-           mParams.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES)?
-           mParams.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES) : "NULL");
+           (focus_modes != NULL)? focus_modes : "NULL");
     printf("\n\r\tSupported Antibanding Options: %s",
-           mParams.get(CameraParameters::KEY_SUPPORTED_ANTIBANDING)?
-           mParams.get(CameraParameters::KEY_SUPPORTED_ANTIBANDING) : "NULL");
+           (antibanding_modes != NULL)? antibanding_modes : "NULL");
     printf("\n\r\tSupported Flash Modes: %s",
-           mParams.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES)?
-           mParams.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES) : "NULL");
-    printf("\n\r\tSupported Focus Areas: %d",
-           mParams.getInt(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS));
+           (flash_modes != NULL)? flash_modes : "NULL");
+    printf("\n\r\tSupported Focus Areas: %d", focus_areas);
     printf("\n\r\tSupported FPS ranges : %s",
-           mParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE)?
-           mParams.get(
-               CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE) : "NULL");
+           (fps_ranges != NULL)? fps_ranges : "NULL");
     printf("\n\r\tFocus Distances: %s \n",
-           mParams.get(CameraParameters::KEY_FOCUS_DISTANCES)?
-           mParams.get(CameraParameters::KEY_FOCUS_DISTANCES) : "NULL");
+           (focus_distances != NULL)? focus_distances : "NULL");
 }
 
 /*===========================================================================
@@ -1646,7 +1640,7 @@
     mPreviewSurface(NULL),
     mInUse(false)
 {
-    mRecorder = new MediaRecorder();
+    mRecorder = new MediaRecorder(String16("camera"));
 }
 
 /*===========================================================================
@@ -1713,6 +1707,7 @@
 
     if ( NULL != mCamera.get() ) {
         printf("Camera already open! \n");
+        signalFinished();
         return NO_ERROR;
     }
 
@@ -1734,6 +1729,7 @@
 
     if ( NULL == mCamera.get() ) {
         printf("Unable to connect to CameraService\n");
+        signalFinished();
         return NO_INIT;
     }
 
@@ -2140,35 +2136,35 @@
     ret = mRecorder->setParameters(
         String8("video-param-encoding-bitrate=64000"));
     if ( ret != NO_ERROR ) {
-        ERROR("Could not configure recorder (%d)", ret);
+        LOGE("Could not configure recorder (%d)", ret);
         return ret;
     }
 
     ret = mRecorder->setCamera(
         mCamera->remote(), mCamera->getRecordingProxy());
     if ( ret != NO_ERROR ) {
-        ERROR("Could not set camera (%d)", ret);
+        LOGE("Could not set camera (%d)", ret);
         return ret;
     }
     ret = mRecorder->setVideoSource(VIDEO_SOURCE_CAMERA);
     if ( ret != NO_ERROR ) {
-        ERROR("Could not set video soruce (%d)", ret);
+        LOGE("Could not set video soruce (%d)", ret);
         return ret;
     }
     ret = mRecorder->setAudioSource(AUDIO_SOURCE_DEFAULT);
     if ( ret != NO_ERROR ) {
-        ERROR("Could not set audio source (%d)", ret);
+        LOGE("Could not set audio source (%d)", ret);
         return ret;
     }
     ret = mRecorder->setOutputFormat(OUTPUT_FORMAT_DEFAULT);
     if ( ret != NO_ERROR ) {
-        ERROR("Could not set output format (%d)", ret);
+        LOGE("Could not set output format (%d)", ret);
         return ret;
     }
 
     ret = mRecorder->setVideoEncoder(VIDEO_ENCODER_DEFAULT);
     if ( ret != NO_ERROR ) {
-        ERROR("Could not set video encoder (%d)", ret);
+        LOGE("Could not set video encoder (%d)", ret);
         return ret;
     }
 
@@ -2183,32 +2179,32 @@
     }
 
     if ( mVideoFd < 0 ) {
-        ERROR("Could not open video file for writing %s!", fileName);
+        LOGE("Could not open video file for writing %s!", fileName);
         return UNKNOWN_ERROR;
     }
 
     ret = mRecorder->setOutputFile(mVideoFd, 0, 0);
     if ( ret != NO_ERROR ) {
-        ERROR("Could not set output file (%d)", ret);
+        LOGE("Could not set output file (%d)", ret);
         return ret;
     }
 
     ret = mRecorder->setVideoSize(videoSize.width, videoSize.height);
     if ( ret  != NO_ERROR ) {
-        ERROR("Could not set video size %dx%d", videoSize.width,
+        LOGE("Could not set video size %dx%d", videoSize.width,
             videoSize.height);
         return ret;
     }
 
     ret = mRecorder->setVideoFrameRate(30);
     if ( ret != NO_ERROR ) {
-        ERROR("Could not set video frame rate (%d)", ret);
+        LOGE("Could not set video frame rate (%d)", ret);
         return ret;
     }
 
     ret = mRecorder->setAudioEncoder(AUDIO_ENCODER_DEFAULT);
     if ( ret != NO_ERROR ) {
-        ERROR("Could not set audio encoder (%d)", ret);
+        LOGE("Could not set audio encoder (%d)", ret);
         return ret;
     }
 
@@ -2290,13 +2286,13 @@
 
         ret = mRecorder->prepare();
         if ( ret != NO_ERROR ) {
-            ERROR("Could not prepare recorder");
+            LOGE("Could not prepare recorder");
             return ret;
         }
 
         ret = mRecorder->start();
         if ( ret != NO_ERROR ) {
-            ERROR("Could not start recorder");
+            LOGE("Could not start recorder");
             return ret;
         }
 
@@ -2781,6 +2777,7 @@
 {
     if ( !mDoPrintMenu ) return;
     Size currentPictureSize, currentPreviewSize, currentVideoSize;
+    const char *zsl_mode = mParams.get(CameraContext::KEY_ZSL);
 
     assert(currentCamera.get());
 
@@ -2839,8 +2836,8 @@
             Interpreter::CHANGE_PICTURE_SIZE_CMD,
             currentPictureSize.width,
             currentPictureSize.height);
-    printf("   %c. zsl:  %s\n", Interpreter::ZSL_CMD, mParams.get(CameraContext::KEY_ZSL) ?
-            mParams.get(CameraContext::KEY_ZSL) : "NULL");
+    printf("   %c. zsl:  %s\n", Interpreter::ZSL_CMD,
+        (zsl_mode != NULL) ? zsl_mode : "NULL");
 
     printf("\n   Choice: ");
 }
@@ -2966,8 +2963,13 @@
         format->setInt32("width", mTestContext->mViVVid.VideoSizes[1].width);
         format->setInt32("height", mTestContext->mViVVid.VideoSizes[1].height);
     }
+    int fd = open(fileName, O_CREAT | O_RDWR, 0644);
+    if (fd < 0) {
+        LOGE("Error opening file");
+        return UNKNOWN_ERROR;
+    }
     mTestContext->mViVVid.muxer = new MediaMuxer(
-        fileName, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
+        fd, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
 
     format->setString("mime", "video/avc");
     format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque);
@@ -3250,10 +3252,13 @@
         }
         camera[i]->setTestCtxInstance(this);
 
-        status_t stat = camera[i]->openCamera();
-        if ( NO_ERROR != stat ) {
-            printf("Error encountered Openging camera id : %d\n", i);
-            break;
+        // By default open only the back camera
+        if (i == 0) {
+            status_t stat = camera[i]->openCamera();
+            if ( NO_ERROR != stat ) {
+                printf("Error encountered Openging camera id : %d\n", i);
+                break;
+            }
         }
         mAvailableCameras.add(camera[i]);
         i++;
@@ -3366,8 +3371,11 @@
         mInterpreter->setTestCtxInst(this);
     }
 
-
-    mTestRunning = true;
+    if (mAvailableCameras.size() == 0) {
+        printf("no cameras supported... exiting test app\n");
+    } else {
+        mTestRunning = true;
+    }
 
     while (mTestRunning) {
         sp<CameraContext> currentCamera =
@@ -3382,6 +3390,7 @@
             mCurrentCameraIndex++;
             mCurrentCameraIndex %= mAvailableCameras.size();
             currentCamera = mAvailableCameras.itemAt(mCurrentCameraIndex);
+            stat = currentCamera->openCamera();
         }
             break;
 
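The printSupportedParams() cleanup earlier in this file caches each CameraParameters::get() result once and null-guards it before printing; a rough standalone equivalent (using std::map as a stand-in for CameraParameters):

    // Standalone illustration of the query-once, null-guard printing pattern.
    #include <cstdio>
    #include <map>
    #include <string>

    static const char *getOrNull(const std::map<std::string, std::string> &params,
                                 const char *key) {
        auto it = params.find(key);
        return (it != params.end()) ? it->second.c_str() : NULL;
    }

    int main() {
        std::map<std::string, std::string> params = {
            {"preview-size-values", "1920x1080,1280x720"},
        };
        const char *preview_sizes = getOrNull(params, "preview-size-values");
        const char *video_sizes   = getOrNull(params, "video-size-values");
        printf("Supported Preview Sizes: %s\n",
               (preview_sizes != NULL) ? preview_sizes : "NULL");
        printf("Supported Video Sizes: %s\n",
               (video_sizes != NULL) ? video_sizes : "NULL");
        return 0;
    }
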
diff --git a/QCamera2/HAL/test/qcamera_test.h b/QCamera2/HAL/test/qcamera_test.h
new file mode 100644
index 0000000..b8c5998
--- /dev/null
+++ b/QCamera2/HAL/test/qcamera_test.h
@@ -0,0 +1,361 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef QCAMERA_TEST_H
+#define QCAMERA_TEST_H
+
+#include <SkData.h>
+#include <SkBitmap.h>
+#include <SkStream.h>
+
+namespace qcamera {
+
+using namespace android;
+
+#define MAX_CAM_INSTANCES 3
+
+class TestContext;
+
+class CameraContext : public CameraListener,
+    public ICameraRecordingProxyListener{
+public:
+    typedef enum {
+        READ_METADATA = 1,
+        READ_IMAGE = 2,
+        READ_ALL = 3
+    } ReadMode_t;
+
+    // This structure is used to store jpeg file sections in memory.
+    typedef struct {
+        unsigned char *  Data;
+        int      Type;
+        size_t   Size;
+    } Sections_t;
+
+public:
+    static const char KEY_ZSL[];
+
+    CameraContext(int cameraIndex);
+    virtual ~CameraContext();
+
+
+
+    status_t openCamera();
+    status_t closeCamera();
+
+    status_t startPreview();
+    status_t stopPreview();
+    status_t resumePreview();
+    status_t autoFocus();
+    status_t enablePreviewCallbacks();
+    status_t takePicture();
+    status_t startRecording();
+    status_t stopRecording();
+    status_t startViVRecording();
+    status_t stopViVRecording();
+    status_t configureViVRecording();
+
+    status_t nextPreviewSize();
+    status_t setPreviewSize(const char *format);
+    status_t getCurrentPreviewSize(Size &previewSize);
+
+    status_t nextPictureSize();
+    status_t getCurrentPictureSize(Size &pictureSize);
+    status_t setPictureSize(const char *format);
+
+    status_t nextVideoSize();
+    status_t setVideoSize(const char *format);
+    status_t getCurrentVideoSize(Size &videoSize);
+    status_t configureRecorder();
+    status_t unconfigureRecorder();
+    Sections_t *FindSection(int SectionType);
+    status_t ReadSectionsFromBuffer (unsigned char *buffer,
+            size_t buffer_size, ReadMode_t ReadMode);
+    virtual IBinder* onAsBinder();
+    void setTestCtxInstance(TestContext *instance);
+
+    void printMenu(sp<CameraContext> currentCamera);
+    void printSupportedParams();
+    const char *getZSL();
+    void setZSL(const char *value);
+
+
+    int getCameraIndex() { return mCameraIndex; }
+    int getNumberOfCameras();
+    void enablePrintPreview();
+    void disablePrintPreview();
+    void enablePiPCapture();
+    void disablePiPCapture();
+    void CheckSectionsAllocated();
+    void DiscardData();
+    void DiscardSections();
+    size_t calcBufferSize(int width, int height);
+    size_t calcStride(int width);
+    size_t calcYScanLines(int height);
+    size_t calcUVScanLines(int height);
+
+    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
+    virtual void postData(int32_t msgType,
+            const sp<IMemory>& dataPtr,
+            camera_frame_metadata_t *metadata);
+
+    virtual void postDataTimestamp(nsecs_t timestamp,
+            int32_t msgType,
+            const sp<IMemory>& dataPtr);
+    virtual void dataCallbackTimestamp(nsecs_t timestamp,
+            int32_t msgType,
+            const sp<IMemory>& dataPtr);
+
+private:
+
+    status_t createPreviewSurface(int width, int height, int32_t pixFormat);
+    status_t destroyPreviewSurface();
+
+    status_t saveFile(const sp<IMemory>& mem, String8 path);
+    SkBitmap * PiPCopyToOneFile(SkBitmap *bitmap0, SkBitmap *bitmap1);
+    status_t decodeJPEG(const sp<IMemory>& mem, SkBitmap *skBM);
+    status_t encodeJPEG(SkWStream * stream, const SkBitmap *bitmap,
+        String8 path);
+    void previewCallback(const sp<IMemory>& mem);
+
+    static int JpegIdx;
+    int mCameraIndex;
+    bool mResizePreview;
+    bool mHardwareActive;
+    bool mPreviewRunning;
+    bool mRecordRunning;
+    int  mVideoFd;
+    int  mVideoIdx;
+    bool mRecordingHint;
+    bool mDoPrintMenu;
+    bool mPiPCapture;
+    static int mPiPIdx;
+    unsigned int mfmtMultiplier;
+    int mWidthTmp;
+    int mHeightTmp;
+    size_t mSectionsRead;
+    size_t mSectionsAllocated;
+    Sections_t * mSections;
+    Sections_t * mJEXIFTmp;
+    Sections_t mJEXIFSection;
+    int mHaveAll;
+    TestContext *mInterpr;
+
+    sp<Camera> mCamera;
+    sp<SurfaceComposerClient> mClient;
+    sp<SurfaceControl> mSurfaceControl;
+    sp<Surface> mPreviewSurface;
+    sp<MediaRecorder> mRecorder;
+    CameraParameters mParams;
+    SkBitmap *skBMDec;
+    SkImageEncoder* skJpegEnc;
+    SkBitmap skBMtmp;
+    sp<IMemory> PiPPtrTmp;
+
+    size_t mCurrentPreviewSizeIdx;
+    Size getPreviewSizeFromVideoSizes(Size currentVideoSize);
+    size_t mCurrentPictureSizeIdx;
+    size_t mCurrentVideoSizeIdx;
+    Vector<Size> mSupportedPreviewSizes;
+    Vector<Size> mSupportedPictureSizes;
+    Vector<Size> mSupportedVideoSizes;
+
+    bool mInUse;
+    Mutex mLock;
+    Condition mCond;
+
+    void useLock();
+    void signalFinished();
+
+    //------------------------------------------------------------------------
+    // JPEG markers consist of one or more 0xFF bytes, followed by a marker
+    // code byte (which is not an FF).  Here are the marker codes of interest
+    // in this program.  (See jdmarker.c for a more complete list.)
+    //------------------------------------------------------------------------
+    #define M_SOF0  0xC0          // Start Of Frame N
+    #define M_SOF1  0xC1          // N indicates which compression process
+    #define M_SOF2  0xC2          // Only SOF0-SOF2 are now in common use
+    #define M_SOF3  0xC3
+    #define M_SOF5  0xC5          // NB: codes C4 and CC are NOT SOF markers
+    #define M_SOF6  0xC6
+    #define M_SOF7  0xC7
+    #define M_SOF9  0xC9
+    #define M_SOF10 0xCA
+    #define M_SOF11 0xCB
+    #define M_SOF13 0xCD
+    #define M_SOF14 0xCE
+    #define M_SOF15 0xCF
+    #define M_SOI   0xD8          // Start Of Image (beginning of datastream)
+    #define M_EOI   0xD9          // End Of Image (end of datastream)
+    #define M_SOS   0xDA          // Start Of Scan (begins compressed data)
+    #define M_JFIF  0xE0          // Jfif marker
+    #define M_EXIF  0xE1          // Exif marker.  Also used for XMP data!
+    #define M_XMP   0x10E1        // Not a real tag same value as Exif!
+    #define M_COM   0xFE          // COMment
+    #define M_DQT   0xDB
+    #define M_DHT   0xC4
+    #define M_DRI   0xDD
+    #define M_IPTC  0xED          // IPTC marker
+    #define PSEUDO_IMAGE_MARKER 0x123  // Extra value.
+};
+
+class Interpreter
+{
+public:
+    enum Commands_e {
+        SWITCH_CAMERA_CMD = 'A',
+        RESUME_PREVIEW_CMD = '[',
+        START_PREVIEW_CMD = '1',
+        STOP_PREVIEW_CMD = '2',
+        CHANGE_VIDEO_SIZE_CMD = '3',
+        CHANGE_PREVIEW_SIZE_CMD = '4',
+        CHANGE_PICTURE_SIZE_CMD = '5',
+        START_RECORD_CMD = '6',
+        STOP_RECORD_CMD = '7',
+        START_VIV_RECORD_CMD = '8',
+        STOP_VIV_RECORD_CMD = '9',
+        DUMP_CAPS_CMD = 'E',
+        AUTOFOCUS_CMD = 'f',
+        TAKEPICTURE_CMD = 'p',
+        TAKEPICTURE_IN_PICTURE_CMD = 'P',
+        ENABLE_PRV_CALLBACKS_CMD = '&',
+        EXIT_CMD = 'q',
+        DELAY = 'd',
+        ZSL_CMD = 'z',
+        INVALID_CMD = '0'
+    };
+
+    struct Command {
+        Command( Commands_e cmd_, char *arg_ = NULL)
+        : cmd(cmd_)
+        , arg(arg_) {}
+        Command()
+        : cmd(INVALID_CMD)
+        , arg(NULL) {}
+        Commands_e cmd;
+        char *arg;
+    };
+
+    /* API */
+    Interpreter()
+    : mUseScript(false)
+    , mScript(NULL) {}
+
+    Interpreter(const char *file);
+    ~Interpreter();
+
+    Command getCommand(sp<CameraContext> currentCamera);
+    void releasePiPBuff();
+    status_t configureViVCodec();
+    void setViVSize(Size VideoSize, int camIndex);
+    void setTestCtxInst(TestContext *instance);
+    status_t unconfigureViVCodec();
+    status_t ViVEncoderThread();
+    void ViVEncode();
+    static void *ThreadWrapper(void *context);
+
+private:
+    static const int numberOfCommands;
+
+    bool mUseScript;
+    size_t mCmdIndex;
+    char *mScript;
+    Vector<Command> mCommands;
+    TestContext *mTestContext;
+    pthread_t mViVEncThread;
+};
+
+class TestContext
+{
+    friend class CameraContext;
+    friend class Interpreter;
+public:
+    TestContext();
+    ~TestContext();
+
+    size_t GetCamerasNum();
+    status_t FunctionalTest();
+    status_t AddScriptFromFile(const char *scriptFile);
+    void setViVSize(Size VideoSize, int camIndex);
+    void PiPLock();
+    void PiPUnlock();
+    void ViVLock();
+    void ViVUnlock();
+
+private:
+    sp<CameraContext> camera[MAX_CAM_INSTANCES];
+    char GetNextCmd(sp<qcamera::CameraContext> currentCamera);
+    size_t mCurrentCameraIndex;
+    size_t mSaveCurrentCameraIndex;
+    Vector< sp<qcamera::CameraContext> > mAvailableCameras;
+    bool mTestRunning;
+    Interpreter *mInterpreter;
+    Mutex mPiPLock;
+    Condition mPiPCond;
+    bool mPiPinUse;
+    Mutex mViVLock;
+    Condition mViVCond;
+    bool mViVinUse;
+    bool mIsZSLOn;
+
+    typedef struct ViVBuff_t{
+        void *buff;
+        size_t buffSize;
+        size_t YStride;
+        size_t UVStride;
+        size_t YScanLines;
+        size_t UVScanLines;
+        size_t srcWidth;
+        size_t srcHeight;
+    } ViVBuff_t;
+
+    typedef struct ViVVid_t{
+        sp<IGraphicBufferProducer> bufferProducer;
+        sp<Surface> surface;
+        sp<MediaCodec> codec;
+        sp<MediaMuxer> muxer;
+        sp<ANativeWindow> ANW;
+        Vector<sp<ABuffer> > buffers;
+        Size VideoSizes[2];
+        int ViVIdx;
+        size_t buff_cnt;
+        sp<GraphicBuffer> graphBuf;
+        void * mappedBuff;
+        bool isBuffValid;
+        int sourceCameraID;
+        int destinationCameraID;
+    } vidPiP_t;
+
+    ViVVid_t mViVVid;
+    ViVBuff_t mViVBuff;
+};
+
+}; //namespace qcamera
+
+#endif
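The marker constants declared in the new header are used to walk JPEG sections; a simplified, standalone scanner over the standard 0xFF-marker-length layout (not the test app's actual parser) could look like:

    // Standalone illustration: list JPEG segment markers and their lengths.
    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    static void listMarkers(const uint8_t *buf, size_t len) {
        size_t pos = 2;                       // skip SOI (0xFF 0xD8)
        while (pos + 3 < len) {
            if (buf[pos] != 0xFF) break;      // not positioned on a marker
            uint8_t marker = buf[pos + 1];
            if (marker == 0xDA || marker == 0xD9) break;  // SOS/EOI: stop
            size_t seglen = (size_t)((buf[pos + 2] << 8) | buf[pos + 3]);
            printf("marker 0x%02X, length %zu\n", marker, seglen);
            pos += 2 + seglen;                // length includes its own 2 bytes
        }
    }

    int main() {
        const uint8_t jpeg[] = {0xFF, 0xD8, 0xFF, 0xE1, 0x00, 0x04, 0x00, 0x00,
                                0xFF, 0xDA};
        listMarkers(jpeg, sizeof(jpeg));
        return 0;
    }
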
diff --git a/QCamera2/HAL3/QCamera3HWI.cpp b/QCamera2/HAL3/QCamera3HWI.cpp
index 87a1c86..5a3f9f5 100755
--- a/QCamera2/HAL3/QCamera3HWI.cpp
+++ b/QCamera2/HAL3/QCamera3HWI.cpp
@@ -88,9 +88,9 @@
 #define REGIONS_TUPLE_COUNT    5
 #define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
 #define BURST_REPROCESS_PERF_TIME_OUT  (1000) // milliseconds
-
+// Threshold (in seconds) for detecting missing request buffers
+#define MISSING_REQUEST_BUF_TIMEOUT 3
 #define FLUSH_TIMEOUT 3
-
 #define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
 
 #define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
@@ -538,7 +538,10 @@
     if (mState != CLOSED)
         closeCamera();
 
-    mPendingBuffersMap.mPendingBufferList.clear();
+    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
+        req.mPendingBufferList.clear();
+    }
+    mPendingBuffersMap.mPendingBuffersInRequest.clear();
     mPendingReprocessResultList.clear();
     for (pendingRequestIterator i = mPendingRequestsList.begin();
             i != mPendingRequestsList.end();) {
@@ -1192,19 +1195,23 @@
 {
     // Mark all pending buffers for this particular request
     // with corresponding framerate information
-    for (List<PendingBufferInfo>::iterator j =
-            mPendingBuffersMap.mPendingBufferList.begin();
-            j != mPendingBuffersMap.mPendingBufferList.end(); j++) {
-        QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
-        if ((j->frame_number == frame_number) &&
+    for (List<PendingBuffersInRequest>::iterator req =
+            mPendingBuffersMap.mPendingBuffersInRequest.begin();
+            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
+        for(List<PendingBufferInfo>::iterator j =
+                req->mPendingBufferList.begin();
+                j != req->mPendingBufferList.end(); j++) {
+            QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
+            if ((req->frame_number == frame_number) &&
                 (channel->getStreamTypeMask() &
                 (1U << CAM_STREAM_TYPE_PREVIEW))) {
-            IF_META_AVAILABLE(cam_fps_range_t, float_range,
+                IF_META_AVAILABLE(cam_fps_range_t, float_range,
                     CAM_INTF_PARM_FPS_RANGE, metadata) {
-                int32_t cameraFps = float_range->max_fps;
-                struct private_handle_t *priv_handle =
+                    int32_t cameraFps = float_range->max_fps;
+                    struct private_handle_t *priv_handle =
                         (struct private_handle_t *)(*(j->buffer));
-                setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
+                    setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
+                }
             }
         }
     }
@@ -2152,15 +2159,18 @@
     mStreamConfigInfo.buffer_info.max_buffers =
             m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
 
-    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
+    /* Initialize mPendingRequestInfo and mPendingBuffersMap */
     for (pendingRequestIterator i = mPendingRequestsList.begin();
             i != mPendingRequestsList.end();) {
         i = erasePendingRequest(i);
     }
     mPendingFrameDropList.clear();
     // Initialize/Reset the pending buffers list
-    mPendingBuffersMap.num_buffers = 0;
-    mPendingBuffersMap.mPendingBufferList.clear();
+    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
+        req.mPendingBufferList.clear();
+    }
+    mPendingBuffersMap.mPendingBuffersInRequest.clear();
+
     mPendingReprocessResultList.clear();
 
     mCurJpegMeta.clear();
@@ -2383,6 +2393,47 @@
 }
 
 /*===========================================================================
+ * FUNCTION   : handleBuffersDuringFlushLock
+ *
+ * DESCRIPTION: Account for buffers returned from back-end during flush
+ *              This function is executed while mMutex is held by the caller.
+ *
+ * PARAMETERS :
+ *   @buffer: image buffer for the callback
+ *
+ * RETURN     :
+ *==========================================================================*/
+void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
+{
+    bool buffer_found = false;
+    for (List<PendingBuffersInRequest>::iterator req =
+            mPendingBuffersMap.mPendingBuffersInRequest.begin();
+            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
+        for (List<PendingBufferInfo>::iterator i =
+                req->mPendingBufferList.begin();
+                i != req->mPendingBufferList.end(); i++) {
+            if (i->buffer == buffer->buffer) {
+                mPendingBuffersMap.numPendingBufsAtFlush--;
+                LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
+                    buffer->buffer, req->frame_number,
+                    mPendingBuffersMap.numPendingBufsAtFlush);
+                buffer_found = true;
+                break;
+            }
+        }
+        if (buffer_found) {
+            break;
+        }
+    }
+    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
+        //signal the flush()
+        LOGD("All buffers returned to HAL. Continue flush");
+        pthread_cond_signal(&mBuffersCond);
+    }
+}
+
+
+/*===========================================================================
  * FUNCTION   : handlePendingReprocResults
  *
  * DESCRIPTION: check and notify on any pending reprocess results
@@ -2625,6 +2676,7 @@
     int32_t frame_number_valid, urgent_frame_number_valid;
     uint32_t frame_number, urgent_frame_number;
     int64_t capture_time;
+    nsecs_t currentSysTime;
 
     int32_t *p_frame_number_valid =
             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
@@ -2648,12 +2700,25 @@
             free(metadata_buf);
         }
         goto done_metadata;
-    } else {
-        frame_number_valid = *p_frame_number_valid;
-        frame_number = *p_frame_number;
-        capture_time = *p_capture_time;
-        urgent_frame_number_valid = *p_urgent_frame_number_valid;
-        urgent_frame_number = *p_urgent_frame_number;
+    }
+    frame_number_valid =        *p_frame_number_valid;
+    frame_number =              *p_frame_number;
+    capture_time =              *p_capture_time;
+    urgent_frame_number_valid = *p_urgent_frame_number_valid;
+    urgent_frame_number =       *p_urgent_frame_number;
+    currentSysTime =            systemTime(CLOCK_MONOTONIC);
+
+    // Detect if buffers from any requests are overdue
+    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
+        if ( (currentSysTime - req.timestamp) >
+            s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
+            for (auto &missed : req.mPendingBufferList) {
+                LOGE("Current frame: %d. Missing: frame = %d, buffer = %p,"
+                    "stream type = %d, stream format = %d",
+                    frame_number, req.frame_number, missed.buffer,
+                    missed.stream->stream_type, missed.stream->format);
+            }
+        }
     }
     //Partial result on process_capture_result for timestamp
     if (urgent_frame_number_valid) {
@@ -2854,54 +2919,42 @@
         if (result.num_output_buffers > 0) {
             camera3_stream_buffer_t *result_buffers =
                 new camera3_stream_buffer_t[result.num_output_buffers];
-            if (!result_buffers) {
+            if (result_buffers != NULL) {
+                size_t result_buffers_idx = 0;
+                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
+                        j != i->buffers.end(); j++) {
+                    if (j->buffer) {
+                        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
+                                m != mPendingFrameDropList.end(); m++) {
+                            QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
+                            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
+                            if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
+                                j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
+                                LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
+                                        frame_number, streamID);
+                                m = mPendingFrameDropList.erase(m);
+                                break;
+                            }
+                        }
+                        mPendingBuffersMap.removeBuf(j->buffer->buffer);
+                        result_buffers[result_buffers_idx++] = *(j->buffer);
+                        free(j->buffer);
+                        j->buffer = NULL;
+                    }
+                }
+                result.output_buffers = result_buffers;
+                mCallbackOps->process_capture_result(mCallbackOps, &result);
+                LOGD("meta frame_number = %u, capture_time = %lld",
+                        result.frame_number, i->timestamp);
+                free_camera_metadata((camera_metadata_t *)result.result);
+                delete[] result_buffers;
+            } else {
                 LOGE("Fatal error: out of memory");
             }
-            size_t result_buffers_idx = 0;
-            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
-                    j != i->buffers.end(); j++) {
-                if (j->buffer) {
-                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
-                            m != mPendingFrameDropList.end(); m++) {
-                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
-                        uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
-                        if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
-                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
-                            LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
-                                   frame_number, streamID);
-                            m = mPendingFrameDropList.erase(m);
-                            break;
-                        }
-                    }
-
-                    for (List<PendingBufferInfo>::iterator k =
-                      mPendingBuffersMap.mPendingBufferList.begin();
-                      k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
-                      if (k->buffer == j->buffer->buffer) {
-                        LOGD("Found buffer %p in pending buffer List "
-                              "for frame %u, Take it out!!",
-                               k->buffer, k->frame_number);
-                        mPendingBuffersMap.num_buffers--;
-                        k = mPendingBuffersMap.mPendingBufferList.erase(k);
-                        break;
-                      }
-                    }
-
-                    result_buffers[result_buffers_idx++] = *(j->buffer);
-                    free(j->buffer);
-                    j->buffer = NULL;
-                }
-            }
-            result.output_buffers = result_buffers;
-            mCallbackOps->process_capture_result(mCallbackOps, &result);
-            LOGD("meta frame_number = %u, capture_time = %lld",
-                  result.frame_number, i->timestamp);
-            free_camera_metadata((camera_metadata_t *)result.result);
-            delete[] result_buffers;
         } else {
             mCallbackOps->process_capture_result(mCallbackOps, &result);
             LOGD("meta frame_number = %u, capture_time = %lld",
-                  result.frame_number, i->timestamp);
+                    result.frame_number, i->timestamp);
             free_camera_metadata((camera_metadata_t *)result.result);
         }
 
@@ -2951,8 +3004,9 @@
     }
 
     //acquire perf lock for 5 sec after the last HDR frame is captured
-    if (*p_frame_number_valid) {
-        if (mLastCustIntentFrmNum == (int32_t)*p_frame_number) {
+    if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
+        if ((p_frame_number != NULL) &&
+                (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
             m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
         }
     }
@@ -3048,32 +3102,14 @@
     camera3_stream_buffer_t *buffer, uint32_t frame_number)
 {
     ATRACE_CALL();
-    if (mFlushPerf) {
-        // flush case
-        //go through the pending buffers and mark them as returned.
-        LOGD("Handle buffer with lock called during flush");
-        for (List<PendingBufferInfo>::iterator i =
-                mPendingBuffersMap.mPendingBufferList.begin();
-                i != mPendingBuffersMap.mPendingBufferList.end(); i++) {
-            if (i->buffer == buffer->buffer) {
-                mPendingBuffersMap.num_buffers--;
-                LOGD("Found Frame buffer, updated num_buffers %d, ",
-                         mPendingBuffersMap.num_buffers);
-                break;
-            }
-        }
-        if (mPendingBuffersMap.num_buffers == 0) {
-            //signal the flush()
-            LOGD("All buffers returned to HAL continue flush");
-            pthread_cond_signal(&mBuffersCond);
-        }
-        return;
-    }
     /* Nothing to be done during error state */
     if ((ERROR == mState) || (DEINIT == mState)) {
         return;
     }
-
+    if (mFlushPerf) {
+        handleBuffersDuringFlushLock(buffer);
+        return;
+    }
     //not in flush
     // If the frame number doesn't exist in the pending request list,
     // directly send the buffer to the frameworks, and update pending buffers map
@@ -3113,19 +3149,7 @@
         LOGH("result frame_number = %d, buffer = %p",
                  frame_number, buffer->buffer);
 
-        for (List<PendingBufferInfo>::iterator k =
-                mPendingBuffersMap.mPendingBufferList.begin();
-                k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
-            if (k->buffer == buffer->buffer) {
-                LOGD("Found Frame buffer, take it out from list");
-
-                mPendingBuffersMap.num_buffers--;
-                k = mPendingBuffersMap.mPendingBufferList.erase(k);
-                break;
-            }
-        }
-        LOGD("mPendingBuffersMap.num_buffers = %d",
-             mPendingBuffersMap.num_buffers);
+        mPendingBuffersMap.removeBuf(buffer->buffer);
 
         mCallbackOps->process_capture_result(mCallbackOps, &result);
     } else {
@@ -3156,20 +3180,7 @@
                    LOGE("input buffer sync wait failed %d", rc);
                }
             }
-
-            for (List<PendingBufferInfo>::iterator k =
-                    mPendingBuffersMap.mPendingBufferList.begin();
-                    k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
-                if (k->buffer == buffer->buffer) {
-                    LOGD("Found Frame buffer, take it out from list");
-
-                    mPendingBuffersMap.num_buffers--;
-                    k = mPendingBuffersMap.mPendingBufferList.erase(k);
-                    break;
-                }
-            }
-            LOGD("mPendingBuffersMap.num_buffers = %d",
-                 mPendingBuffersMap.num_buffers);
+            mPendingBuffersMap.removeBuf(buffer->buffer);
 
             bool notifyNow = true;
             for (pendingRequestIterator j = mPendingRequestsList.begin();
@@ -3214,8 +3225,8 @@
                         j->buffer = (camera3_stream_buffer_t *)malloc(
                             sizeof(camera3_stream_buffer_t));
                         *(j->buffer) = *buffer;
-                        LOGH("cache buffer %p at result frame_number %d",
-                             buffer, frame_number);
+                        LOGH("cache buffer %p at result frame_number %u",
+                             buffer->buffer, frame_number);
                     }
                 }
             }
@@ -3779,6 +3790,11 @@
     }
     pendingRequest.fwkCacMode = mCacMode;
 
+    PendingBuffersInRequest bufsForCurRequest;
+    bufsForCurRequest.frame_number = frameNumber;
+    // Mark current timestamp for the new request
+    bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
+
     for (size_t i = 0; i < request->num_output_buffers; i++) {
         RequestedBufferInfo requestedBuf;
         memset(&requestedBuf, 0, sizeof(requestedBuf));
@@ -3788,17 +3804,19 @@
 
         // Add to buffer handle the pending buffers list
         PendingBufferInfo bufferInfo;
-        bufferInfo.frame_number = frameNumber;
         bufferInfo.buffer = request->output_buffers[i].buffer;
         bufferInfo.stream = request->output_buffers[i].stream;
-        mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
-        mPendingBuffersMap.num_buffers++;
+        bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
         QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
         LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
-                 frameNumber, bufferInfo.buffer,
-                channel->getStreamTypeMask(), bufferInfo.stream->format);
+            frameNumber, bufferInfo.buffer,
+            channel->getStreamTypeMask(), bufferInfo.stream->format);
     }
-    LOGD("mPendingBuffersMap.num_buffers = %d", mPendingBuffersMap.num_buffers);
+    // Add this request packet into mPendingBuffersMap
+    mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
+    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
+        mPendingBuffersMap.get_num_overall_buffers());
+
     latestRequest = mPendingRequestsList.insert(
             mPendingRequestsList.end(), pendingRequest);
     if(mFlush) {
@@ -4031,16 +4049,16 @@
         i->input_buffer);
     }
     dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
-                mPendingBuffersMap.num_buffers);
+                mPendingBuffersMap.get_num_overall_buffers());
     dprintf(fd, "-------+------------------\n");
     dprintf(fd, " Frame | Stream type mask \n");
     dprintf(fd, "-------+------------------\n");
-    for(List<PendingBufferInfo>::iterator i =
-        mPendingBuffersMap.mPendingBufferList.begin();
-        i != mPendingBuffersMap.mPendingBufferList.end(); i++) {
-        QCamera3Channel *channel = (QCamera3Channel *)(i->stream->priv);
-        dprintf(fd, " %5d | %11d \n",
-                i->frame_number, channel->getStreamTypeMask());
+    for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
+        for(auto &j : req.mPendingBufferList) {
+            QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
+            dprintf(fd, " %5d | %11d \n",
+                    req.frame_number, channel->getStreamTypeMask());
+        }
     }
     dprintf(fd, "-------+------------------\n");
 
@@ -4165,10 +4183,13 @@
     int32_t rc = 0;
     struct timespec timeout;
     bool timed_wait = false;
-    FlushMap flushMap;
 
     pthread_mutex_lock(&mMutex);
     mFlushPerf = true;
+    mPendingBuffersMap.numPendingBufsAtFlush =
+        mPendingBuffersMap.get_num_overall_buffers();
+    LOGD("Calling flush. Wait for %d buffers to return",
+        mPendingBuffersMap.numPendingBufsAtFlush);
 
     /* send the flush event to the backend */
     rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
@@ -4179,8 +4200,8 @@
         return -ENODEV;
     }
 
-    if (mPendingBuffersMap.num_buffers == 0) {
-        LOGD("No pending buffers in the HAL, return flush");
+    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
+        LOGD("No pending buffers in HAL, return flush");
         mFlushPerf = false;
         pthread_mutex_unlock(&mMutex);
         return rc;
@@ -4196,7 +4217,7 @@
     }
 
     //Block on conditional variable
-    while (mPendingBuffersMap.num_buffers != 0) {
+    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
         LOGD("Waiting on mBuffersCond");
         if (!timed_wait) {
             rc = pthread_cond_wait(&mBuffersCond, &mMutex);
@@ -4253,6 +4274,7 @@
 
     mFlushPerf = false;
     pthread_mutex_unlock(&mMutex);
+    LOGD ("Flush Operation complete. rc = %d", rc);
     return rc;
 }
 
@@ -6199,6 +6221,7 @@
      * advertised as limited device*/
     limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
             (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
+            (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
             !supportBurst;
 
     uint8_t supportedHwLvl = limitedDevice ?
@@ -7375,42 +7398,6 @@
 }
 
 /*===========================================================================
- * FUNCTION   : getPreviewHalPixelFormat
- *
- * DESCRIPTION: convert the format to type recognized by framework
- *
- * PARAMETERS : format : the format from backend
- *
- ** RETURN    : format recognized by framework
- *
- *==========================================================================*/
-int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
-{
-    int32_t halPixelFormat;
-
-    switch (format) {
-    case CAM_FORMAT_YUV_420_NV12:
-        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
-        break;
-    case CAM_FORMAT_YUV_420_NV21:
-        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
-        break;
-    case CAM_FORMAT_YUV_420_NV21_ADRENO:
-        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
-        break;
-    case CAM_FORMAT_YUV_420_YV12:
-        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
-        break;
-    case CAM_FORMAT_YUV_422_NV16:
-    case CAM_FORMAT_YUV_422_NV61:
-    default:
-        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
-        break;
-    }
-    return halPixelFormat;
-}
-
-/*===========================================================================
  * FUNCTION   : computeNoiseModelEntryS
  *
  * DESCRIPTION: function to map a given sensitivity to the S noise
@@ -9556,7 +9543,7 @@
 }
 
 /*===========================================================================
- * FUNCTION   : needJpegRotation
+ * FUNCTION   : needJpegExifRotation
  *
  * DESCRIPTION: if rotation from jpeg is needed
  *
@@ -9565,11 +9552,11 @@
  * RETURN     : true: needed
  *              false: no need
  *==========================================================================*/
-bool QCamera3HardwareInterface::needJpegRotation()
+bool QCamera3HardwareInterface::needJpegExifRotation()
 {
    /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
     if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
-       LOGD("Need Jpeg to do the rotation");
+       LOGD("Need use Jpeg EXIF Rotation");
        return true;
     }
     return false;
@@ -9620,6 +9607,9 @@
         pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
         pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
     }
+    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
+        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
+    }
 
     rc = pChannel->addReprocStreamsFromSource(pp_config,
             config,
@@ -9910,6 +9900,7 @@
 {
     int32_t rc = NO_ERROR;
 
+    LOGD("Stopping all channels");
     // Stop the Streams/Channels
     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
         it != mStreamInfo.end(); it++) {
@@ -10013,7 +10004,6 @@
     unsigned int frameNum = 0;
     camera3_capture_result_t result;
     camera3_stream_buffer_t *pStream_Buf = NULL;
-    FlushMap flushMap;
 
     memset(&result, 0, sizeof(camera3_capture_result_t));
 
@@ -10027,148 +10017,120 @@
         frameNum = UINT_MAX;
     }
 
-    LOGH("Oldest frame num on  mPendingRequestsList = %d",
+    LOGH("Oldest frame num on mPendingRequestsList = %u",
        frameNum);
 
-    // Go through the pending buffers and group them depending
-    // on frame number
-    for (List<PendingBufferInfo>::iterator k =
-            mPendingBuffersMap.mPendingBufferList.begin();
-            k != mPendingBuffersMap.mPendingBufferList.end();) {
+    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
+            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
 
-        if (k->frame_number < frameNum) {
-            ssize_t idx = flushMap.indexOfKey(k->frame_number);
-            if (idx == NAME_NOT_FOUND) {
-                Vector<PendingBufferInfo> pending;
-                pending.add(*k);
-                flushMap.add(k->frame_number, pending);
-            } else {
-                Vector<PendingBufferInfo> &pending =
-                        flushMap.editValueFor(k->frame_number);
-                pending.add(*k);
+        if (req->frame_number < frameNum) {
+            // Send Error notify to frameworks for each buffer for which
+            // metadata buffer is already sent
+            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
+                req->frame_number, req->mPendingBufferList.size());
+
+            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
+            if (NULL == pStream_Buf) {
+                LOGE("No memory for pending buffers array");
+                return NO_MEMORY;
+            }
+            memset(pStream_Buf, 0,
+                sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
+            result.result = NULL;
+            result.frame_number = req->frame_number;
+            result.num_output_buffers = req->mPendingBufferList.size();
+            result.output_buffers = pStream_Buf;
+
+            size_t index = 0;
+            for (auto info = req->mPendingBufferList.begin();
+                info != req->mPendingBufferList.end(); ) {
+
+                camera3_notify_msg_t notify_msg;
+                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
+                notify_msg.type = CAMERA3_MSG_ERROR;
+                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
+                notify_msg.message.error.error_stream = info->stream;
+                notify_msg.message.error.frame_number = req->frame_number;
+                pStream_Buf[index].acquire_fence = -1;
+                pStream_Buf[index].release_fence = -1;
+                pStream_Buf[index].buffer = info->buffer;
+                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
+                pStream_Buf[index].stream = info->stream;
+                mCallbackOps->notify(mCallbackOps, &notify_msg);
+                index++;
+                // Remove buffer from list
+                info = req->mPendingBufferList.erase(info);
             }
 
-            mPendingBuffersMap.num_buffers--;
-            k = mPendingBuffersMap.mPendingBufferList.erase(k);
+            // Remove this request from Map
+            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
+                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
+            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
+
+            mCallbackOps->process_capture_result(mCallbackOps, &result);
+
+            delete [] pStream_Buf;
         } else {
-            k++;
-        }
-    }
 
-    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
-        uint32_t frame_number = flushMap.keyAt(iFlush);
-        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
+            // Go through the pending requests info and send error request to framework
+            LOGE("Sending ERROR REQUEST for all pending requests");
+            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
 
-        // Send Error notify to frameworks for each buffer for which
-        // metadata buffer is already sent
-        LOGH("Sending ERROR BUFFER for frame %d number of buffer %d",
-           frame_number, pending.size());
+            LOGE("Sending ERROR REQUEST for frame %d", req->frame_number);
 
-        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
-        if (NULL == pStream_Buf) {
-            LOGE("No memory for pending buffers array");
-            return NO_MEMORY;
-        }
-        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());
-
-        for (size_t j = 0; j < pending.size(); j++) {
-            const PendingBufferInfo &info = pending.itemAt(j);
+            // Send error notify to frameworks
             camera3_notify_msg_t notify_msg;
             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
             notify_msg.type = CAMERA3_MSG_ERROR;
-            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
-            notify_msg.message.error.error_stream = info.stream;
-            notify_msg.message.error.frame_number = frame_number;
-            pStream_Buf[j].acquire_fence = -1;
-            pStream_Buf[j].release_fence = -1;
-            pStream_Buf[j].buffer = info.buffer;
-            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
-            pStream_Buf[j].stream = info.stream;
+            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
+            notify_msg.message.error.error_stream = NULL;
+            notify_msg.message.error.frame_number = req->frame_number;
             mCallbackOps->notify(mCallbackOps, &notify_msg);
-            LOGH("notify frame_number = %d stream %p",
-                    frame_number, info.stream);
+
+            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
+            if (NULL == pStream_Buf) {
+                LOGE("No memory for pending buffers array");
+                return NO_MEMORY;
+            }
+            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
+
+            result.result = NULL;
+            result.frame_number = req->frame_number;
+            result.input_buffer = i->input_buffer;
+            result.num_output_buffers = req->mPendingBufferList.size();
+            result.output_buffers = pStream_Buf;
+
+            size_t index = 0;
+            for (auto info = req->mPendingBufferList.begin();
+                info != req->mPendingBufferList.end(); ) {
+                pStream_Buf[index].acquire_fence = -1;
+                pStream_Buf[index].release_fence = -1;
+                pStream_Buf[index].buffer = info->buffer;
+                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
+                pStream_Buf[index].stream = info->stream;
+                index++;
+                // Remove buffer from list
+                info = req->mPendingBufferList.erase(info);
+            }
+
+            // Remove this request from Map
+            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
+                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
+            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
+
+            mCallbackOps->process_capture_result(mCallbackOps, &result);
+            delete [] pStream_Buf;
+            i = erasePendingRequest(i);
         }
-
-        result.result = NULL;
-        result.frame_number = frame_number;
-        result.num_output_buffers = (uint32_t)pending.size();
-        result.output_buffers = pStream_Buf;
-        mCallbackOps->process_capture_result(mCallbackOps, &result);
-
-        delete [] pStream_Buf;
-    }
-
-    LOGH("Sending ERROR REQUEST for all pending requests");
-
-    flushMap.clear();
-    for (List<PendingBufferInfo>::iterator k =
-            mPendingBuffersMap.mPendingBufferList.begin();
-            k != mPendingBuffersMap.mPendingBufferList.end();) {
-        ssize_t idx = flushMap.indexOfKey(k->frame_number);
-        if (idx == NAME_NOT_FOUND) {
-            Vector<PendingBufferInfo> pending;
-            pending.add(*k);
-            flushMap.add(k->frame_number, pending);
-        } else {
-            Vector<PendingBufferInfo> &pending =
-                    flushMap.editValueFor(k->frame_number);
-            pending.add(*k);
-        }
-
-        mPendingBuffersMap.num_buffers--;
-        k = mPendingBuffersMap.mPendingBufferList.erase(k);
-    }
-
-    pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
-
-    // Go through the pending requests info and send error request to framework
-    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
-        uint32_t frame_number = flushMap.keyAt(iFlush);
-        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
-        LOGH("Sending ERROR REQUEST for frame %d",
-               frame_number);
-
-        // Send shutter notify to frameworks
-        camera3_notify_msg_t notify_msg;
-        memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
-        notify_msg.type = CAMERA3_MSG_ERROR;
-        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
-        notify_msg.message.error.error_stream = NULL;
-        notify_msg.message.error.frame_number = frame_number;
-        mCallbackOps->notify(mCallbackOps, &notify_msg);
-
-        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
-        if (NULL == pStream_Buf) {
-            LOGE("No memory for pending buffers array");
-            return NO_MEMORY;
-        }
-        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());
-
-        for (size_t j = 0; j < pending.size(); j++) {
-            const PendingBufferInfo &info = pending.itemAt(j);
-            pStream_Buf[j].acquire_fence = -1;
-            pStream_Buf[j].release_fence = -1;
-            pStream_Buf[j].buffer = info.buffer;
-            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
-            pStream_Buf[j].stream = info.stream;
-        }
-
-        result.input_buffer = i->input_buffer;
-        result.num_output_buffers = (uint32_t)pending.size();
-        result.output_buffers = pStream_Buf;
-        result.result = NULL;
-        result.frame_number = frame_number;
-        mCallbackOps->process_capture_result(mCallbackOps, &result);
-        delete [] pStream_Buf;
-        i = erasePendingRequest(i);
     }
 
     /* Reset pending frame Drop list and requests list */
     mPendingFrameDropList.clear();
 
-    flushMap.clear();
-    mPendingBuffersMap.num_buffers = 0;
-    mPendingBuffersMap.mPendingBufferList.clear();
+    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
+        req.mPendingBufferList.clear();
+    }
+    mPendingBuffersMap.mPendingBuffersInRequest.clear();
     mPendingReprocessResultList.clear();
     LOGH("Cleared all the pending buffers ");
 
@@ -10225,4 +10187,60 @@
     return rc;
 }
 
+/*===========================================================================
+ * FUNCTION   : get_num_overall_buffers
+ *
+ * DESCRIPTION: Calculate the total number of pending buffers across all requests.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : Number of overall pending buffers
+ *
+ *==========================================================================*/
+uint32_t PendingBuffersMap::get_num_overall_buffers()
+{
+    uint32_t sum_buffers = 0;
+    for (auto &req : mPendingBuffersInRequest) {
+        sum_buffers += req.mPendingBufferList.size();
+    }
+    return sum_buffers;
+}
+
+/*===========================================================================
+ * FUNCTION   : removeBuf
+ *
+ * DESCRIPTION: Remove a matching buffer from the pending-buffers tracker.
+ *
+ * PARAMETERS : @buffer: image buffer for the callback
+ *
+ * RETURN     : None
+ *
+ *==========================================================================*/
+void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
+{
+    bool buffer_found = false;
+    for (auto req = mPendingBuffersInRequest.begin();
+            req != mPendingBuffersInRequest.end(); req++) {
+        for (auto k = req->mPendingBufferList.begin();
+                k != req->mPendingBufferList.end(); k++ ) {
+            if (k->buffer == buffer) {
+                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
+                        req->frame_number, buffer);
+                k = req->mPendingBufferList.erase(k);
+                if (req->mPendingBufferList.empty()) {
+                    // Remove this request from Map
+                    req = mPendingBuffersInRequest.erase(req);
+                }
+                buffer_found = true;
+                break;
+            }
+        }
+        if (buffer_found) {
+            break;
+        }
+    }
+    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
+            get_num_overall_buffers());
+}
+
 }; //end namespace qcamera
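
A minimal standalone sketch (not the HAL code) of the flush handshake introduced above: flush() snapshots the outstanding count into numPendingBufsAtFlush and waits on mBuffersCond, while handleBuffersDuringFlushLock() decrements the count for each buffer the back-end returns and signals once it reaches zero. FlushTracker and its methods below are illustrative names only.

    #include <condition_variable>
    #include <cstdint>
    #include <cstdio>
    #include <mutex>
    #include <thread>

    // Stand-in for the numPendingBufsAtFlush / mBuffersCond handshake (illustrative only).
    class FlushTracker {
    public:
        void beginFlush(uint32_t outstanding) {
            std::lock_guard<std::mutex> l(mLock);
            mPending = outstanding;                 // snapshot taken by flush()
        }
        void onBufferReturned() {                   // handleBuffersDuringFlushLock() equivalent
            std::lock_guard<std::mutex> l(mLock);
            if (mPending > 0 && --mPending == 0)
                mCond.notify_one();                 // all buffers back, wake flush()
        }
        void waitForDrain() {                       // flush() blocks here
            std::unique_lock<std::mutex> l(mLock);
            mCond.wait(l, [this] { return mPending == 0; });
        }
    private:
        std::mutex mLock;
        std::condition_variable mCond;
        uint32_t mPending = 0;
    };

    int main() {
        FlushTracker tracker;
        tracker.beginFlush(3);
        std::thread backend([&] { for (int i = 0; i < 3; ++i) tracker.onBufferReturned(); });
        tracker.waitForDrain();
        backend.join();
        std::printf("all pending buffers returned, flush can complete\n");
        return 0;
    }
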
diff --git a/QCamera2/HAL3/QCamera3HWI.h b/QCamera2/HAL3/QCamera3HWI.h
index 558cd61..02368b0 100644
--- a/QCamera2/HAL3/QCamera3HWI.h
+++ b/QCamera2/HAL3/QCamera3HWI.h
@@ -91,6 +91,32 @@
     QCamera3ProcessingChannel *channel;
 } stream_info_t;
 
+typedef struct {
+    // Stream handle
+    camera3_stream_t *stream;
+    // Buffer handle
+    buffer_handle_t *buffer;
+} PendingBufferInfo;
+
+typedef struct {
+    // Frame number corresponding to request
+    uint32_t frame_number;
+    // Time when the request was queued into the system
+    nsecs_t timestamp;
+    List<PendingBufferInfo> mPendingBufferList;
+} PendingBuffersInRequest;
+
+class PendingBuffersMap {
+public:
+    // Number of outstanding buffers at flush
+    uint32_t numPendingBufsAtFlush;
+    // List of pending buffers per request
+    List<PendingBuffersInRequest> mPendingBuffersInRequest;
+    uint32_t get_num_overall_buffers();
+    void removeBuf(buffer_handle_t *buffer);
+};
+
+
 class QCamera3HardwareInterface {
 public:
     /* static variable and functions accessed by camera service */
@@ -135,7 +161,6 @@
                                    uint32_t tag);
     static bool resetIfNeededROI(cam_area_t* roi, const cam_crop_region_t* scalerCropRegion);
     static void convertLandmarks(cam_face_landmarks_info_t face, int32_t* landmarks);
-    static int32_t getScalarFormat(int32_t format);
     static int32_t getSensorSensitivity(int32_t iso_mode);
 
     double computeNoiseModelEntryS(int32_t sensitivity);
@@ -172,7 +197,7 @@
             QCamera3ProcessingChannel *inputChHandle);
     bool needRotationReprocess();
     bool needReprocess(uint32_t postprocess_mask);
-    bool needJpegRotation();
+    bool needJpegExifRotation();
     cam_denoise_process_type_t getWaveletDenoiseProcessPlate();
     cam_denoise_process_type_t getTemporalDenoiseProcessPlate();
 
@@ -249,6 +274,7 @@
     int validateStreamDimensions(camera3_stream_configuration_t *streamList);
     int validateStreamRotations(camera3_stream_configuration_t *streamList);
     void deriveMinFrameDuration();
+    void handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer);
     int32_t handlePendingReprocResults(uint32_t frame_number);
     int64_t getMinFrameDuration(const camera3_capture_request_t *request);
     void handleMetadataWithLock(mm_camera_super_buf_t *metadata_buf,
@@ -375,23 +401,6 @@
         uint32_t stream_ID;
     } PendingFrameDropInfo;
 
-    // Store the Pending buffers for Flushing
-    typedef struct {
-        // Frame number pertaining to the buffer
-        uint32_t frame_number;
-        camera3_stream_t *stream;
-        // Buffer handle
-        buffer_handle_t *buffer;
-
-    } PendingBufferInfo;
-
-    typedef struct {
-        // Total number of buffer requests pending
-        uint32_t num_buffers;
-        // List of pending buffers
-        List<PendingBufferInfo> mPendingBufferList;
-    } PendingBuffersMap;
-
     typedef struct {
         camera3_notify_msg_t notify_msg;
         camera3_stream_buffer_t buffer;
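
The per-request grouping in PendingBuffersInRequest also records a timestamp for each accepted request, which is what allows handleMetadataWithLock() to flag buffers that stay outstanding longer than MISSING_REQUEST_BUF_TIMEOUT. A compilable sketch of that check, using stand-in types rather than the HAL structs:

    #include <chrono>
    #include <cstdint>
    #include <cstdio>
    #include <list>

    // Stand-ins for PendingBufferInfo / PendingBuffersInRequest (illustrative only).
    struct PendingBuffer  { void *buffer; int streamType; };
    struct PendingRequest {
        uint32_t frameNumber;
        std::chrono::steady_clock::time_point enqueued;  // set when the request is accepted
        std::list<PendingBuffer> buffers;
    };

    // Log every request whose buffers have been outstanding longer than 'timeout'
    // (the HAL compares against MISSING_REQUEST_BUF_TIMEOUT seconds).
    void reportOverdue(const std::list<PendingRequest> &pending, std::chrono::seconds timeout) {
        auto now = std::chrono::steady_clock::now();
        for (const auto &req : pending) {
            if (now - req.enqueued > timeout) {
                for (const auto &buf : req.buffers)
                    std::printf("overdue: frame %u, buffer %p, stream type %d\n",
                                req.frameNumber, buf.buffer, buf.streamType);
            }
        }
    }

    int main() {
        PendingRequest req;
        req.frameNumber = 42;
        req.enqueued = std::chrono::steady_clock::now() - std::chrono::seconds(10);
        req.buffers.push_back({nullptr, 1});
        std::list<PendingRequest> pending{req};
        reportOverdue(pending, std::chrono::seconds(5));
        return 0;
    }
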
diff --git a/QCamera2/HAL3/QCamera3PostProc.cpp b/QCamera2/HAL3/QCamera3PostProc.cpp
index 92d52d2..82e513e 100644
--- a/QCamera2/HAL3/QCamera3PostProc.cpp
+++ b/QCamera2/HAL3/QCamera3PostProc.cpp
@@ -1124,7 +1124,7 @@
     metadata_buffer_t *metadata = NULL;
     jpeg_settings_t *jpeg_settings = NULL;
     QCamera3HardwareInterface* hal_obj = NULL;
-    bool needJpegRotation = false;
+    bool needJpegExifRotation = false;
 
     if (NULL == jpeg_job_data) {
         LOGE("Invalid jpeg job");
@@ -1171,6 +1171,8 @@
     dst_dim.width = recvd_frame->reproc_config.output_stream_dim.width;
     dst_dim.height = recvd_frame->reproc_config.output_stream_dim.height;
 
+    needJpegExifRotation = hal_obj->needJpegExifRotation();
+
     LOGH("Need new session?:%d", needNewSess);
     if (needNewSess) {
         //creating a new session, so we must destroy the old one
@@ -1191,6 +1193,10 @@
         encodeParam.thumb_dim.src_dim = src_dim;
         encodeParam.thumb_dim.dst_dim = jpeg_settings->thumbnail_size;
 
+        if (needJpegExifRotation) {
+            encodeParam.thumb_rotation = (uint32_t)jpeg_settings->jpeg_orientation;
+        }
+
         getFWKJpegEncodeConfig(encodeParam, recvd_frame, jpeg_settings);
         LOGH("#src bufs:%d # tmb bufs:%d #dst_bufs:%d",
                      encodeParam.num_src_bufs,encodeParam.num_tmb_bufs,encodeParam.num_dst_bufs);
@@ -1216,8 +1222,7 @@
     //main_stream->getCropInfo(crop);
 
     // Set main dim job parameters and handle rotation
-    needJpegRotation = hal_obj->needJpegRotation();
-    if (!needJpegRotation && (jpeg_settings->jpeg_orientation == 90 ||
+    if (!needJpegExifRotation && (jpeg_settings->jpeg_orientation == 90 ||
             jpeg_settings->jpeg_orientation == 270)) {
 
         jpg_job.encode_job.main_dim.src_dim.width = src_dim.height;
@@ -1267,11 +1272,8 @@
         jpg_job.encode_job.thumb_dim.dst_dim =
                 jpeg_settings->thumbnail_size;
 
-        if (needJpegRotation) {
-            jpg_job.encode_job.rotation = (uint32_t)jpeg_settings->jpeg_orientation;
-            LOGH("jpeg rotation is set to %u", jpg_job.encode_job.rotation);
-        } else if (jpeg_settings->jpeg_orientation  == 90 ||
-                jpeg_settings->jpeg_orientation == 270) {
+        if (!needJpegExifRotation && (jpeg_settings->jpeg_orientation == 90 ||
+                jpeg_settings->jpeg_orientation == 270)) {
             //swap the thumbnail destination width and height if it has
             //already been rotated
             int temp = jpg_job.encode_job.thumb_dim.dst_dim.width;
@@ -1341,7 +1343,7 @@
        LOGE("m_parent is NULL, Error");
        return BAD_VALUE;
     }
-    bool needJpegRotation = false;
+    bool needJpegExifRotation = false;
 
     recvd_frame = jpeg_job_data->src_frame;
     metadata = jpeg_job_data->metadata;
@@ -1418,7 +1420,7 @@
         return UNKNOWN_ERROR;
     }
 
-    needJpegRotation = hal_obj->needJpegRotation();
+    needJpegExifRotation = hal_obj->needJpegExifRotation();
     LOGH("Need new session?:%d", needNewSess);
     if (needNewSess) {
         //creating a new session, so we must destroy the old one
@@ -1437,7 +1439,7 @@
         getJpegEncodeConfig(encodeParam, main_stream, jpeg_settings);
         LOGH("#src bufs:%d # tmb bufs:%d #dst_bufs:%d",
                      encodeParam.num_src_bufs,encodeParam.num_tmb_bufs,encodeParam.num_dst_bufs);
-        if (!needJpegRotation &&
+        if (!needJpegExifRotation &&
             (jpeg_settings->jpeg_orientation == 90 ||
             jpeg_settings->jpeg_orientation == 270)) {
            //swap src width and height, stride and scanline due to rotation
@@ -1461,8 +1463,9 @@
         }
         encodeParam.main_dim.dst_dim = dst_dim;
         encodeParam.thumb_dim.dst_dim = jpeg_settings->thumbnail_size;
-        if (needJpegRotation) {
-           encodeParam.rotation = (uint32_t)jpeg_settings->jpeg_orientation;
+
+        if (needJpegExifRotation) {
+            encodeParam.thumb_rotation = (uint32_t)jpeg_settings->jpeg_orientation;
         }
 
         LOGI("Src Buffer cnt = %d, res = %dX%d len = %d rot = %d "
@@ -1500,19 +1503,13 @@
     jpg_job.encode_job.src_index = (int32_t)main_frame->buf_idx;
     jpg_job.encode_job.dst_index = 0;
 
-    if (needJpegRotation) {
-        jpg_job.encode_job.rotation = (uint32_t)jpeg_settings->jpeg_orientation;
-        LOGD("jpeg rotation is set to %d",
-                jpg_job.encode_job.rotation);
-    }
-
     cam_rect_t crop;
     memset(&crop, 0, sizeof(cam_rect_t));
     //TBD_later - Zoom event removed in stream
     //main_stream->getCropInfo(crop);
 
     // Set main dim job parameters and handle rotation
-    if (!needJpegRotation && (jpeg_settings->jpeg_orientation == 90 ||
+    if (!needJpegExifRotation && (jpeg_settings->jpeg_orientation == 90 ||
             jpeg_settings->jpeg_orientation == 270)) {
 
         jpg_job.encode_job.main_dim.src_dim.width = src_dim.height;
@@ -1563,7 +1560,7 @@
         jpg_job.encode_job.thumb_dim.dst_dim =
                 jpeg_settings->thumbnail_size;
 
-      if (!needJpegRotation &&
+      if (!needJpegExifRotation &&
           (jpeg_settings->jpeg_orientation  == 90 ||
            jpeg_settings->jpeg_orientation == 270)) {
             //swap the thumbnail destination width and height if it has
@@ -1869,7 +1866,7 @@
                         } else if (pp_buffer == NULL) {
                             LOGE("failed to dequeue from m_inputPPQ");
                             ret = -1;
-                        } else {
+                        } else if (pp_buffer != NULL){
                             memset(pp_job, 0, sizeof(qcamera_hal3_pp_data_t));
                             pp_job->src_frame = pp_buffer->input;
                             pp_job->src_metadata = meta_buffer;
@@ -2340,6 +2337,13 @@
         LOGE("No memory for QCamera3Exif");
         return NULL;
     }
+    QCamera3HardwareInterface* hal_obj = NULL;
+    if (m_parent != NULL) {
+        hal_obj = (QCamera3HardwareInterface*)m_parent->mUserData;
+    } else {
+        LOGE("m_parent is NULL, Error");
+        return NULL;
+    }
 
     int32_t rc = NO_ERROR;
     uint32_t count = 0;
@@ -2548,6 +2552,37 @@
             LOGW("Adding IMAGE_DESCRIPTION tag failed");
         }
     }
+
+    if (hal_obj->needJpegExifRotation()) {
+        int16_t orientation;
+        switch (jpeg_settings->jpeg_orientation) {
+            case 0:
+                orientation = 1;
+                break;
+            case 90:
+                orientation = 6;
+                break;
+            case 180:
+                orientation = 3;
+                break;
+            case 270:
+                orientation = 8;
+                break;
+            default:
+                orientation = 1;
+                break;
+        }
+        exif->addEntry(EXIFTAGID_ORIENTATION,
+                       EXIF_SHORT,
+                       1,
+                       (void *)&orientation);
+        exif->addEntry(EXIFTAGID_TN_ORIENTATION,
+                       EXIF_SHORT,
+                       1,
+                       (void *)&orientation);
+
+    }
+
     return exif;
 }
 
@@ -2720,6 +2755,7 @@
             break;
         case EXIF_SHORT:
             {
+                uint16_t *exif_data = (uint16_t *)data;
                 if (count > 1) {
                     uint16_t *values =
                         (uint16_t *)malloc(count * sizeof(uint16_t));
@@ -2727,8 +2763,8 @@
                         LOGE("No memory for short array");
                         rc = NO_MEMORY;
                     } else {
-                        memcpy(values, data, count * sizeof(uint16_t));
-                        m_Entries[m_nNumEntries].tag_entry.data._shorts =values;
+                        memcpy(values, exif_data, count * sizeof(uint16_t));
+                        m_Entries[m_nNumEntries].tag_entry.data._shorts = values;
                     }
                 } else {
                     m_Entries[m_nNumEntries].tag_entry.data._short =
@@ -2738,6 +2774,7 @@
             break;
         case EXIF_LONG:
             {
+                uint32_t *exif_data = (uint32_t *)data;
                 if (count > 1) {
                     uint32_t *values =
                         (uint32_t *)malloc(count * sizeof(uint32_t));
@@ -2745,7 +2782,7 @@
                         LOGE("No memory for long array");
                         rc = NO_MEMORY;
                     } else {
-                        memcpy(values, data, count * sizeof(uint32_t));
+                        memcpy(values, exif_data, count * sizeof(uint32_t));
                         m_Entries[m_nNumEntries].tag_entry.data._longs = values;
                     }
                 } else {
@@ -2756,13 +2793,14 @@
             break;
         case EXIF_RATIONAL:
             {
+                rat_t *exif_data = (rat_t *)data;
                 if (count > 1) {
                     rat_t *values = (rat_t *)malloc(count * sizeof(rat_t));
                     if (values == NULL) {
                         LOGE("No memory for rational array");
                         rc = NO_MEMORY;
                     } else {
-                        memcpy(values, data, count * sizeof(rat_t));
+                        memcpy(values, exif_data, count * sizeof(rat_t));
                         m_Entries[m_nNumEntries].tag_entry.data._rats = values;
                     }
                 } else {
@@ -2785,6 +2823,7 @@
             break;
         case EXIF_SLONG:
             {
+                int32_t *exif_data = (int32_t *)data;
                 if (count > 1) {
                     int32_t *values =
                         (int32_t *)malloc(count * sizeof(int32_t));
@@ -2792,7 +2831,7 @@
                         LOGE("No memory for signed long array");
                         rc = NO_MEMORY;
                     } else {
-                        memcpy(values, data, count * sizeof(int32_t));
+                        memcpy(values, exif_data, count * sizeof(int32_t));
                         m_Entries[m_nNumEntries].tag_entry.data._slongs =values;
                     }
                 } else {
@@ -2803,13 +2842,14 @@
             break;
         case EXIF_SRATIONAL:
             {
+                srat_t *exif_data = (srat_t *)data;
                 if (count > 1) {
                     srat_t *values = (srat_t *)malloc(count * sizeof(srat_t));
                     if (values == NULL) {
                         LOGE("No memory for sign rational array");
                         rc = NO_MEMORY;
                     } else {
-                        memcpy(values, data, count * sizeof(srat_t));
+                        memcpy(values, exif_data, count * sizeof(srat_t));
                         m_Entries[m_nNumEntries].tag_entry.data._srats = values;
                     }
                 } else {
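
When needJpegExifRotation() reports that the reprocess block cannot rotate, the changes above record jpeg_settings->jpeg_orientation in the EXIF Orientation and thumbnail-orientation tags instead of rotating the main image in the encode job. A standalone sketch of the degree-to-EXIF-code mapping used by that switch (0 -> 1, 90 -> 6, 180 -> 3, 270 -> 8); the helper name is illustrative:

    #include <cstdint>
    #include <cstdio>

    // Map a clockwise JPEG rotation in degrees onto the EXIF Orientation tag value,
    // mirroring the switch added to getExifData(): 0->1, 90->6, 180->3, 270->8.
    static int16_t exifOrientationForRotation(int degrees) {
        switch (degrees) {
            case 90:  return 6;   // rotate 90 CW
            case 180: return 3;   // rotate 180
            case 270: return 8;   // rotate 270 CW
            case 0:
            default:  return 1;   // normal / unknown
        }
    }

    int main() {
        const int degs[] = {0, 90, 180, 270};
        for (int deg : degs)
            std::printf("%3d degrees -> EXIF orientation %d\n",
                        deg, exifOrientationForRotation(deg));
        return 0;
    }
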
diff --git a/QCamera2/stack/common/cam_intf.h b/QCamera2/stack/common/cam_intf.h
index 9b1f66b..bd4db4c 100644
--- a/QCamera2/stack/common/cam_intf.h
+++ b/QCamera2/stack/common/cam_intf.h
@@ -433,6 +433,10 @@
     uint8_t hotPixel_mode;
     uint32_t hotPixel_count;
     cam_coordinate_type_t hotPixelMap[512];
+
+    /* supported instant capture/AEC convergence modes */
+    size_t supported_instant_aec_modes_cnt;
+    cam_aec_convergence_type supported_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
 } cam_capability_t;
 
 typedef enum {
@@ -757,7 +761,7 @@
     INCLUDE(CAM_INTF_META_PREP_SNAPSHOT_DONE,           int32_t,                        1);
     INCLUDE(CAM_INTF_META_GOOD_FRAME_IDX_RANGE,         cam_frame_idx_range_t,          1);
     INCLUDE(CAM_INTF_META_ASD_HDR_SCENE_DATA,           cam_asd_hdr_scene_data_t,       1);
-    INCLUDE(CAM_INTF_META_ASD_SCENE_TYPE,               int32_t,                        1);
+    INCLUDE(CAM_INTF_META_ASD_SCENE_INFO,               cam_asd_decision_t,             1);
     INCLUDE(CAM_INTF_META_CURRENT_SCENE,                cam_scene_mode_type,            1);
     INCLUDE(CAM_INTF_META_AWB_INFO,                     cam_awb_params_t,               1);
     INCLUDE(CAM_INTF_META_FOCUS_POSITION,               cam_focus_pos_info_t,           1);
@@ -830,7 +834,6 @@
     INCLUDE(CAM_INTF_META_EXIF_DEBUG_AF,                cam_af_exif_debug_t,         1);
     INCLUDE(CAM_INTF_META_EXIF_DEBUG_ASD,               cam_asd_exif_debug_t,        1);
     INCLUDE(CAM_INTF_META_EXIF_DEBUG_STATS,             cam_stats_buffer_exif_debug_t, 1);
-    INCLUDE(CAM_INTF_META_ASD_SCENE_CAPTURE_TYPE,       cam_auto_scene_t,            1);
     INCLUDE(CAM_INTF_PARM_EFFECT,                       uint32_t,                    1);
     /* Defining as int32_t so that this array is 4 byte aligned */
     INCLUDE(CAM_INTF_META_PRIVATE_DATA,                 int32_t,
@@ -911,7 +914,8 @@
     INCLUDE(CAM_INTF_PARM_LONGSHOT_ENABLE,              int8_t,                      1);
     INCLUDE(CAM_INTF_PARM_TONE_MAP_MODE,                uint32_t,                    1);
     INCLUDE(CAM_INTF_META_TOUCH_AE_RESULT,              int32_t,                     1);
-    INCLUDE(CAM_INTF_PARM_DUAL_LED_CALIBRATION,         int32_t,                    1);
+    INCLUDE(CAM_INTF_PARM_DUAL_LED_CALIBRATION,         int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_ADV_CAPTURE_MODE,             uint8_t,                     1);
 
     /* HAL3 specific */
     INCLUDE(CAM_INTF_META_STREAM_INFO,                  cam_stream_size_info_t,      1);
@@ -961,6 +965,7 @@
     INCLUDE(CAM_INTF_PARM_MANUAL_CAPTURE_TYPE,          cam_manual_capture_type,     1);
     INCLUDE(CAM_INTF_AF_STATE_TRANSITION,               uint8_t,                     1);
     INCLUDE(CAM_INTF_PARM_INITIAL_EXPOSURE_INDEX,       uint32_t,                    1);
+    INCLUDE(CAM_INTF_PARM_INSTANT_AEC,                  uint8_t,                     1);
 } metadata_data_t;
 
 /* Update clear_metadata_buffer() function when a new is_xxx_valid is added to
diff --git a/QCamera2/stack/common/cam_types.h b/QCamera2/stack/common/cam_types.h
index 75d8687..972718a 100644
--- a/QCamera2/stack/common/cam_types.h
+++ b/QCamera2/stack/common/cam_types.h
@@ -82,7 +82,7 @@
 
 #define MAX_CAPTURE_BATCH_NUM 32
 
-#define TUNING_DATA_VERSION        3
+#define TUNING_DATA_VERSION        6
 #define TUNING_SENSOR_DATA_MAX     0x10000 /*(need value from sensor team)*/
 #define TUNING_VFE_DATA_MAX        0x10000 /*(need value from vfe team)*/
 #define TUNING_CPP_DATA_MAX        0x10000 /*(need value from pproc team)*/
@@ -323,7 +323,7 @@
     CAM_FORMAT_YUV_444_NV24,
     CAM_FORMAT_YUV_444_NV42,
 
-    /* Y plane only, used for FD */
+    /* Y plane only, used for FD, 8BPP */
     CAM_FORMAT_Y_ONLY, //100
 
     /* UBWC format */
@@ -334,6 +334,27 @@
     /* RGB formats */
     CAM_FORMAT_8888_ARGB,
 
+    /* Y plane only */
+    CAM_FORMAT_Y_ONLY_10_BPP,
+    CAM_FORMAT_Y_ONLY_12_BPP,
+    CAM_FORMAT_Y_ONLY_14_BPP,
+    CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GREY,
+    CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GREY,
+    CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GREY,
+    CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GREY,
+    CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GREY,
+    CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GREY,
+    CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GREY,
+    CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GREY,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GREY,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GREY,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GREY,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GREY,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GREY,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GREY,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GREY,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GREY,
+
     CAM_FORMAT_MAX
 } cam_format_t;
 
@@ -394,18 +415,6 @@
     CAM_STREAM_BUF_TYPE_MAX
 } cam_stream_buf_type;
 
-/* values that persist.camera.global.debug can be set to */
-/* all camera modules need to map their internal debug levels to this range */
-typedef enum {
-    CAM_GLBL_DBG_NONE  = 0,
-    CAM_GLBL_DBG_ERR   = 1,
-    CAM_GLBL_DBG_WARN  = 2,
-    CAM_GLBL_DBG_HIGH  = 3,
-    CAM_GLBL_DBG_DEBUG = 4,
-    CAM_GLBL_DBG_LOW   = 5,
-    CAM_GLBL_DBG_INFO  = 6
-} cam_global_debug_level_t;
-
 typedef struct {
     cam_mapping_buf_type type;
     uint32_t stream_id;   /* stream id: valid if STREAM_BUF */
@@ -602,6 +611,17 @@
     CAM_AEC_MODE_MAX
 } cam_auto_exposure_mode_type;
 
+/* enum to select AEC convergence type */
+typedef enum {
+    /* Normal AEC connvergence */
+    CAM_AEC_NORMAL_CONVERGENCE = 0,
+    /* Aggressive AEC connvergence */
+    CAM_AEC_AGGRESSIVE_CONVERGENCE,
+    /* Fast AEC convergence */
+    CAM_AEC_FAST_CONVERGENCE,
+    CAM_AEC_CONVERGENCE_MAX
+} cam_aec_convergence_type;
+
 typedef enum {
     CAM_AE_MODE_OFF,
     CAM_AE_MODE_ON,
@@ -1320,11 +1340,6 @@
 } cam_auto_focus_data_t;
 
 typedef struct {
-  uint32_t is_hdr_scene;
-  float    hdr_confidence;
-} cam_asd_hdr_scene_data_t;
-
-typedef struct {
     uint32_t stream_id;
     cam_rect_t crop;
     cam_rect_t roi_map;
@@ -1377,16 +1392,47 @@
   S_PORTRAIT_BACKLIGHT,
   S_SCENERY_BACKLIGHT,
   S_BACKLIGHT,
+  S_HDR,
+  S_MAX_DEFAULT,
+  S_CUSTOM0 = S_MAX_DEFAULT,
+  S_CUSTOM1,
+  S_CUSTOM2,
+  S_CUSTOM3,
+  S_CUSTOM4,
+  S_CUSTOM5,
+  S_CUSTOM6,
+  S_CUSTOM7,
+  S_CUSTOM8,
+  S_CUSTOM9,
   S_MAX,
 } cam_auto_scene_t;
 
 typedef struct {
+  uint32_t is_hdr_scene;
+  float    hdr_confidence;
+} cam_asd_hdr_scene_data_t;
+
+typedef struct {
+  uint32_t          detected;
+  float             confidence;
+  uint32_t          auto_compensation;
+} cam_asd_scene_info_t;
+
+typedef struct {
+  cam_auto_scene_t      detected_scene;
+  uint8_t               max_n_scenes;
+  cam_asd_scene_info_t  scene_info[S_MAX];
+} cam_asd_decision_t;
+
+
+typedef struct {
    uint32_t meta_frame_id;
 } cam_meta_valid_t;
 
 typedef enum {
     CAM_SENSOR_RAW,
-    CAM_SENSOR_YUV
+    CAM_SENSOR_YUV,
+    CAM_SENSOR_MONO
 } cam_sensor_t;
 
 typedef struct {
@@ -1635,10 +1681,7 @@
      * 2. good_frame_idx_range.min_frame_idx - current_frame_idx < 100 */
     cam_frame_idx_range_t good_frame_idx_range;
 
-    uint32_t is_hdr_scene_data_valid;
-    cam_asd_hdr_scene_data_t hdr_scene_data;
-    uint8_t is_asd_decision_valid;
-    cam_auto_scene_t scene; //scene type as decided by ASD
+    cam_asd_decision_t cam_asd_info;
 
     char private_metadata[MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES];
 
@@ -1779,15 +1822,14 @@
     CAM_INTF_META_PREP_SNAPSHOT_DONE, /* 60 */
     CAM_INTF_META_GOOD_FRAME_IDX_RANGE,
     CAM_INTF_META_ASD_HDR_SCENE_DATA,
-    CAM_INTF_META_ASD_SCENE_TYPE,
+    CAM_INTF_META_ASD_SCENE_INFO,
     CAM_INTF_META_CURRENT_SCENE,
     CAM_INTF_META_AEC_INFO,
     CAM_INTF_META_SENSOR_INFO,
-    CAM_INTF_META_ASD_SCENE_CAPTURE_TYPE,
     CAM_INTF_META_CHROMATIX_LITE_ISP,
     CAM_INTF_META_CHROMATIX_LITE_PP,
-    CAM_INTF_META_CHROMATIX_LITE_AE, /* 70 */
-    CAM_INTF_META_CHROMATIX_LITE_AWB,
+    CAM_INTF_META_CHROMATIX_LITE_AE,
+    CAM_INTF_META_CHROMATIX_LITE_AWB, /* 70 */
     CAM_INTF_META_CHROMATIX_LITE_AF,
     CAM_INTF_META_CHROMATIX_LITE_ASD,
     CAM_INTF_META_EXIF_DEBUG_AE,
@@ -1796,8 +1838,8 @@
     CAM_INTF_META_EXIF_DEBUG_ASD,
     CAM_INTF_META_EXIF_DEBUG_STATS,
     CAM_INTF_PARM_GET_CHROMATIX,
-    CAM_INTF_PARM_SET_RELOAD_CHROMATIX, /* 80 */
-    CAM_INTF_PARM_SET_AUTOFOCUSTUNING,
+    CAM_INTF_PARM_SET_RELOAD_CHROMATIX,
+    CAM_INTF_PARM_SET_AUTOFOCUSTUNING, /* 80 */
     CAM_INTF_PARM_GET_AFTUNE,
     CAM_INTF_PARM_SET_RELOAD_AFTUNE,
     CAM_INTF_PARM_SET_VFE_COMMAND,
@@ -1806,9 +1848,10 @@
     CAM_INTF_PARM_LONGSHOT_ENABLE,
     CAM_INTF_PARM_RDI_MODE,
     CAM_INTF_PARM_CDS_MODE,
-    CAM_INTF_PARM_TONE_MAP_MODE, /* 90 */
-    CAM_INTF_PARM_CAPTURE_FRAME_CONFIG,
+    CAM_INTF_PARM_TONE_MAP_MODE,
+    CAM_INTF_PARM_CAPTURE_FRAME_CONFIG, /* 90 */
     CAM_INTF_PARM_DUAL_LED_CALIBRATION,
+    CAM_INTF_PARM_ADV_CAPTURE_MODE,
 
     /* stream based parameters */
     CAM_INTF_PARM_DO_REPROCESS,
@@ -2075,7 +2118,8 @@
     /* Gain applied post raw captrue.
        ISP digital gain */
     CAM_INTF_META_ISP_SENSITIVITY,
-
+    /* Param for enabling instant AEC */
+    CAM_INTF_PARM_INSTANT_AEC,
     CAM_INTF_PARM_MAX
 } cam_intf_parm_type_t;
 
@@ -2518,6 +2562,7 @@
      * output is interleaved UYVY */
     CAM_FILTER_ARRANGEMENT_UYVY,
     CAM_FILTER_ARRANGEMENT_YUYV,
+    CAM_FILTER_ARRANGEMENT_Y
 } cam_color_filter_arrangement_t;
 
 typedef enum {
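
cam_asd_decision_t replaces the single ASD scene/HDR fields with a per-scene confidence table bounded by max_n_scenes. One way a consumer might read it, sketched with simplified stand-in types rather than the cam_types.h definitions:

    #include <cstdint>
    #include <cstdio>

    // Simplified stand-ins for cam_auto_scene_t / cam_asd_decision_t (illustrative only).
    enum Scene { S_NORMAL = 0, S_PORTRAIT, S_LANDSCAPE, S_HDR, S_MAX };
    struct SceneInfo   { uint32_t detected; float confidence; };
    struct AsdDecision {
        Scene     detected_scene;       // scene chosen by ASD
        uint8_t   max_n_scenes;         // valid entries in scene_info[]
        SceneInfo scene_info[S_MAX];    // per-scene detection data
    };

    // Pick the detected scene with the highest confidence from the per-scene table.
    static Scene bestScene(const AsdDecision &d) {
        Scene best = d.detected_scene;
        float bestConf = 0.0f;
        for (uint8_t i = 0; i < d.max_n_scenes && i < S_MAX; ++i) {
            if (d.scene_info[i].detected && d.scene_info[i].confidence > bestConf) {
                bestConf = d.scene_info[i].confidence;
                best = static_cast<Scene>(i);
            }
        }
        return best;
    }

    int main() {
        AsdDecision d = {};
        d.detected_scene = S_NORMAL;
        d.max_n_scenes = S_MAX;
        d.scene_info[S_HDR] = {1, 0.9f};
        std::printf("best scene index = %d\n", static_cast<int>(bestScene(d)));
        return 0;
    }
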
diff --git a/QCamera2/stack/common/mm_camera_interface.h b/QCamera2/stack/common/mm_camera_interface.h
index c382dfc..40bd6d3 100644
--- a/QCamera2/stack/common/mm_camera_interface.h
+++ b/QCamera2/stack/common/mm_camera_interface.h
@@ -378,6 +378,8 @@
 *                     queue
 *    @enable_frame_sync: Enables frame sync for dual camera
 *    @priority : save matched priority frames only
+*    @user_expected_frame_id : Number of frames the camera interface
+*                     will wait for before delivering the instant capture frame.
 **/
 typedef struct {
     mm_camera_super_buf_notify_mode_t notify_mode;
@@ -387,6 +389,7 @@
     uint8_t max_unmatched_frames;
     uint8_t enable_frame_sync;
     mm_camera_super_buf_priority_t priority;
+    uint8_t user_expected_frame_id;
 } mm_camera_channel_attr_t;
 
 typedef struct {
diff --git a/QCamera2/stack/mm-camera-interface/Android.mk b/QCamera2/stack/mm-camera-interface/Android.mk
index cc8cc74..6cb7bc8 100644
--- a/QCamera2/stack/mm-camera-interface/Android.mk
+++ b/QCamera2/stack/mm-camera-interface/Android.mk
@@ -19,11 +19,11 @@
     LOCAL_CFLAGS += -DUSE_ION
 endif
 
-ifneq (,$(filter msm8974 msm8916 msm8226 msm8610 msm8916 apq8084 msm8084 msm8994 msm8992 msm8952 msm8937 titanium msm8996,$(TARGET_BOARD_PLATFORM)))
+ifneq (,$(filter msm8974 msm8916 msm8226 msm8610 msm8916 apq8084 msm8084 msm8994 msm8992 msm8952 msm8937 msm8953 msm8996 msmcobalt, $(TARGET_BOARD_PLATFORM)))
     LOCAL_CFLAGS += -DVENUS_PRESENT
 endif
 
-ifneq (,$(filter msm8996,$(TARGET_BOARD_PLATFORM)))
+ifneq (,$(filter msm8996 msmcobalt,$(TARGET_BOARD_PLATFORM)))
     LOCAL_CFLAGS += -DUBWC_PRESENT
 endif
 
diff --git a/QCamera2/stack/mm-camera-interface/inc/mm_camera.h b/QCamera2/stack/mm-camera-interface/inc/mm_camera.h
index e5328dc..c4f660b 100644
--- a/QCamera2/stack/mm-camera-interface/inc/mm_camera.h
+++ b/QCamera2/stack/mm-camera-interface/inc/mm_camera.h
@@ -377,7 +377,6 @@
     uint32_t led_on_num_frames;
     uint32_t once;
     uint32_t frame_skip_count;
-    uint32_t nomatch_frame_id;
 } mm_channel_queue_t;
 
 typedef struct {
diff --git a/QCamera2/stack/mm-camera-interface/inc/mm_camera_dbg.h b/QCamera2/stack/mm-camera-interface/inc/mm_camera_dbg.h
index 7e01c4b..8298c78 100644
--- a/QCamera2/stack/mm-camera-interface/inc/mm_camera_dbg.h
+++ b/QCamera2/stack/mm-camera-interface/inc/mm_camera_dbg.h
@@ -46,6 +46,18 @@
     CAM_LAST_MODULE
 } cam_modules_t;
 
+/* values that persist.camera.global.debug can be set to */
+/* all camera modules need to map their internal debug levels to this range */
+typedef enum {
+    CAM_GLBL_DBG_NONE  = 0,
+    CAM_GLBL_DBG_ERR   = 1,
+    CAM_GLBL_DBG_WARN  = 2,
+    CAM_GLBL_DBG_HIGH  = 3,
+    CAM_GLBL_DBG_DEBUG = 4,
+    CAM_GLBL_DBG_LOW   = 5,
+    CAM_GLBL_DBG_INFO  = 6
+} cam_global_debug_level_t;
+
 extern int g_cam_log[CAM_LAST_MODULE][CAM_GLBL_DBG_INFO + 1];
 
 #define FATAL_IF(cond, ...) LOG_ALWAYS_FATAL_IF(cond, ## __VA_ARGS__)
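
A minimal sketch, not part of this patch, of how a camera module might map persist.camera.global.debug onto this range, assuming Android's property_get() from cutils/properties.h (the helper name is hypothetical):

#include <stdlib.h>
#include <cutils/properties.h>

static cam_global_debug_level_t cam_get_global_debug_level(void)
{
    char prop[PROPERTY_VALUE_MAX];
    int level;

    /* persist.camera.global.debug holds a value in [0, 6]; clamp anything else */
    property_get("persist.camera.global.debug", prop, "0");
    level = atoi(prop);
    if (level < CAM_GLBL_DBG_NONE)
        level = CAM_GLBL_DBG_NONE;
    else if (level > CAM_GLBL_DBG_INFO)
        level = CAM_GLBL_DBG_INFO;
    return (cam_global_debug_level_t)level;
}
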
diff --git a/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c b/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
index dd99715..ee47b56 100644
--- a/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
+++ b/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
@@ -1507,7 +1507,8 @@
         /* init superbuf queue */
         mm_channel_superbuf_queue_init(&my_obj->bundle.superbuf_queue);
         my_obj->bundle.superbuf_queue.num_streams = num_streams_in_bundle_queue;
-        my_obj->bundle.superbuf_queue.expected_frame_id = 0;
+        my_obj->bundle.superbuf_queue.expected_frame_id =
+                my_obj->bundle.superbuf_queue.attr.user_expected_frame_id;
         my_obj->bundle.superbuf_queue.expected_frame_id_without_led = 0;
         my_obj->bundle.superbuf_queue.led_off_start_frame_id = 0;
         my_obj->bundle.superbuf_queue.led_on_start_frame_id = 0;
@@ -1733,18 +1734,12 @@
             s_objs[i]->linked_stream->is_linked = 0;
             s_objs[i]->linked_stream->linked_obj = NULL;
             pthread_mutex_unlock(&s_objs[i]->linked_stream->buf_lock);
-
-            if (TRUE == my_obj->bundle.is_active) {
-                mm_channel_flush_super_buf_queue(my_obj, 0, s_objs[i]->stream_info->stream_type);
-            }
-            break;
-        } else {
-            continue;
         }
     }
 
     /* destroy super buf cmd thread */
     if (TRUE == my_obj->bundle.is_active) {
+        mm_channel_flush_super_buf_queue(my_obj, 0, CAM_STREAM_TYPE_DEFAULT);
         /* first stop bundle thread */
         mm_camera_cmd_thread_release(&my_obj->cmd_thread);
         mm_camera_cmd_thread_release(&my_obj->cb_thread);
@@ -2589,6 +2584,27 @@
             CAM_INTF_META_LOW_LIGHT, metadata) {
             ch_obj->needLowLightZSL = *low_light_level;
         }
+
+        // For the instant capture case, if AEC settles before the expected frame ID
+        // from the user, reset the expected frame ID to the current frame index.
+        if (queue->attr.user_expected_frame_id > 0) {
+            if (queue->attr.user_expected_frame_id > buf_info->frame_idx) {
+                IF_META_AVAILABLE(const cam_3a_params_t, ae_params,
+                    CAM_INTF_META_AEC_INFO, metadata) {
+                    if (ae_params->settled) {
+                        queue->expected_frame_id = buf_info->frame_idx;
+                        // Reset the expected frame ID from HAL to 0
+                        queue->attr.user_expected_frame_id = 0;
+                        LOGD("AEC settled, reset expected frame ID from user");
+                    }
+                }
+            } else {
+                // Reset the expected frame ID from HAL to 0 once the
+                // current frame index reaches the expected frame ID.
+                queue->attr.user_expected_frame_id = 0;
+                LOGD("reset expected frame ID from user as it reached the bound");
+            }
+        }
     }
 end:
     return rc;
@@ -2651,14 +2667,6 @@
         return 0;
     }
 
-    if((queue->nomatch_frame_id != 0)
-            && (queue->nomatch_frame_id > buf_info->frame_idx)
-            && (buf_info->buf->stream_type == CAM_STREAM_TYPE_METADATA)) {
-        /*Incoming metadata is older than expected*/
-        mm_channel_qbuf(ch_obj, buf_info->buf);
-        return 0;
-    }
-
     /* comp */
     pthread_mutex_lock(&queue->que.lock);
     head = &queue->que.head.list;
@@ -2682,10 +2690,10 @@
                 continue;
             } else if ( buf_info->frame_idx == super_buf->frame_idx
                     /*Pick metadata greater than available frameID*/
-                    || ((queue->nomatch_frame_id != 0)
-                    && (queue->nomatch_frame_id <= buf_info->frame_idx)
+                    || ((queue->attr.priority == MM_CAMERA_SUPER_BUF_PRIORITY_LOW)
                     && (super_buf->super_buf[buf_s_idx].frame_idx == 0)
-                    && (buf_info->buf->stream_type == CAM_STREAM_TYPE_METADATA))
+                    && (buf_info->buf->stream_type == CAM_STREAM_TYPE_METADATA)
+                    && (super_buf->frame_idx < buf_info->frame_idx))
                     /*Pick available metadata closest to frameID*/
                     || ((queue->attr.priority == MM_CAMERA_SUPER_BUF_PRIORITY_LOW)
                     && (buf_info->buf->stream_type != CAM_STREAM_TYPE_METADATA)
@@ -2695,7 +2703,6 @@
                 metadata frameID greater than avialbale super buffer frameID  OR
                 metadata frame closest to incoming frameID will be bundled*/
                 found_super_buf = 1;
-                queue->nomatch_frame_id = 0;
                 break;
             } else {
                 unmatched_bundles++;
@@ -2870,13 +2877,6 @@
                         pthread_mutex_unlock(&fs_lock);
                     }
                 }
-
-                if ((queue->attr.priority == MM_CAMERA_SUPER_BUF_PRIORITY_LOW)
-                        && (buf_info->buf->stream_type != CAM_STREAM_TYPE_METADATA)) {
-                    LOGD("No metadata matching for frame = %d",
-                             buf_info->frame_idx);
-                    queue->nomatch_frame_id = buf_info->frame_idx;
-                }
             } else {
                 /* No memory */
                 if (NULL != new_buf) {
diff --git a/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c b/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
index 6d23196..c14cef4 100644
--- a/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
+++ b/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
@@ -1837,6 +1837,10 @@
         }
         close(dev_fd);
         dev_fd = -1;
+        if (num_cameras >= MM_CAMERA_MAX_NUM_SENSORS) {
+            LOGW("Maximum number of camera reached %d", num_cameras);
+            break;
+        }
     }
     g_cam_ctrl.num_cam = num_cameras;
 
diff --git a/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c b/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c
index c820d27..0c740b4 100644
--- a/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c
+++ b/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c
@@ -97,7 +97,7 @@
     /* send cmd to worker */
     ssize_t len = write(poll_cb->pfds[1], &cmd_evt, sizeof(cmd_evt));
     if (len < 1) {
-        LOGE("len = %lld, errno = %d",
+        LOGW("len = %lld, errno = %d",
                 (long long int)len, errno);
         /* Avoid waiting for the signal */
         pthread_mutex_unlock(&poll_cb->mutex);
@@ -143,7 +143,7 @@
 
     ssize_t len = write(poll_cb->pfds[1], &cmd_evt, sizeof(cmd_evt));
     if(len < 1) {
-        LOGE("len = %lld, errno = %d",
+        LOGW("len = %lld, errno = %d",
                 (long long int)len, errno);
         /* Avoid waiting for the signal */
         pthread_mutex_unlock(&poll_cb->mutex);
@@ -435,8 +435,7 @@
             rc = mm_camera_poll_sig_async(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC );
         }
     } else {
-        LOGE("invalid handler %d (%d)",
-                    handler, idx);
+        LOGE("invalid handler %d (%d)", handler, idx);
     }
     return rc;
 }
@@ -484,11 +483,15 @@
             rc = mm_camera_poll_sig_async(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC );
         }
     } else {
-        /* The error might be due to async update. We only report error for EVT type*/
-        if (MM_CAMERA_POLL_TYPE_DATA != poll_cb->poll_type)
-            LOGE("invalid handler %d (%d)", handler, idx);
-
-        return -1;
+        if ((MAX_STREAM_NUM_IN_BUNDLE <= idx) ||
+                (poll_cb->poll_entries[idx].handler != 0)) {
+            LOGE("invalid handler %d (%d)", poll_cb->poll_entries[idx].handler,
+                    idx);
+            rc = -1;
+        } else {
+            LOGW("invalid handler %d (%d)", handler, idx);
+            rc = 0;
+        }
     }
 
     return rc;
@@ -554,7 +557,7 @@
     mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_EXIT);
     /* wait until poll thread exits */
     if (pthread_join(poll_cb->pid, NULL) != 0) {
-        LOGE("pthread dead already\n");
+        LOGD("pthread dead already\n");
     }
 
     /* close pipe */
diff --git a/QCamera2/stack/mm-camera-test/Android.mk b/QCamera2/stack/mm-camera-test/Android.mk
index 4ff1bbf..0ea22e0 100644
--- a/QCamera2/stack/mm-camera-test/Android.mk
+++ b/QCamera2/stack/mm-camera-test/Android.mk
@@ -58,7 +58,7 @@
         LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
         LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
         LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
-else ifeq ($(TARGET_BOARD_PLATFORM),msm8916 msm8952 msm8937 titanium)
+else ifeq ($(TARGET_BOARD_PLATFORM),msm8916 msm8952 msm8937 msm8953)
         LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
         LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
         LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
@@ -152,7 +152,7 @@
         LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
         LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
         LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
-else ifeq ($(TARGET_BOARD_PLATFORM),msm8916 msm8952 msm8937 titanium)
+else ifeq ($(TARGET_BOARD_PLATFORM),msm8916 msm8952 msm8937 msm8953)
         LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
         LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
         LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
diff --git a/QCamera2/stack/mm-camera-test/src/mm_qcamera_socket.c b/QCamera2/stack/mm-camera-test/src/mm_qcamera_socket.c
index d3216a0..7ab8db8 100644
--- a/QCamera2/stack/mm-camera-test/src/mm_qcamera_socket.c
+++ b/QCamera2/stack/mm-camera-test/src/mm_qcamera_socket.c
@@ -560,14 +560,14 @@
   server_addr.addr_in.sin_addr.s_addr = inet_addr(ip_addr);
 
   if (server_addr.addr_in.sin_addr.s_addr == INADDR_NONE) {
-    LOGE("[ERR] %s invalid address.\n");
+    LOGE(" invalid address.\n");
     return -1;
   }
 
   /* Create an AF_INET stream socket to receive incoming connection ON */
   sock_fd = socket(AF_INET, SOCK_STREAM, 0);
   if (sock_fd < 0) {
-    LOGE("[ERR] %s socket failed\n");
+    LOGE(" socket failed\n");
     return sock_fd;
   }
 
@@ -603,7 +603,7 @@
     return sock_fd;
   }
 
-  LOGH("%s. sock_fd: %d, listen at port: %d\n",  sock_fd, port);
+  LOGH("sock_fd: %d, listen at port: %d\n",  sock_fd, port);
 
   return sock_fd;
 }
@@ -660,7 +660,7 @@
     /* no timeout */
     result = select(num_fds + 1, &tsfds, NULL, NULL, NULL);
     if (result < 0) {
-      LOGE("[ERR] select failed: %s\n", strerror(errno));
+      LOGE("select failed: %s\n", strerror(errno));
       continue;
     }
 
@@ -712,7 +712,7 @@
         lib_handle->tsctrl.proto->send_buf, lib_handle->tsctrl.proto->send_len);
     }
 
-    if (FD_ISSET(client_socket, &tsfds)) {
+    if ((client_socket < FD_SETSIZE) && (FD_ISSET(client_socket, &tsfds))) {
       if (lib_handle->tsctrl.proto == NULL) {
         LOGE(" Cannot receive msg without connect\n");
         continue;
@@ -810,7 +810,7 @@
       }
     }
 
-    if (FD_ISSET(prev_client_socket, &tsfds)) {
+    if ((prev_client_socket < FD_SETSIZE) && (FD_ISSET(prev_client_socket, &tsfds))) {
       recv_bytes = recv(prev_client_socket, (void *)buf,
         lib_handle->tsctrl.pr_proto->next_recv_len, 0);
 
diff --git a/QCamera2/stack/mm-jpeg-interface/Android.mk b/QCamera2/stack/mm-jpeg-interface/Android.mk
index 648de5a..234e1ee 100644
--- a/QCamera2/stack/mm-jpeg-interface/Android.mk
+++ b/QCamera2/stack/mm-jpeg-interface/Android.mk
@@ -41,6 +41,7 @@
 JPEG_PIPELINE_TARGET_LIST := msm8994
 JPEG_PIPELINE_TARGET_LIST += msm8992
 JPEG_PIPELINE_TARGET_LIST += msm8996
+JPEG_PIPELINE_TARGET_LIST += msmcobalt
 
 ifneq (,$(filter  $(JPEG_PIPELINE_TARGET_LIST),$(TARGET_BOARD_PLATFORM)))
     LOCAL_CFLAGS+= -DMM_JPEG_USE_PIPELINE
diff --git a/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg.h b/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg.h
index a7c7397..2213da9 100644
--- a/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg.h
+++ b/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg.h
@@ -51,6 +51,8 @@
 #define MAX_EXIF_TABLE_ENTRIES 50
 #define MAX_JPEG_SIZE 20000000
 #define MAX_OMX_HANDLES (5)
+// Thumbnail src and dest aspect ratio difference tolerance
+#define ASPECT_TOLERANCE 0.001
 
 
 /** mm_jpeg_abort_state_t:
@@ -405,6 +407,9 @@
 
   /* Pointer to the session in progress*/
   mm_jpeg_job_session_t *p_session_inprogress;
+
+  // dummy OMX handle
+  OMX_HANDLETYPE dummy_handle;
 } mm_jpeg_obj;
 
 /** mm_jpeg_pending_func_t:
diff --git a/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg.c b/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg.c
index 220f34c..2ff450f 100644
--- a/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg.c
+++ b/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg.c
@@ -31,6 +31,7 @@
 #include <pthread.h>
 #include <errno.h>
 #include <fcntl.h>
+#include <math.h>
 #define PRCTL_H <SYSTEM_HEADER_PREFIX/prctl.h>
 #include PRCTL_H
 
@@ -78,6 +79,27 @@
   mm_jpeg_queue_t* queue, void * dst_ptr);
 static OMX_ERRORTYPE mm_jpeg_session_configure(mm_jpeg_job_session_t *p_session);
 
+/** mm_jpeg_get_comp_name:
+ *
+ *  Arguments:
+ *       None
+ *
+ *  Return:
+ *       Encoder component name
+ *
+ *  Description:
+ *       Get the name of the OMX component to be used for JPEG encoding
+ *
+ **/
+inline char* mm_jpeg_get_comp_name()
+{
+#ifdef MM_JPEG_USE_PIPELINE
+  return "OMX.qcom.image.jpeg.encoder_pipeline";
+#else
+  return "OMX.qcom.image.jpeg.encoder";
+#endif
+}
+
 /** mm_jpeg_session_send_buffers:
  *
  *  Arguments:
@@ -276,7 +298,6 @@
 {
   OMX_ERRORTYPE rc = OMX_ErrorNone;
   mm_jpeg_obj *my_obj = (mm_jpeg_obj *) p_session->jpeg_obj;
-  char *omx_lib = "OMX.qcom.image.jpeg.encoder";
 
   pthread_mutex_init(&p_session->lock, NULL);
   pthread_cond_init(&p_session->cond, NULL);
@@ -298,11 +319,10 @@
   p_session->thumb_from_main = 0;
 #ifdef MM_JPEG_USE_PIPELINE
   p_session->thumb_from_main = !p_session->params.thumb_from_postview;
-  omx_lib = "OMX.qcom.image.jpeg.encoder_pipeline";
 #endif
 
   rc = OMX_GetHandle(&p_session->omx_handle,
-      omx_lib,
+      mm_jpeg_get_comp_name(),
       (void *)p_session,
       &p_session->omx_callbacks);
   if (OMX_ErrorNone != rc) {
@@ -949,6 +969,64 @@
   return ret;
 }
 
+/** mm_jpeg_update_thumbnail_crop
+ *
+ *  Arguments:
+ *    @p_thumb_dim: thumbnail dimension
+ *    @crop_width : flag indicating if width needs to be cropped
+ *
+ *  Return:
+ *    OMX error values
+ *
+ *  Description:
+ *    Updates thumbnail crop aspect ratio based on
+ *    thumbnail destination aspect ratio.
+ *
+ */
+OMX_ERRORTYPE mm_jpeg_update_thumbnail_crop(mm_jpeg_dim_t *p_thumb_dim,
+  uint8_t crop_width)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  int32_t cropped_width = 0, cropped_height = 0;
+
+  if (crop_width) {
+    // Keep height constant
+    cropped_height = p_thumb_dim->crop.height;
+    cropped_width = floor((cropped_height * p_thumb_dim->dst_dim.width) /
+      p_thumb_dim->dst_dim.height);
+    if (cropped_width % 2) {
+      cropped_width -= 1;
+    }
+  } else {
+    // Keep width constant
+    cropped_width = p_thumb_dim->crop.width;
+    cropped_height = floor((cropped_width * p_thumb_dim->dst_dim.height) /
+      p_thumb_dim->dst_dim.width);
+    if (cropped_height % 2) {
+      cropped_height -= 1;
+    }
+  }
+  p_thumb_dim->crop.left = p_thumb_dim->crop.left +
+    floor((p_thumb_dim->crop.width - cropped_width) / 2);
+  if (p_thumb_dim->crop.left % 2) {
+    p_thumb_dim->crop.left -= 1;
+  }
+  p_thumb_dim->crop.top = p_thumb_dim->crop.top +
+    floor((p_thumb_dim->crop.height - cropped_height) / 2);
+  if (p_thumb_dim->crop.top % 2) {
+    p_thumb_dim->crop.top -= 1;
+  }
+  p_thumb_dim->crop.width = cropped_width;
+  p_thumb_dim->crop.height = cropped_height;
+
+  LOGH("New thumbnail crop: left %d, top %d, crop width %d,"
+    " crop height %d", p_thumb_dim->crop.left,
+    p_thumb_dim->crop.top, p_thumb_dim->crop.width,
+    p_thumb_dim->crop.height);
+
+  return ret;
+}
+
 /** mm_jpeg_omx_config_thumbnail:
  *
  *  Arguments:
@@ -969,6 +1047,7 @@
   mm_jpeg_encode_params_t *p_params = &p_session->params;
   mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
   mm_jpeg_dim_t *p_thumb_dim = &p_jobparams->thumb_dim;
+  mm_jpeg_dim_t *p_main_dim = &p_jobparams->main_dim;
   QOMX_YUV_FRAME_INFO *p_frame_info = &thumbnail_info.tmbOffset;
   mm_jpeg_buf_t *p_tmb_buf = &p_params->src_thumb_buf[p_jobparams->thumb_index];
 
@@ -1019,10 +1098,6 @@
   thumbnail_info.scaling_enabled = 1;
   thumbnail_info.input_width = (OMX_U32)p_thumb_dim->src_dim.width;
   thumbnail_info.input_height = (OMX_U32)p_thumb_dim->src_dim.height;
-  thumbnail_info.crop_info.nWidth = (OMX_U32)p_thumb_dim->crop.width;
-  thumbnail_info.crop_info.nHeight = (OMX_U32)p_thumb_dim->crop.height;
-  thumbnail_info.crop_info.nLeft = p_thumb_dim->crop.left;
-  thumbnail_info.crop_info.nTop = p_thumb_dim->crop.top;
   thumbnail_info.rotation = (OMX_U32)p_params->thumb_rotation;
   thumbnail_info.quality = (OMX_U32)p_params->thumb_quality;
   thumbnail_info.output_width = (OMX_U32)p_thumb_dim->dst_dim.width;
@@ -1038,6 +1113,32 @@
       thumbnail_info.output_height = (OMX_U32)p_thumb_dim->dst_dim.width;
       thumbnail_info.rotation = p_session->params.rotation;
     }
+    //Thumb FOV should be within main image FOV
+    if (p_thumb_dim->crop.left < p_main_dim->crop.left) {
+      p_thumb_dim->crop.left = p_main_dim->crop.left;
+    }
+
+    if (p_thumb_dim->crop.top < p_main_dim->crop.top) {
+      p_thumb_dim->crop.top = p_main_dim->crop.top;
+    }
+
+    while ((p_thumb_dim->crop.left + p_thumb_dim->crop.width) >
+      (p_main_dim->crop.left + p_main_dim->crop.width)) {
+      if (p_thumb_dim->crop.left == p_main_dim->crop.left) {
+        p_thumb_dim->crop.width = p_main_dim->crop.width;
+      } else {
+        p_thumb_dim->crop.left = p_main_dim->crop.left;
+      }
+    }
+
+    while ((p_thumb_dim->crop.top + p_thumb_dim->crop.height) >
+      (p_main_dim->crop.top + p_main_dim->crop.height)) {
+      if (p_thumb_dim->crop.top == p_main_dim->crop.top) {
+        p_thumb_dim->crop.height = p_main_dim->crop.height;
+      } else {
+        p_thumb_dim->crop.top = p_main_dim->crop.top;
+      }
+    }
   } else if ((p_thumb_dim->dst_dim.width > p_thumb_dim->src_dim.width) ||
     (p_thumb_dim->dst_dim.height > p_thumb_dim->src_dim.height)) {
     LOGE("Incorrect thumbnail dim %dx%d resetting to %dx%d", p_thumb_dim->dst_dim.width,
@@ -1047,6 +1148,26 @@
     thumbnail_info.output_height = (OMX_U32)p_thumb_dim->src_dim.height;
   }
 
+  // If the thumbnail crop aspect ratio and the thumbnail dest aspect
+  // ratio are different, reset the thumbnail crop
+  double thumbcrop_aspect_ratio = (double)p_thumb_dim->crop.width /
+    (double)p_thumb_dim->crop.height;
+  double thumbdst_aspect_ratio = (double)p_thumb_dim->dst_dim.width /
+    (double)p_thumb_dim->dst_dim.height;
+  if ((thumbdst_aspect_ratio - thumbcrop_aspect_ratio) >
+    ASPECT_TOLERANCE) {
+    mm_jpeg_update_thumbnail_crop(p_thumb_dim, 0);
+  } else if ((thumbcrop_aspect_ratio - thumbdst_aspect_ratio) >
+    ASPECT_TOLERANCE) {
+    mm_jpeg_update_thumbnail_crop(p_thumb_dim, 1);
+  }
+
+  // Fill thumbnail crop info
+  thumbnail_info.crop_info.nWidth = (OMX_U32)p_thumb_dim->crop.width;
+  thumbnail_info.crop_info.nHeight = (OMX_U32)p_thumb_dim->crop.height;
+  thumbnail_info.crop_info.nLeft = p_thumb_dim->crop.left;
+  thumbnail_info.crop_info.nTop = p_thumb_dim->crop.top;
+
   memset(p_frame_info, 0x0, sizeof(*p_frame_info));
 
   p_frame_info->cbcrStartOffset[0] = p_tmb_buf->offset.mp[0].len;
@@ -2080,6 +2201,9 @@
   }
 #endif
 
+  // create dummy OMX handle to avoid dlopen latency
+  OMX_GetHandle(&my_obj->dummy_handle, mm_jpeg_get_comp_name(), NULL, NULL);
+
   return rc;
 }
 
@@ -2106,6 +2230,10 @@
     LOGE("Error");
   }
 
+  if (my_obj->dummy_handle) {
+    OMX_FreeHandle(my_obj->dummy_handle);
+  }
+
   /* unload OMX engine */
   OMX_Deinit();
 
diff --git a/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c b/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c
index c3eb3de..e56fc24 100644
--- a/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c
+++ b/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c
@@ -336,7 +336,7 @@
   } else {
     flash_fired = 0;
   }
-  LOGD("Flash value %d flash mode %d flash state %d", val_short,
+  LOGD("Flash mode %d flash state %d",
     p_sensor_params->flash_mode, p_sensor_params->flash_state);
 
   switch(p_sensor_params->flash_mode) {
@@ -610,12 +610,12 @@
   /* take the cached values if meta is invalid */
   if ((!is_3a_meta_valid) && (hal_version == CAM_HAL_V1)) {
     p_3a_params = p_cam_exif_params->cam_3a_params;
-    LOGE("Warning using cached values for 3a");
+    LOGW("Warning using cached values for 3a");
   }
 
   if ((!is_sensor_meta_valid) && (hal_version == CAM_HAL_V1)) {
     p_sensor_params = p_cam_exif_params->sensor_params;
-    LOGE("Warning using cached values for sensor");
+    LOGW("Warning using cached values for sensor");
   }
 
   if ((hal_version != CAM_HAL_V1) || (p_sensor_params.sens_type != CAM_SENSOR_YUV)) {
@@ -632,10 +632,12 @@
 
   if (p_meta) {
     short val_short = 0;
+    cam_asd_decision_t *scene_info = NULL;
 
-    IF_META_AVAILABLE(cam_auto_scene_t, scene_cap_type,
-        CAM_INTF_META_ASD_SCENE_CAPTURE_TYPE, p_meta) {
-      val_short = (short) *scene_cap_type;
+    IF_META_AVAILABLE(cam_asd_decision_t, scene_cap_type,
+        CAM_INTF_META_ASD_SCENE_INFO, p_meta) {
+      scene_info = (cam_asd_decision_t*)scene_cap_type;
+      val_short = (short) scene_info->detected_scene;
     }
 
     rc = addExifEntry(exif_info, EXIFTAGID_SCENE_CAPTURE_TYPE, EXIF_SHORT,
diff --git a/QCamera2/util/QCameraDisplay.cpp b/QCamera2/util/QCameraDisplay.cpp
index 72280bb..108cc72 100644
--- a/QCamera2/util/QCameraDisplay.cpp
+++ b/QCamera2/util/QCameraDisplay.cpp
@@ -95,7 +95,7 @@
     looper = new android::Looper(false);
     status_t status = pQCameraDisplay->mDisplayEventReceiver.initCheck();
     if (status != NO_ERROR) {
-        ALOGE("Initialization of DisplayEventReceiver failed with status: %d", status);
+        LOGE("Initialization of DisplayEventReceiver failed with status: %d", status);
         return NULL;
     }
     looper->addFd(pQCameraDisplay->mDisplayEventReceiver.getFd(), 0, ALOOPER_EVENT_INPUT,
diff --git a/QCamera2/util/QCameraPerf.cpp b/QCamera2/util/QCameraPerf.cpp
index bba23bd..83ba4cc 100644
--- a/QCamera2/util/QCameraPerf.cpp
+++ b/QCamera2/util/QCameraPerf.cpp
@@ -177,6 +177,22 @@
     Mutex::Autolock lock(mLock);
     if (mPerfLockEnable) {
         LOGD("E");
+
+        if (mActivePowerHints.empty() == false) {
+            // Disable the active power hint
+            mCurrentPowerHint = *mActivePowerHints.begin();
+            powerHintInternal(mCurrentPowerHint, false);
+            mActivePowerHints.clear();
+        }
+
+        if ((NULL != perf_lock_rel) && (mPerfLockHandleTimed >= 0)) {
+            (*perf_lock_rel)(mPerfLockHandleTimed);
+        }
+
+        if ((NULL != perf_lock_rel) && (mPerfLockHandle >= 0)) {
+            (*perf_lock_rel)(mPerfLockHandle);
+        }
+
         if (mDlHandle) {
             perf_lock_acq  = NULL;
             perf_lock_rel  = NULL;
@@ -376,7 +392,7 @@
     if (mPerfLockEnable) {
         LOGD("E");
         if (mPerfLockHandleTimed < 0) {
-            LOGE("mPerfLockHandle < 0,check if lock is acquired");
+            LOGW("mPerfLockHandle < 0,check if lock is acquired");
             return ret;
         }
         LOGD("perf_handle_rel %d ", mPerfLockHandleTimed);
@@ -418,7 +434,7 @@
     if (mPerfLockEnable) {
         LOGD("E");
         if (mPerfLockHandle < 0) {
-            LOGE("mPerfLockHandle < 0,check if lock is acquired");
+            LOGW("mPerfLockHandle < 0,check if lock is acquired");
             return ret;
         }
         LOGD("perf_handle_rel %d ", mPerfLockHandle);
@@ -497,12 +513,12 @@
         for (List<power_hint_t>::iterator it = mActivePowerHints.begin();
                 it != mActivePowerHints.end(); ++it) {
             if (*it == hint) {
-                mActivePowerHints.erase(it);
                 if (it != mActivePowerHints.begin()) {
-                    LOGE("Request to remove the previous power hint: %d instead of"
+                    LOGW("Request to remove the previous power hint: %d instead of "
                             "currently active power hint: %d", static_cast<int>(hint),
                                                             static_cast<int>(mCurrentPowerHint));
                 }
+                mActivePowerHints.erase(it);
                 break;
             }
         }