camera: Update to AU171 proprietary drop

Update to AU_LINUX_ANDROID_LA.HB.1.3.9.06.00.01.213.171

camera_hal: (commits since AU169)
d24d522 QCamera2/HAL: Fix CLANG errors
9184bc0 Merge dev.1.0 05-09 ToT to LA.HB.1.3.9
b210f8a Merge "QCamera2:HAL1: Close duplicated FD's for media extension." into camera.lnx.1.0-dev.1.0
f251991 Merge "QCamera2:HAL1: Update native handle for every timestamp callback" into camera.lnx.1.0-dev.1.0
4103d20 Merge "QCamera2:HAL1: Flush pending video frames during stopRecording." into camera.lnx.1.0-dev.1.0
5f817e9 Merge "QCamera2: HAL: Return error when analysis info is invalid" into camera.lnx.1.0-dev.1.0
5be1a13 QCamera2: Fix compilation errors for unused variables.
941faad Merge "QCamera2: Added support for BE stats Debug Data" into camera.lnx.1.0-dev.1.0
3468f87 Merge "QCamera2: HAL3: Link Bayer+Mono sessions" into camera.lnx.1.0-dev.1.0
fa1c092 QCamera2:HAL1: Flush pending video frames during stopRecording.
4279aeb QCamera2: HAL: Return error when analysis info is invalid
33cb3f1 Merge "Revert "QCamera2: HAL3 support for Bayer+Mono vendor tags" latest patchset need to be tested for preAU again CRs-Fixed: 1004987" into camera.lnx.1.0-dev.1.0
ba337f9 Revert "QCamera2: HAL3 support for Bayer+Mono vendor tags" latest patchset need to be tested for preAU again CRs-Fixed: 1004987
ef54431 Merge "QCamera2: Adding support for mono formats in mm-camera" into camera.lnx.1.0-dev.1.0
bd40a72 Merge "QCamera2: HAL3 support for Bayer+Mono vendor tags" into camera.lnx.1.0-dev.1.0
cb05396 Merge "QCamera2: Provide camera output colorspace to video." into camera.lnx.1.0-dev.1.0
8020c6e Merge "QCamera2: Check metadata callback msg type before ASD processing." into camera.lnx.1.0-dev.1.0
89f75d5 Merge "QCamera2: HAL: Avoid the redundant Steam Info IOCTL" into camera.lnx.1.0-dev.1.0
eeb015e QCamera2:HAL1: Close duplicated FD's for media extension.
3fa680f QCamera2:HAL1: Update native handle for every timestamp callback
40b708f QCamera2: HAL3 support for Bayer+Mono vendor tags
4a9e7e4 QCamera2/HAL3: Remove unused err2 label
2bc4060 HAL: Fixed compilation error for AOSP upgrade
fd70238 QCamera2: Added support for BE stats Debug Data
eb21933 QCamera2: Adding support for mono formats in mm-camera
602c71e QCamera2: HAL: Avoid the redundant Steam Info IOCTL
5612332 QCamera2: Check metadata callback msg type before ASD processing.
0c9e55f QCamera2: HAL3: Link Bayer+Mono sessions
d14952e QCamera2: Provide camera output colorspace to video.
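
A note on the media-extension commits above: with TARGET_USES_MEDIA_EXTENSIONS
set, the HAL fills its video metadata buffers as AOSP VideoNativeHandleMetadata
packets instead of the legacy encoder_media_buffer_type, and keeps the native
handles itself (mNativeHandle[]) so they can be refreshed per timestamp callback
and closed on release. A minimal illustrative sketch of that pattern (not part
of the patch), assuming only the headers named in the diff:

    #include <cutils/native_handle.h>
    #ifdef USE_MEDIA_EXTENSIONS
    #include <media/hardware/HardwareAPI.h>      // VideoNativeHandleMetadata
    typedef struct VideoNativeHandleMetadata media_metadata_buffer;
    #else
    #include "QComOMXMetadata.h"                 // encoder_media_buffer_type
    typedef struct encoder_media_buffer_type media_metadata_buffer;
    #endif

    // Point a metadata packet at the HAL-owned native handle for one buffer.
    static void fillMetadataPacket(media_metadata_buffer *packet,
            native_handle_t *nh)
    {
    #ifdef USE_MEDIA_EXTENSIONS
        packet->eType   = kMetadataBufferTypeNativeHandleSource;
        packet->pHandle = nh;
    #else
        packet->buffer_type = kMetadataBufferTypeCameraSource;
        packet->meta_handle = nh;
    #endif
    }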

 Conflicts:
	QCamera2/HAL3/QCamera3Channel.cpp
	QCamera2/HAL3/QCamera3HWI.cpp

Change-Id: Ic546004e2ccf965353802b2cf3731487d421b760
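
Related note: the post-processing feature mask widens from uint32_t to
cam_feature_mask_t throughout, so the matching log format specifiers move from
%x to %llx. A minimal sketch of the intent, assuming cam_feature_mask_t is the
64-bit typedef from cam_types.h and reusing feature flags referenced in the
diff:

    typedef uint64_t cam_feature_mask_t;   // assumption: mirrors cam_types.h

    cam_feature_mask_t pp_mask = CAM_QCOM_FEATURE_PAAF | CAM_QCOM_FEATURE_HDR;
    LOGH("pp feature mask set = %llx", pp_mask);
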
diff --git a/QCamera2/Android.mk b/QCamera2/Android.mk
index 1681ea7..470c56b 100644
--- a/QCamera2/Android.mk
+++ b/QCamera2/Android.mk
@@ -11,6 +11,7 @@
         util/QCameraFlash.cpp \
         util/QCameraPerf.cpp \
         util/QCameraQueue.cpp \
+        util/QCameraCommon.cpp \
         QCamera2Hal.cpp \
         QCamera2Factory.cpp
 
@@ -50,6 +51,11 @@
 LOCAL_CFLAGS += -DVANILLA_HAL
 endif
 
+#use media extension
+ifeq ($(TARGET_USES_MEDIA_EXTENSIONS), true)
+LOCAL_CFLAGS += -DUSE_MEDIA_EXTENSIONS
+endif
+
 #HAL 1.0 Flags
 LOCAL_CFLAGS += -DDEFAULT_DENOISE_MODE_ON -DHAL3 -DQCAMERA_REDEFINE_LOG
 
@@ -108,5 +114,4 @@
 include $(BUILD_SHARED_LIBRARY)
 
 include $(call first-makefiles-under,$(LOCAL_PATH))
-
 endif
diff --git a/QCamera2/HAL/QCamera2HWI.cpp b/QCamera2/HAL/QCamera2HWI.cpp
index a0bbf05..3df922c 100644
--- a/QCamera2/HAL/QCamera2HWI.cpp
+++ b/QCamera2/HAL/QCamera2HWI.cpp
@@ -757,6 +757,7 @@
             struct camera_device *device, const void *opaque)
 {
     ATRACE_CALL();
+    int32_t ret = NO_ERROR;
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
     if (!hw) {
@@ -768,9 +769,22 @@
         return;
     }
     LOGD("E camera id %d", hw->getCameraId());
+
+    //Close and delete duplicated native handle and FD's.
+    if (hw->mVideoMem != NULL) {
+        ret = hw->mVideoMem->closeNativeHandle(opaque,
+                hw->mStoreMetaDataInFrame > 0);
+        if (ret != NO_ERROR) {
+            LOGE("Invalid video metadata");
+            return;
+        }
+    } else {
+        LOGW("Possible FD leak. Release recording called after stop");
+    }
+
     hw->lockAPI();
     qcamera_api_result_t apiResult;
-    int32_t ret = hw->processAPI(QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME, (void *)opaque);
+    ret = hw->processAPI(QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME, (void *)opaque);
     if (ret == NO_ERROR) {
         hw->waitAPIResult(QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME, &apiResult);
     }
@@ -1661,6 +1675,7 @@
       mJpegClientHandle(0),
       mJpegHandleOwner(false),
       mMetadataMem(NULL),
+      mVideoMem(NULL),
       mCACDoneReceived(false),
       m_bNeedRestart(false)
 {
@@ -2334,8 +2349,14 @@
     }
     memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
                                         sizeof(cam_capability_t));
-    gCamCapability[cameraId]->analysis_padding_info.offset_info.offset_x = 0;
-    gCamCapability[cameraId]->analysis_padding_info.offset_info.offset_y = 0;
+
+    int index;
+    for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
+        cam_analysis_info_t *p_analysis_info =
+                &gCamCapability[cameraId]->analysis_info[index];
+        p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
+        p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
+    }
 
     rc = NO_ERROR;
 
@@ -2836,6 +2857,7 @@
             }
             videoMemory->setVideoInfo(usage, fmt);
             mem = videoMemory;
+            mVideoMem = videoMemory;
         }
         break;
     case CAM_STREAM_TYPE_CALLBACK:
@@ -2918,7 +2940,7 @@
     uint8_t bufNum = 0;
     size_t bufSize = 0;
     QCameraHeapMemory *miscBuf = NULL;
-    uint32_t feature_mask =
+    cam_feature_mask_t feature_mask =
             streamInfo->reprocess_config.pp_feature_config.feature_mask;
 
     switch (streamInfo->stream_type) {
@@ -3166,6 +3188,7 @@
         }
         video_mem->setVideoInfo(usage, fmt);
         mem = static_cast<QCameraMemory *>(video_mem);
+        mVideoMem = video_mem;
     }
     break;
 
@@ -3320,10 +3343,16 @@
               Preview callback  in UBWC case*/
         if (!(msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME)) &&
                 (msg_type & CAMERA_MSG_PREVIEW_FRAME)) {
-            // Start callback channel only when preview channel is active
-            if ((m_channels[QCAMERA_CH_TYPE_CALLBACK] != NULL) &&
-                    (m_channels[QCAMERA_CH_TYPE_PREVIEW] != NULL) &&
-                    (m_channels[QCAMERA_CH_TYPE_PREVIEW]->isActive())){
+            // Start callback channel only when preview/zsl channel is active
+            QCameraChannel* previewCh = NULL;
+            if (isZSLMode() && (getRecordingHintValue() != true)) {
+                previewCh = m_channels[QCAMERA_CH_TYPE_ZSL];
+            } else {
+                previewCh = m_channels[QCAMERA_CH_TYPE_PREVIEW];
+            }
+            QCameraChannel* callbackCh = m_channels[QCAMERA_CH_TYPE_CALLBACK];
+            if ((callbackCh != NULL) &&
+                    (previewCh != NULL) && previewCh->isActive()) {
                 rc = startChannel(QCAMERA_CH_TYPE_CALLBACK);
                 if (rc != NO_ERROR) {
                     LOGE("START Callback Channel failed");
@@ -3611,6 +3640,7 @@
     int32_t rc = NO_ERROR;
 
     LOGI("E");
+    mVideoMem = NULL;
     //link meta stream with video channel if low power mode.
     if (isLowPowerMode()) {
         // Find and try to link a metadata stream from preview channel
@@ -3709,9 +3739,10 @@
     }
     int rc = stopChannel(QCAMERA_CH_TYPE_VIDEO);
 
+    m_cbNotifier.flushVideoNotifications();
     // Disable power hint for video encoding
     m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
-
+    mVideoMem = NULL;
     LOGI("X rc = %d", rc);
     return rc;
 }
@@ -3732,8 +3763,9 @@
 {
     int32_t rc = UNKNOWN_ERROR;
     QCameraVideoChannel *pChannel =
-        (QCameraVideoChannel *)m_channels[QCAMERA_CH_TYPE_VIDEO];
+            (QCameraVideoChannel *)m_channels[QCAMERA_CH_TYPE_VIDEO];
     LOGD("opaque data = %p",opaque);
+
     if(pChannel != NULL) {
         rc = pChannel->releaseFrame(opaque, mStoreMetaDataInFrame > 0);
     }
@@ -3757,8 +3789,6 @@
     cam_focus_mode_type focusMode = mParameters.getFocusMode();
     LOGH("E");
 
-    m_currentFocusState = CAM_AF_STATE_INACTIVE;
-
     switch (focusMode) {
     case CAM_FOCUS_MODE_AUTO:
     case CAM_FOCUS_MODE_MACRO:
@@ -4983,7 +5013,11 @@
         LOGE("take_picture_thread: NULL camera device");
         return (void *)BAD_VALUE;
     }
-    hw->takeLiveSnapshot_internal();
+    if (hw->bLiveSnapshot) {
+        hw->takeLiveSnapshot_internal();
+    } else {
+        hw->cancelLiveSnapshot_internal();
+    }
     return (void* )NULL;
 }
 
@@ -5037,6 +5071,11 @@
 int QCamera2HardwareInterface::takeLiveSnapshot()
 {
     int rc = NO_ERROR;
+    if (mLiveSnapshotThread != 0) {
+        pthread_join(mLiveSnapshotThread,NULL);
+        mLiveSnapshotThread = 0;
+    }
+    bLiveSnapshot = true;
     rc= pthread_create(&mLiveSnapshotThread, NULL, Live_Snapshot_thread, (void *) this);
     if (!rc) {
         pthread_setname_np(mLiveSnapshotThread, "CAM_liveSnap");
@@ -5450,6 +5489,31 @@
 int QCamera2HardwareInterface::cancelLiveSnapshot()
 {
     int rc = NO_ERROR;
+    if (mLiveSnapshotThread != 0) {
+        pthread_join(mLiveSnapshotThread,NULL);
+        mLiveSnapshotThread = 0;
+    }
+    bLiveSnapshot = false;
+    rc= pthread_create(&mLiveSnapshotThread, NULL, Live_Snapshot_thread, (void *) this);
+    if (!rc) {
+        pthread_setname_np(mLiveSnapshotThread, "CAM_cancel_liveSnap");
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : cancelLiveSnapshot_internal
+ *
+ * DESCRIPTION: cancel live snapshot during recording
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::cancelLiveSnapshot_internal() {
+    int rc = NO_ERROR;
 
     unconfigureAdvancedCapture();
     LOGH("Enable display frames again");
@@ -5459,11 +5523,6 @@
         m_perfLock.lock_rel();
     }
 
-    if (mLiveSnapshotThread != 0) {
-        pthread_join(mLiveSnapshotThread,NULL);
-        mLiveSnapshotThread = 0;
-    }
-
     //stop post processor
     m_postprocessor.stop();
 
@@ -6422,46 +6481,45 @@
 int32_t QCamera2HardwareInterface::processASDUpdate(
         __unused cam_asd_decision_t asd_decision)
 {
-    size_t data_len = sizeof(cam_auto_scene_t);
-    size_t buffer_len = 1 *sizeof(int)       //meta type
-                      + 1 *sizeof(int)       //data len
-                      + data_len;            //data
-    camera_memory_t *asdBuffer = mGetMemory(-1,
-                                             buffer_len,
-                                             1,
-                                             mCallbackCookie);
-    if ( NULL == asdBuffer ) {
-        LOGE("Not enough memory for histogram data");
-        return NO_MEMORY;
-    }
-
-    int *pASDData = (int *)asdBuffer->data;
-    if (pASDData == NULL) {
-        LOGE("memory data ptr is NULL");
-        return UNKNOWN_ERROR;
-    }
-
 #ifndef VANILLA_HAL
-    pASDData[0] = CAMERA_META_DATA_ASD;
-    pASDData[1] = (int)data_len;
-    pASDData[2] = asd_decision.detected_scene;
+    if ( msgTypeEnabled(CAMERA_MSG_META_DATA) ) {
+        size_t data_len = sizeof(cam_auto_scene_t);
+        size_t buffer_len = 1 *sizeof(int)       //meta type
+                + 1 *sizeof(int)       //data len
+                + data_len;            //data
+        camera_memory_t *asdBuffer = mGetMemory(-1,
+                buffer_len, 1, mCallbackCookie);
+        if ( NULL == asdBuffer ) {
+            LOGE("Not enough memory for histogram data");
+            return NO_MEMORY;
+        }
 
-    qcamera_callback_argm_t cbArg;
-    memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
-    cbArg.cb_type = QCAMERA_DATA_CALLBACK;
-    cbArg.msg_type = CAMERA_MSG_META_DATA;
-    cbArg.data = asdBuffer;
-    cbArg.user_data = asdBuffer;
-    cbArg.cookie = this;
-    cbArg.release_cb = releaseCameraMemory;
-    int32_t rc = m_cbNotifier.notifyCallback(cbArg);
-    if (rc != NO_ERROR) {
-        LOGE("fail sending notification");
-        asdBuffer->release(asdBuffer);
+        int *pASDData = (int *)asdBuffer->data;
+        if (pASDData == NULL) {
+            LOGE("memory data ptr is NULL");
+            return UNKNOWN_ERROR;
+        }
+
+        pASDData[0] = CAMERA_META_DATA_ASD;
+        pASDData[1] = (int)data_len;
+        pASDData[2] = asd_decision.detected_scene;
+
+        qcamera_callback_argm_t cbArg;
+        memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+        cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+        cbArg.msg_type = CAMERA_MSG_META_DATA;
+        cbArg.data = asdBuffer;
+        cbArg.user_data = asdBuffer;
+        cbArg.cookie = this;
+        cbArg.release_cb = releaseCameraMemory;
+        int32_t rc = m_cbNotifier.notifyCallback(cbArg);
+        if (rc != NO_ERROR) {
+            LOGE("fail sending notification");
+            asdBuffer->release(asdBuffer);
+        }
     }
 #endif
     return NO_ERROR;
-
 }
 
 /*===========================================================================
@@ -6690,9 +6748,25 @@
     }
 
     cam_padding_info_t padding_info;
+
     if (streamType == CAM_STREAM_TYPE_ANALYSIS) {
-        padding_info =
-                gCamCapability[mCameraId]->analysis_padding_info;
+        cam_analysis_info_t analysisInfo;
+        cam_feature_mask_t featureMask;
+
+        featureMask = 0;
+        mParameters.getStreamPpMask(CAM_STREAM_TYPE_ANALYSIS, featureMask);
+        rc = mParameters.getAnalysisInfo(
+                ((mParameters.getRecordingHintValue() == true) &&
+                 mParameters.fdModeInVideo()),
+                FALSE,
+                featureMask,
+                &analysisInfo);
+        if (rc != NO_ERROR) {
+            LOGE("getAnalysisInfo failed, ret = %d", rc);
+            return rc;
+        }
+
+        padding_info = analysisInfo.analysis_padding_info;
     } else {
         padding_info =
                 gCamCapability[mCameraId]->padding_info;
@@ -7429,9 +7503,9 @@
                 mParameters.getReprocCount(), curIndex);
     }
 
-    LOGH("Supported pproc feature mask = %x",
+    LOGH("Supported pproc feature mask = %llx",
             gCamCapability[mCameraId]->qcom_supported_feature_mask);
-    uint32_t feature_mask = gCamCapability[mCameraId]->qcom_supported_feature_mask;
+    cam_feature_mask_t feature_mask = gCamCapability[mCameraId]->qcom_supported_feature_mask;
     int32_t zoomLevel = mParameters.getParmZoomLevel();
     uint32_t rotation = mParameters.getJpegRotation();
     int32_t effect = mParameters.getEffectValue();
@@ -7615,7 +7689,7 @@
             break;
 
     }
-    LOGH("pproc feature mask set = %x pass count = %d",
+    LOGH("pproc feature mask set = %llx pass count = %d",
              pp_config.feature_mask, curIndex);
     return rc;
 }
@@ -7680,7 +7754,7 @@
     uint8_t minStreamBufNum = getBufNumRequired(CAM_STREAM_TYPE_OFFLINE_PROC);
 
     //WNR and HDR happen inline. No extra buffers needed.
-    uint32_t temp_feature_mask = pp_config.feature_mask;
+    cam_feature_mask_t temp_feature_mask = pp_config.feature_mask;
     temp_feature_mask &= ~CAM_QCOM_FEATURE_HDR;
     if (temp_feature_mask && mParameters.isHDREnabled()) {
         minStreamBufNum = (uint8_t)(1 + mParameters.getNumOfExtraHDRInBufsIfNeeded());
@@ -7723,11 +7797,11 @@
     }
 
     //Mask out features that are already processed in snapshot stream.
-    uint32_t snapshot_feature_mask = 0;
+    cam_feature_mask_t snapshot_feature_mask = 0;
     mParameters.getStreamPpMask(CAM_STREAM_TYPE_SNAPSHOT, snapshot_feature_mask);
 
     pp_config.feature_mask &= ~snapshot_feature_mask;
-    LOGH("Snapshot feature mask: 0x%x, reproc feature mask: 0x%x",
+    LOGH("Snapshot feature mask: 0x%llx, reproc feature mask: 0x%llx",
             snapshot_feature_mask, pp_config.feature_mask);
 
     bool offlineReproc = isRegularCapture();
diff --git a/QCamera2/HAL/QCamera2HWI.h b/QCamera2/HAL/QCamera2HWI.h
index 63e3c5f..0c1a00a 100644
--- a/QCamera2/HAL/QCamera2HWI.h
+++ b/QCamera2/HAL/QCamera2HWI.h
@@ -168,7 +168,9 @@
     static void releaseNotifications(void *data, void *user_data);
     static bool matchSnapshotNotifications(void *data, void *user_data);
     static bool matchPreviewNotifications(void *data, void *user_data);
+    static bool matchTimestampNotifications(void *data, void *user_data);
     virtual int32_t flushPreviewNotifications();
+    virtual int32_t flushVideoNotifications();
 private:
 
     camera_notify_callback         mNotifyCb;
@@ -217,6 +219,7 @@
     static int pre_take_picture(struct camera_device *);
     static int take_picture(struct camera_device *);
     int takeLiveSnapshot_internal();
+    int cancelLiveSnapshot_internal();
     int takeBackendPic_internal(bool *JpegMemOpt, char *raw_format);
     void clearIntPendingEvents();
     void checkIntPicPending(bool JpegMemOpt, char *raw_format);
@@ -299,6 +302,7 @@
     int32_t getJpegHandleInfo(mm_jpeg_ops_t *ops,
             mm_jpeg_mpo_ops_t *mpo_ops, uint32_t *pJpegClientHandle);
     uint32_t getCameraId() { return mCameraId; };
+    bool bLiveSnapshot;
 private:
     int setPreviewWindow(struct preview_stream_ops *window);
     int setCallBacks(
@@ -748,7 +752,6 @@
     mm_jpeg_mpo_ops_t     mJpegMpoHandle;
     uint32_t              mJpegClientHandle;
     bool                  mJpegHandleOwner;
-
    //ts add for makeup
 #ifdef TARGET_TS_MAKEUP
     TSRect mFaceRect;
@@ -757,6 +760,7 @@
     bool TsMakeupProcess(mm_camera_buf_def_t *frame,QCameraStream * stream,TSRect& faceRect);
 #endif
     QCameraMemory *mMetadataMem;
+    QCameraVideoMemory *mVideoMem;
 
     static uint32_t sNextJobId;
 
diff --git a/QCamera2/HAL/QCamera2HWICallbacks.cpp b/QCamera2/HAL/QCamera2HWICallbacks.cpp
index c6312dd..ec05ec4 100644
--- a/QCamera2/HAL/QCamera2HWICallbacks.cpp
+++ b/QCamera2/HAL/QCamera2HWICallbacks.cpp
@@ -37,9 +37,6 @@
 #include STAT_H
 #include <utils/Errors.h>
 
-// OpenMAX dependencies
-#include "QComOMXMetadata.h"
-
 // Camera dependencies
 #include "QCamera2HWI.h"
 #include "QCameraTrace.h"
@@ -1386,7 +1383,7 @@
                                                         void *userdata)
 {
     ATRACE_CALL();
-    QCameraMemory *videoMemObj = NULL;
+    QCameraVideoMemory *videoMemObj = NULL;
     camera_memory_t *video_mem = NULL;
     nsecs_t timeStamp = 0;
     bool triggerTCB = FALSE;
@@ -1423,11 +1420,12 @@
             LOGD("Video frame to encoder TimeStamp : %lld batch = 0",
                     timeStamp);
             pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_VIDEO);
-            videoMemObj = (QCameraMemory *)frame->mem_info;
+            videoMemObj = (QCameraVideoMemory *)frame->mem_info;
             video_mem = NULL;
             if (NULL != videoMemObj) {
                 video_mem = videoMemObj->getMemory(frame->buf_idx,
                         (pme->mStoreMetaDataInFrame > 0)? true : false);
+                videoMemObj->updateNativeHandle(frame->buf_idx);
                 triggerTCB = TRUE;
             }
         } else {
@@ -1449,7 +1447,8 @@
                 }
             }
             video_mem = stream->mCurMetaMemory;
-            if (video_mem == NULL) {
+            nh = videoMemObj->updateNativeHandle(stream->mCurMetaIndex);
+            if (video_mem == NULL || nh == NULL) {
                 LOGE("No Free metadata. Drop this frame");
                 stream->mCurBufIndex = -1;
                 stream->bufDone(frame->buf_idx);
@@ -1457,9 +1456,6 @@
                 return;
             }
 
-            struct encoder_media_buffer_type * packet =
-                    (struct encoder_media_buffer_type *)video_mem->data;
-            nh = const_cast<native_handle_t *>(packet->meta_handle);
             int index = stream->mCurBufIndex;
             int fd_cnt = pme->mParameters.getVideoBatchSize();
             nsecs_t frame_ts = nsecs_t(frame->ts.tv_sec) * 1000000000LL
@@ -1499,19 +1495,13 @@
             }
         }
     } else {
-        videoMemObj = (QCameraMemory *)frame->mem_info;
+        videoMemObj = (QCameraVideoMemory *)frame->mem_info;
         video_mem = NULL;
         native_handle_t *nh = NULL;
         int fd_cnt = frame->user_buf.bufs_used;
         if (NULL != videoMemObj) {
             video_mem = videoMemObj->getMemory(frame->buf_idx, true);
-            if (video_mem != NULL) {
-                struct encoder_media_buffer_type * packet =
-                        (struct encoder_media_buffer_type *)video_mem->data;
-                nh = const_cast<native_handle_t *>(packet->meta_handle);
-            } else {
-                LOGE("video_mem NULL");
-            }
+            nh = videoMemObj->updateNativeHandle(frame->buf_idx);
         } else {
             LOGE("videoMemObj NULL");
         }
@@ -2357,6 +2347,30 @@
         }
     }
 
+    IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t, bestats_exif_debug_params,
+            CAM_INTF_META_EXIF_DEBUG_BESTATS, pMetaData) {
+        if (pme->mExifParams.debug_params) {
+            pme->mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
+            pme->mExifParams.debug_params->bestats_debug_params_valid = TRUE;
+        }
+    }
+
+    IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
+            CAM_INTF_META_EXIF_DEBUG_BHIST, pMetaData) {
+        if (pme->mExifParams.debug_params) {
+            pme->mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
+            pme->mExifParams.debug_params->bhist_debug_params_valid = TRUE;
+        }
+    }
+
+    IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
+            CAM_INTF_META_EXIF_DEBUG_3A_TUNING, pMetaData) {
+        if (pme->mExifParams.debug_params) {
+            pme->mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
+            pme->mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
+        }
+    }
+
     IF_META_AVAILABLE(uint32_t, led_mode, CAM_INTF_META_LED_MODE_OVERRIDE, pMetaData) {
         qcamera_sm_internal_evt_payload_t *payload =
                 (qcamera_sm_internal_evt_payload_t *)
@@ -3097,6 +3111,33 @@
 }
 
 /*===========================================================================
+ * FUNCTION   : matchTimestampNotifications
+ *
+ * DESCRIPTION: matches timestamp data callbacks
+ *
+ * PARAMETERS :
+ *   @data      : data to match
+ *   @user_data : context data
+ *
+ * RETURN     : bool match
+ *              true - match found
+ *              false- match not found
+ *==========================================================================*/
+bool QCameraCbNotifier::matchTimestampNotifications(void *data,
+        void */*user_data*/)
+{
+    qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
+    if (NULL != arg) {
+        if ((QCAMERA_DATA_TIMESTAMP_CALLBACK == arg->cb_type) &&
+                (CAMERA_MSG_VIDEO_FRAME == arg->msg_type)) {
+            return true;
+        }
+    }
+
+    return false;
+}
+
+/*===========================================================================
  * FUNCTION   : cbNotifyRoutine
  *
  * DESCRIPTION: callback thread which interfaces with the upper layers
@@ -3140,7 +3181,7 @@
                 numOfSnapshotExpected = pme->mParent->numOfSnapshotsExpected();
                 longShotEnabled = pme->mParent->isLongshotEnabled();
                 LOGD("Num Snapshots Expected = %d",
-                   numOfSnapshotExpected);
+                       numOfSnapshotExpected);
                 numOfSnapshotRcvd = 0;
             }
             break;
@@ -3160,7 +3201,7 @@
                 cbStatus = NO_ERROR;
                 if (NULL != cb) {
                     LOGD("cb type %d received",
-                          cb->cb_type);
+                              cb->cb_type);
 
                     if (pme->mParent->msgTypeEnabledWithLock(cb->msg_type)) {
                         switch (cb->cb_type) {
@@ -3176,7 +3217,7 @@
                                                   cb->ext2,
                                                   pme->mCallbackCookie);
                                 } else {
-                                    LOGE("notify callback not set!");
+                                    LOGW("notify callback not set!");
                                 }
                                 if (cb->release_cb) {
                                     cb->release_cb(cb->user_data, cb->cookie,
@@ -3193,7 +3234,7 @@
                                                  cb->metadata,
                                                  pme->mCallbackCookie);
                                 } else {
-                                    LOGE("data callback not set!");
+                                    LOGW("data callback not set!");
                                 }
                                 if (cb->release_cb) {
                                     cb->release_cb(cb->user_data, cb->cookie,
@@ -3250,8 +3291,7 @@
                                         // release_cb should not be called
                                         // muxer will release after its done with
                                         // processing the buffer
-                                    }
-                                    else {
+                                    } else if(pme->mDataCb){
                                         pme->mDataCb(cb->msg_type, cb->data, cb->index,
                                                 cb->metadata, pme->mCallbackCookie);
                                         if (cb->release_cb) {
@@ -3265,7 +3305,7 @@
                         default:
                             {
                                 LOGE("invalid cb type %d",
-                                      cb->cb_type);
+                                          cb->cb_type);
                                 cbStatus = BAD_VALUE;
                                 if (cb->release_cb) {
                                     cb->release_cb(cb->user_data, cb->cookie,
@@ -3275,16 +3315,16 @@
                             break;
                         };
                     } else {
-                        LOGE("cb message type %d not enabled!",
-                              cb->msg_type);
+                        LOGW("cb message type %d not enabled!",
+                                  cb->msg_type);
                         cbStatus = INVALID_OPERATION;
                         if (cb->release_cb) {
                             cb->release_cb(cb->user_data, cb->cookie, cbStatus);
                         }
                     }
-                delete cb;
+                    delete cb;
                 } else {
-                    LOGE("invalid cb type passed");
+                    LOGW("invalid cb type passed");
                 }
             }
             break;
@@ -3413,9 +3453,29 @@
         LOGE("notify thread is not active");
         return UNKNOWN_ERROR;
     }
-
     mDataQ.flushNodes(matchPreviewNotifications);
+    return NO_ERROR;
+}
 
+/*===========================================================================
+ * FUNCTION   : flushVideoNotifications
+ *
+ * DESCRIPTION: flush all pending video notifications
+ *              from the notifier queue
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCbNotifier::flushVideoNotifications()
+{
+    if (!mActive) {
+        LOGE("notify thread is not active");
+        return UNKNOWN_ERROR;
+    }
+    mDataQ.flushNodes(matchTimestampNotifications);
     return NO_ERROR;
 }
 
diff --git a/QCamera2/HAL/QCameraChannel.cpp b/QCamera2/HAL/QCameraChannel.cpp
index 0855540..b0c326e 100644
--- a/QCamera2/HAL/QCameraChannel.cpp
+++ b/QCamera2/HAL/QCameraChannel.cpp
@@ -896,7 +896,6 @@
  * PARAMETERS :
  *   @cam_handle : camera handle
  *   @cam_ops    : ptr to camera ops table
- *   @pp_mask    : post-proccess feature mask
  *
  * RETURN     : none
  *==========================================================================*/
@@ -1015,7 +1014,7 @@
                     pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
                     pStream->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
                     pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW)) {
-                uint32_t feature_mask = featureConfig.feature_mask;
+                cam_feature_mask_t feature_mask = featureConfig.feature_mask;
 
                 // skip thumbnail reprocessing if not needed
                 if (!param.needThumbnailReprocess(&feature_mask)) {
diff --git a/QCamera2/HAL/QCameraMem.cpp b/QCamera2/HAL/QCameraMem.cpp
old mode 100644
new mode 100755
index cdb1ae1..e738c01
--- a/QCamera2/HAL/QCameraMem.cpp
+++ b/QCamera2/HAL/QCameraMem.cpp
@@ -37,16 +37,22 @@
 #include "gralloc.h"
 #include "gralloc_priv.h"
 
-// OpenMAX dependencies
-#include "OMX_QCOMExtns.h"
-#include "QComOMXMetadata.h"
-
 // Camera dependencies
 #include "QCamera2HWI.h"
 #include "QCameraMem.h"
 #include "QCameraParameters.h"
 #include "QCameraTrace.h"
 
+// Media dependencies
+#include "OMX_QCOMExtns.h"
+#ifdef USE_MEDIA_EXTENSIONS
+#include <media/hardware/HardwareAPI.h>
+typedef struct VideoNativeHandleMetadata media_metadata_buffer;
+#else
+#include "QComOMXMetadata.h"
+typedef struct encoder_media_buffer_type media_metadata_buffer;
+#endif
+
 extern "C" {
 #include "mm_camera_dbg.h"
 #include "mm_camera_interface.h"
@@ -1279,10 +1285,11 @@
     : QCameraStreamMemory(memory, cached)
 {
     memset(mMetadata, 0, sizeof(mMetadata));
+    memset(mNativeHandle, 0, sizeof(mNativeHandle));
     mMetaBufCount = 0;
     mBufType = bufType;
     //Set Default color conversion format
-    mUsage = private_handle_t::PRIV_FLAGS_ITU_R_709;
+    mUsage = private_handle_t::PRIV_FLAGS_ITU_R_601_FR;
 
     //Set Default frame format
     mFormat = OMX_COLOR_FormatYUV420SemiPlanar;
@@ -1334,9 +1341,7 @@
             return rc;
         }
         for (int i = 0; i < count; i ++) {
-            struct encoder_media_buffer_type * packet =
-                    (struct encoder_media_buffer_type *)mMetadata[i]->data;
-            native_handle_t * nh = const_cast<native_handle_t *>(packet->meta_handle);
+            native_handle_t *nh =  mNativeHandle[i];
             if (!nh) {
                 LOGE("Error in getting video native handle");
                 ATRACE_END();
@@ -1380,7 +1385,7 @@
     if (!(mBufType & QCAMERA_MEM_TYPE_BATCH)) {
         for (int i = mBufferCount; i < count + mBufferCount; i ++) {
             mMetadata[i] = mGetMemory(-1,
-                    sizeof(struct encoder_media_buffer_type), 1, this);
+                    sizeof(media_metadata_buffer), 1, this);
             if (!mMetadata[i]) {
                 LOGE("allocation of video metadata failed.");
                 for (int j = mBufferCount; j <= i-1; j ++) {
@@ -1392,14 +1397,19 @@
                 ATRACE_END();
                 return NO_MEMORY;
             }
-            struct encoder_media_buffer_type * packet =
-                    (struct encoder_media_buffer_type *)mMetadata[i]->data;
-
+            media_metadata_buffer * packet =
+                    (media_metadata_buffer *)mMetadata[i]->data;
             //FDs = 1
             //numInts  = 5 (offset, size, usage, timestamp, format)
-            packet->meta_handle = native_handle_create(1, VIDEO_METADATA_NUM_INTS);
+            mNativeHandle[i] = native_handle_create(1, VIDEO_METADATA_NUM_INTS);
+#ifdef USE_MEDIA_EXTENSIONS
+            packet->eType = kMetadataBufferTypeNativeHandleSource;
+            packet->pHandle = mNativeHandle[i];
+#else
             packet->buffer_type = kMetadataBufferTypeCameraSource;
-            native_handle_t * nh = const_cast<native_handle_t *>(packet->meta_handle);
+            packet->meta_handle = mNativeHandle[i];
+#endif
+            native_handle_t *nh =  mNativeHandle[i];
             if (!nh) {
                 LOGE("Error in getting video native handle");
                 ATRACE_END();
@@ -1437,25 +1447,38 @@
 
     for (int i = 0; i < buf_cnt; i++) {
         mMetadata[i] = mGetMemory(-1,
-                sizeof(struct encoder_media_buffer_type), 1, this);
+                sizeof(media_metadata_buffer), 1, this);
         if (!mMetadata[i]) {
             LOGE("allocation of video metadata failed.");
             for (int j = (i - 1); j >= 0; j--) {
+                if (NULL != mNativeHandle[j]) {
+                   native_handle_delete(mNativeHandle[j]);
+                }
                 mMetadata[j]->release(mMetadata[j]);
             }
             return NO_MEMORY;
         }
-        struct encoder_media_buffer_type * packet =
-                (struct encoder_media_buffer_type *)mMetadata[i]->data;
-        packet->meta_handle = native_handle_create(numFDs, (numInts * numFDs));
-        packet->buffer_type = kMetadataBufferTypeCameraSource;
-        if (packet->meta_handle == NULL) {
+        media_metadata_buffer *packet =
+                (media_metadata_buffer *)mMetadata[i]->data;
+        mNativeHandle[i] = native_handle_create(numFDs, (numInts * numFDs));
+        if (mNativeHandle[i] == NULL) {
             LOGE("Error in getting video native handle");
             for (int j = (i - 1); j >= 0; j--) {
+                mMetadata[i]->release(mMetadata[i]);
+                if (NULL != mNativeHandle[j]) {
+                   native_handle_delete(mNativeHandle[j]);
+                }
                 mMetadata[j]->release(mMetadata[j]);
             }
             return NO_MEMORY;
         }
+#ifdef USE_MEDIA_EXTENSIONS
+        packet->eType = kMetadataBufferTypeNativeHandleSource;
+        packet->pHandle = mNativeHandle[i];
+#else
+        packet->buffer_type = kMetadataBufferTypeCameraSource;
+        packet->meta_handle = mNativeHandle[i];
+#endif
     }
     mMetaBufCount = buf_cnt;
     return rc;
@@ -1473,20 +1496,15 @@
 void QCameraVideoMemory::deallocateMeta()
 {
     for (int i = 0; i < mMetaBufCount; i++) {
-        struct encoder_media_buffer_type * packet =
-                (struct encoder_media_buffer_type *)mMetadata[i]->data;
-        if (NULL != packet) {
-            native_handle_t * nh = const_cast<native_handle_t *>(packet->meta_handle);
-            if (NULL != nh) {
-               if (native_handle_delete(nh)) {
-                   LOGE("Unable to delete native handle");
-               }
-            } else {
-               LOGE("native handle not available");
-            }
+        native_handle_t *nh = mNativeHandle[i];
+        if (NULL != nh) {
+           if (native_handle_delete(nh)) {
+               LOGE("Unable to delete native handle");
+           }
         } else {
-            LOGE("packet not available");
+           LOGE("native handle not available");
         }
+        mNativeHandle[i] = NULL;
         mMetadata[i]->release(mMetadata[i]);
         mMetadata[i] = NULL;
     }
@@ -1537,6 +1555,88 @@
 }
 
 /*===========================================================================
+ * FUNCTION   : updateNativeHandle
+ *
+ * DESCRIPTION: Updating native handle pointer
+ *
+ * PARAMETERS :
+ *   @index   : buffer index
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : camera native handle ptr
+ *              NULL if not supported or failed
+ *==========================================================================*/
+native_handle_t *QCameraVideoMemory::updateNativeHandle(uint32_t index, bool metadata)
+{
+    if (index >= mMetaBufCount || (!metadata && index >= mBufferCount)) {
+        return NULL;
+    }
+
+    native_handle_t *nh = NULL;
+    if (metadata && mMetadata[index] != NULL) {
+        media_metadata_buffer *packet =
+                (media_metadata_buffer *)mMetadata[index]->data;
+        nh = mNativeHandle[index];
+#ifdef USE_MEDIA_EXTENSIONS
+        packet->pHandle = nh;
+#else
+        packet->meta_handle = nh;
+#endif
+    }
+    return nh;
+}
+
+/*===========================================================================
+ * FUNCTION   : closeNativeHandle
+ *
+ * DESCRIPTION: close video native handle
+ *
+ * PARAMETERS :
+ *   @data    : ptr to video frame to be returned
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraVideoMemory::closeNativeHandle(__unused const void *data,
+        __unused bool metadata)
+{
+    int32_t rc = NO_ERROR;
+
+#ifdef USE_MEDIA_EXTENSIONS
+    int32_t index = -1;
+    camera_memory_t *video_mem = NULL;
+
+    if (metadata) {
+        index = getMatchBufIndex(data, metadata);
+        if (index < 0) {
+            LOGE("Invalid buffer");
+            return BAD_VALUE;
+        }
+        video_mem = getMemory(index, metadata);
+        media_metadata_buffer * packet = NULL;
+        if (video_mem) {
+             packet = (media_metadata_buffer *)video_mem->data;
+        }
+
+        if (packet != NULL && packet->eType ==
+                kMetadataBufferTypeNativeHandleSource) {
+            native_handle_close(packet->pHandle);
+            native_handle_delete(packet->pHandle);
+            packet->pHandle = NULL;
+        } else {
+            LOGE("Invalid Data. Could not release");
+            return BAD_VALUE;
+        }
+    } else {
+        LOGE("Not of type video meta buffer. Failed");
+        return BAD_VALUE;
+    }
+#endif
+    return rc;
+}
+
+/*===========================================================================
  * FUNCTION   : getMatchBufIndex
  *
  * DESCRIPTION: query buffer index by opaque ptr
diff --git a/QCamera2/HAL/QCameraMem.h b/QCamera2/HAL/QCameraMem.h
index c52cea1..a20e29a 100644
--- a/QCamera2/HAL/QCameraMem.h
+++ b/QCamera2/HAL/QCameraMem.h
@@ -231,10 +231,13 @@
     int getUsage(){return mUsage;};
     int getFormat(){return mFormat;};
     int convCamtoOMXFormat(cam_format_t format);
+    native_handle_t *updateNativeHandle(uint32_t index, bool metadata = true);
+    int closeNativeHandle(const void *data, bool metadata = true);
 private:
     camera_memory_t *mMetadata[MM_CAMERA_MAX_NUM_FRAMES];
     uint8_t mMetaBufCount;
     int mUsage, mFormat;
+    native_handle_t *mNativeHandle[MM_CAMERA_MAX_NUM_FRAMES];
 };
 
 
diff --git a/QCamera2/HAL/QCameraParameters.cpp b/QCamera2/HAL/QCameraParameters.cpp
index 328c10a..4ec143e 100644
--- a/QCamera2/HAL/QCameraParameters.cpp
+++ b/QCamera2/HAL/QCameraParameters.cpp
@@ -395,7 +395,6 @@
 const char QCameraParameters::KEY_QC_MAX_SCE_FACTOR[] = "max-sce-factor";
 const char QCameraParameters::KEY_QC_SCE_FACTOR_STEP[] = "sce-factor-step";
 
-const char QCameraParameters::KEY_QC_SUPPORTED_CAMERA_FEATURES[] = "qc-camera-features";
 const char QCameraParameters::KEY_QC_MAX_NUM_REQUESTED_FACES[] = "qc-max-num-requested-faces";
 
 //Values for DENOISE
@@ -893,6 +892,7 @@
 QCameraParameters::QCameraParameters()
     : CameraParameters(),
       m_reprocScaleParam(),
+      mCommon(),
       m_pCapability(NULL),
       m_pCamOpsTbl(NULL),
       m_pParamHeap(NULL),
@@ -1329,12 +1329,12 @@
 
     if (len > 0) {
         snprintf(buffer, sizeof(buffer), "%dx%d",
-                 values[0].dim.width, values[0].dim.height);
+                 values[0].dim[0].width, values[0].dim[0].height);
         str.append(buffer);
     }
     for (size_t i = 1; i < len; i++) {
         snprintf(buffer, sizeof(buffer), ",%dx%d",
-                 values[i].dim.width, values[i].dim.height);
+                 values[i].dim[0].width, values[i].dim[0].height);
         str.append(buffer);
     }
     return str;
@@ -4923,13 +4923,13 @@
     if (str != NULL) {
         if (prev_str == NULL || strcmp(str, prev_str) != 0) {
             set(KEY_QC_LONG_SHOT, str);
-            if (!strcmp(str, "off")) {
-                if (m_bLongshotEnabled == true) {
-                    // We restart here, to reset the FPS and no
-                    // of buffers as per the requirement of single snapshot usecase.
-                    m_bNeedRestart = true;
-                }
-                m_bLongshotEnabled = false;
+            if (prev_str && !strcmp(str, "off") && !strcmp(prev_str, "on")) {
+                // We restart here, to reset the FPS and no
+                // of buffers as per the requirement of single snapshot usecase.
+                // Here restart happens when continuous shot is changed to off from on.
+                // In case of continuous shot on, restart is taken care when actual
+                // longshot command is triggered through sendCommand.
+                m_bNeedRestart = true;
             }
         }
     }
@@ -5200,8 +5200,6 @@
         m_pCapability->auto_exposure_lock_supported? VALUE_TRUE : VALUE_FALSE);
     set(KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED,
         m_pCapability->auto_wb_lock_supported? VALUE_TRUE : VALUE_FALSE);
-    set(KEY_QC_SUPPORTED_CAMERA_FEATURES,
-            (int)m_pCapability->qcom_supported_feature_mask);
     set(KEY_MAX_NUM_DETECTED_FACES_HW, m_pCapability->max_num_roi);
     set(KEY_MAX_NUM_DETECTED_FACES_SW, m_pCapability->max_num_roi);
     set(KEY_QC_MAX_NUM_REQUESTED_FACES, m_pCapability->max_num_roi);
@@ -5434,7 +5432,7 @@
     m_pCapability->min_focus_pos[CAM_MANUAL_FOCUS_MODE_DIOPTER] = 0;
     if (m_pCapability->min_focus_distance > 0) {
         m_pCapability->max_focus_pos[CAM_MANUAL_FOCUS_MODE_DIOPTER] =
-                100.0f / m_pCapability->min_focus_distance;
+                m_pCapability->min_focus_distance;
     } else {
         m_pCapability->max_focus_pos[CAM_MANUAL_FOCUS_MODE_DIOPTER] = 0;
     }
@@ -5680,7 +5678,7 @@
             m_pCapability->hfr_tbl,
             m_pCapability->hfr_tbl_cnt);
     set(KEY_QC_SUPPORTED_HFR_SIZES, hfrSizeValues.string());
-    LOGD("HFR values %s HFR Sizes = %d", hfrValues.string(), hfrSizeValues.string());
+    LOGD("HFR values = %s HFR Sizes = %s", hfrValues.string(), hfrSizeValues.string());
     setHighFrameRate(CAM_HFR_MODE_OFF);
 
     // Set Focus algorithms
@@ -6011,7 +6009,7 @@
     }
 
     setOfflineRAW();
-    memset(mStreamPpMask, 0, sizeof(uint32_t)*CAM_STREAM_TYPE_MAX);
+    memset(mStreamPpMask, 0, sizeof(cam_feature_mask_t)*CAM_STREAM_TYPE_MAX);
     //Set video buffers as uncached by default
     set(KEY_QC_CACHE_VIDEO_BUFFERS, VALUE_DISABLE);
 
@@ -6187,6 +6185,8 @@
 
     initDefaultParameters();
 
+    mCommon.init(capabilities);
+
     m_bInited = true;
 
     goto TRANS_INIT_DONE;
@@ -6416,7 +6416,10 @@
                  min_fps, max_fps, vid_min_fps, vid_max_fps);
 
     if(fixedFpsValue != 0) {
-      min_fps = max_fps = vid_min_fps = vid_max_fps = (int)fixedFpsValue*1000;
+        min_fps = max_fps = fixedFpsValue*1000;
+        if (!isHfrMode()) {
+             vid_min_fps = vid_max_fps = fixedFpsValue*1000;
+        }
     }
     snprintf(str, sizeof(str), "%d,%d", min_fps, max_fps);
     LOGH("Setting preview fps range %s", str);
@@ -7216,7 +7219,7 @@
         return rc;
     }
 
-    if (enable == true) m_bLongshotEnabled = enable;
+    m_bLongshotEnabled = enable;
 
     return rc;
 }
@@ -9950,13 +9953,28 @@
         format = mAppPreviewFormat;
         break;
     case CAM_STREAM_TYPE_ANALYSIS:
-        if (m_pCapability->hw_analysis_supported &&
-                m_pCapability->analysis_recommended_format == CAM_FORMAT_Y_ONLY) {
-            format = m_pCapability->analysis_recommended_format;
+        cam_analysis_info_t analysisInfo;
+        cam_feature_mask_t featureMask;
+
+        featureMask = 0;
+        getStreamPpMask(CAM_STREAM_TYPE_ANALYSIS, featureMask);
+        ret = getAnalysisInfo(
+                ((getRecordingHintValue() == true) && fdModeInVideo()),
+                FALSE,
+                featureMask,
+                &analysisInfo);
+        if (ret != NO_ERROR) {
+            LOGE("getAnalysisInfo failed, ret = %d", ret);
+            return ret;
+        }
+
+        if (analysisInfo.hw_analysis_supported &&
+                analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY) {
+            format = analysisInfo.analysis_format;
         } else {
-            if (m_pCapability->hw_analysis_supported) {
-                LOGW("Invalid analysis_recommended_format %d\n",
-                        m_pCapability->analysis_recommended_format);
+            if (analysisInfo.hw_analysis_supported) {
+                LOGW("Invalid analysis_format %d\n",
+                        analysisInfo.analysis_format);
             }
             format = mAppPreviewFormat;
         }
@@ -10153,16 +10171,24 @@
         /* Analysis stream need aspect ratio as preview stream */
         getPreviewSize(&prv_dim.width, &prv_dim.height);
 
-        max_dim.width = m_pCapability->analysis_max_res.width;
-        max_dim.height = m_pCapability->analysis_max_res.height;
+        cam_analysis_info_t analysisInfo;
+        cam_feature_mask_t featureMask;
 
-        if ((getRecordingHintValue() == true)
-                && fdModeInVideo()
-                && m_pCapability->hw_analysis_supported) {
-            max_dim.width /= 2;
-            max_dim.height /= 2;
+        featureMask = 0;
+        getStreamPpMask(CAM_STREAM_TYPE_ANALYSIS, featureMask);
+        ret = getAnalysisInfo(
+                ((getRecordingHintValue() == true) && fdModeInVideo()),
+                FALSE,
+                featureMask,
+                &analysisInfo);
+        if (ret != NO_ERROR) {
+            LOGE("getAnalysisInfo failed, ret = %d", ret);
+            return ret;
         }
 
+        max_dim.width = analysisInfo.analysis_max_res.width;
+        max_dim.height = analysisInfo.analysis_max_res.height;
+
         if (prv_dim.width > max_dim.width || prv_dim.height > max_dim.height) {
             double max_ratio, requested_ratio;
 
@@ -12687,12 +12713,9 @@
     memset(&stream_config_info, 0, sizeof(stream_config_info));
     stream_config_info.num_streams = 0;
 
-    if (m_bStreamsConfigured) {
+    if (resetConfig) {
         LOGH("Reset stream config!!");
         rc = sendStreamConfigInfo(stream_config_info);
-        m_bStreamsConfigured = false;
-    }
-    if (resetConfig) {
         LOGH("Done Resetting stream config!!");
         return rc;
     }
@@ -12702,12 +12725,6 @@
     stream_config_info.min_stride     = m_pCapability->min_stride;
     stream_config_info.min_scanline   = m_pCapability->min_scanline;
     stream_config_info.batch_size = getBufBatchCount();
-    LOGH("buf_alignment=%d stride X scan=%dx%d batch size = %d\n",
-            m_pCapability->buf_alignment,
-            m_pCapability->min_stride,
-            m_pCapability->min_scanline,
-            stream_config_info.batch_size);
-
 
     property_get("persist.camera.raw_yuv", value, "0");
     raw_yuv = atoi(value) > 0 ? true : false;
@@ -12920,7 +12937,7 @@
         stream_config_info.num_streams++;
     }
     for (uint32_t k = 0; k < stream_config_info.num_streams; k++) {
-        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x Format = %d",
+        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%llx Format = %d",
                 stream_config_info.type[k],
                 stream_config_info.stream_sizes[k].width,
                 stream_config_info.stream_sizes[k].height,
@@ -12929,8 +12946,6 @@
     }
 
     rc = sendStreamConfigInfo(stream_config_info);
-    m_bStreamsConfigured = true;
-
     return rc;
 }
 
@@ -13011,7 +13026,7 @@
  * RETURN     : true: needed
  *              false: no need
  *==========================================================================*/
-bool QCameraParameters::needThumbnailReprocess(uint32_t *pFeatureMask)
+bool QCameraParameters::needThumbnailReprocess(cam_feature_mask_t *pFeatureMask)
 {
     if (isUbiFocusEnabled() || isChromaFlashEnabled() ||
             isOptiZoomEnabled() || isUbiRefocus() ||
@@ -13320,7 +13335,7 @@
  *==========================================================================*/
 int32_t QCameraParameters::updatePpFeatureMask(cam_stream_type_t stream_type) {
 
-    uint32_t feature_mask = 0;
+    cam_feature_mask_t feature_mask = 0;
 
     if (stream_type >= CAM_STREAM_TYPE_MAX) {
         LOGE("Error!! stream type: %d not valid", stream_type);
@@ -13425,9 +13440,21 @@
         }
     }
 
+    // Preview assisted autofocus needs to be supported for
+    // callback, preview, or video streams
+    switch (stream_type) {
+    case CAM_STREAM_TYPE_CALLBACK:
+    case CAM_STREAM_TYPE_PREVIEW:
+    case CAM_STREAM_TYPE_VIDEO:
+        feature_mask |= CAM_QCOM_FEATURE_PAAF;
+        break;
+    default:
+        break;
+    }
+
     // Store stream feature mask
     setStreamPpMask(stream_type, feature_mask);
-    LOGH("stream type: %d, pp_mask: 0x%x", stream_type, feature_mask);
+    LOGH("stream type: %d, pp_mask: 0x%llx", stream_type, feature_mask);
 
     return NO_ERROR;
 }
@@ -13445,7 +13472,7 @@
  *              int32_t type of status
  *==========================================================================*/
 int32_t QCameraParameters::setStreamPpMask(cam_stream_type_t stream_type,
-        uint32_t pp_mask) {
+        cam_feature_mask_t pp_mask) {
 
     if(stream_type >= CAM_STREAM_TYPE_MAX) {
         return BAD_TYPE;
@@ -13468,7 +13495,7 @@
  *              int32_t type of status
  *==========================================================================*/
 int32_t QCameraParameters::getStreamPpMask(cam_stream_type_t stream_type,
-        uint32_t &pp_mask) {
+        cam_feature_mask_t &pp_mask) {
 
     if(stream_type >= CAM_STREAM_TYPE_MAX) {
         return BAD_TYPE;
@@ -14073,7 +14100,10 @@
     char value[PROPERTY_VALUE_MAX];
     uint8_t fdvideo = 0;
 
-    if (!m_pCapability->hw_analysis_supported) {
+    cam_analysis_info_t *pAnalysisInfo =
+            &m_pCapability->analysis_info[CAM_ANALYSIS_INFO_FD_VIDEO];
+
+    if (!pAnalysisInfo->hw_analysis_supported) {
         return 0;
     }
 
@@ -14284,4 +14314,29 @@
     return NO_ERROR;
 }
 
+/*===========================================================================
+ * FUNCTION   : getAnalysisInfo
+ *
+ * DESCRIPTION: Get the Analysis information based on
+ *     current mode and feature mask
+ *
+ * PARAMETERS :
+ *   @fdVideoEnabled : Whether fdVideo enabled currently
+ *   @videoEnabled   : Whether hal3 or hal1
+ *   @featureMask    : Feature mask
+ *   @analysis_info  : Analysis info to be filled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getAnalysisInfo(
+        bool fdVideoEnabled,
+        bool hal3,
+        uint32_t featureMask,
+        cam_analysis_info_t *pAnalysisInfo)
+{
+    return mCommon.getAnalysisInfo(fdVideoEnabled, hal3, featureMask, pAnalysisInfo);
+}
+
 }; // namespace qcamera
diff --git a/QCamera2/HAL/QCameraParameters.h b/QCamera2/HAL/QCameraParameters.h
index b1f379c..1418796 100644
--- a/QCamera2/HAL/QCameraParameters.h
+++ b/QCamera2/HAL/QCameraParameters.h
@@ -32,6 +32,7 @@
 #include "QCameraMem.h"
 #include "QCameraParametersIntf.h"
 #include "QCameraThermalAdapter.h"
+#include "QCameraCommon.h"
 
 extern "C" {
 #include "mm_jpeg_interface.h"
@@ -259,10 +260,6 @@
     static const char KEY_QC_FACE_RECOGNITION[];
     static const char KEY_QC_SUPPORTED_FACE_RECOGNITION[];
 
-    // supported camera features to be queried by Snapdragon SDK
-    //Read only
-    static const char KEY_QC_SUPPORTED_CAMERA_FEATURES[];
-
     //Indicates number of faces requested by the application.
     //This value will be rejected if the requested faces
     //greater than supported by hardware.
@@ -744,14 +741,14 @@
 
     const char *getASDStateString(cam_auto_scene_t scene);
     bool isHDRThumbnailProcessNeeded() { return m_bHDRThumbnailProcessNeeded; };
-    void setMinPpMask(uint32_t min_pp_mask) { m_nMinRequiredPpMask = min_pp_mask; };
+    void setMinPpMask(cam_feature_mask_t min_pp_mask) { m_nMinRequiredPpMask = min_pp_mask; };
     bool setStreamConfigure(bool isCapture, bool previewAsPostview, bool resetConfig);
     int32_t addOnlineRotation(uint32_t rotation, uint32_t streamId, int32_t device_rotation);
     uint8_t getNumOfExtraBuffersForImageProc();
     uint8_t getNumOfExtraBuffersForVideo();
     uint8_t getNumOfExtraBuffersForPreview();
     uint32_t getExifBufIndex(uint32_t captureIndex);
-    bool needThumbnailReprocess(uint32_t *pFeatureMask);
+    bool needThumbnailReprocess(cam_feature_mask_t *pFeatureMask);
     inline bool isUbiFocusEnabled() {return m_bAFBracketingOn && !m_bReFocusOn;};
     inline bool isChromaFlashEnabled() {return m_bChromaFlashOn;};
     inline bool isHighQualityNoiseReductionMode() {return m_bHighQualityNoiseReductionMode;};
@@ -794,7 +791,7 @@
     int32_t setIntEvent(cam_int_evt_params_t params);
     bool getofflineRAW() {return mOfflineRAW;}
     int32_t updatePpFeatureMask(cam_stream_type_t stream_type);
-    int32_t getStreamPpMask(cam_stream_type_t stream_type, uint32_t &pp_mask);
+    int32_t getStreamPpMask(cam_stream_type_t stream_type, cam_feature_mask_t &pp_mask);
     int32_t getSharpness() {return m_nSharpness;};
     int32_t getEffect() {return mParmEffect;};
     int32_t updateFlashMode(cam_flash_mode_t flash_mode);
@@ -863,6 +860,12 @@
 
     int32_t checkFeatureConcurrency();
     int32_t setInstantAEC(uint8_t enable, bool initCommit);
+
+    int32_t getAnalysisInfo(
+        bool fdVideoEnabled,
+        bool hal3,
+        uint32_t featureMask,
+        cam_analysis_info_t *pAnalysisInfo);
 private:
     int32_t setPreviewSize(const QCameraParameters& );
     int32_t setVideoSize(const QCameraParameters& );
@@ -1013,7 +1016,7 @@
     void setBufBatchCount(int8_t buf_cnt);
     void setVideoBatchSize();
     void setDcrf();
-    int32_t setStreamPpMask(cam_stream_type_t stream_type, uint32_t pp_mask);
+    int32_t setStreamPpMask(cam_stream_type_t stream_type, cam_feature_mask_t pp_mask);
     void setOfflineRAW(bool value = 0);
     int32_t configureFlash(cam_capture_frame_config_t &frame_config);
     int32_t configureLowLight(cam_capture_frame_config_t &frame_config);
@@ -1097,6 +1100,7 @@
     static const QCameraMap<int> NOISE_REDUCTION_MODES_MAP[];
 
     QCameraReprocScaleParam m_reprocScaleParam;
+    QCameraCommon           mCommon;
 
     cam_capability_t *m_pCapability;
     mm_camera_vtbl_t *m_pCamOpsTbl;
@@ -1182,13 +1186,12 @@
     bool m_bHDRModeSensor;
     bool mOfflineRAW;
     bool m_bTruePortraitOn;
-    uint32_t m_nMinRequiredPpMask;
-    uint32_t mStreamPpMask[CAM_STREAM_TYPE_MAX];
+    cam_feature_mask_t m_nMinRequiredPpMask;
+    cam_feature_mask_t mStreamPpMask[CAM_STREAM_TYPE_MAX];
     int32_t m_nSharpness;
     int8_t mTotalPPCount;
     int8_t mCurPPCount;
     int32_t mZoomLevel;
-    bool m_bStreamsConfigured;
     int32_t mParmZoomLevel;
     bool m_bIsLowMemoryDevice;
     int32_t mCds_mode;
diff --git a/QCamera2/HAL/QCameraParametersIntf.cpp b/QCamera2/HAL/QCameraParametersIntf.cpp
index a620680..5ae3c76 100644
--- a/QCamera2/HAL/QCameraParametersIntf.cpp
+++ b/QCamera2/HAL/QCameraParametersIntf.cpp
@@ -750,7 +750,7 @@
     return mImpl->isHDRThumbnailProcessNeeded();
 }
 
-void QCameraParametersIntf::setMinPpMask(uint32_t min_pp_mask)
+void QCameraParametersIntf::setMinPpMask(cam_feature_mask_t min_pp_mask)
 {
     Mutex::Autolock lock(mLock);
     CHECK_PARAM_INTF(mImpl);
@@ -802,7 +802,7 @@
     return mImpl->getExifBufIndex(captureIndex);
 }
 
-bool QCameraParametersIntf::needThumbnailReprocess(uint32_t *pFeatureMask)
+bool QCameraParametersIntf::needThumbnailReprocess(cam_feature_mask_t *pFeatureMask)
 {
     Mutex::Autolock lock(mLock);
     CHECK_PARAM_INTF(mImpl);
@@ -1026,7 +1026,7 @@
 }
 
 int32_t QCameraParametersIntf::getStreamPpMask(cam_stream_type_t stream_type,
-        uint32_t &pp_mask)
+        cam_feature_mask_t &pp_mask)
 {
     Mutex::Autolock lock(mLock);
     CHECK_PARAM_INTF(mImpl);
@@ -1386,4 +1386,15 @@
     return mImpl->setInstantAEC(enable, initCommit);
 }
 
+int32_t QCameraParametersIntf::getAnalysisInfo(
+        bool fdVideoEnabled,
+        bool hal3,
+        uint32_t featureMask,
+        cam_analysis_info_t *pAnalysisInfo)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getAnalysisInfo(fdVideoEnabled, hal3, featureMask, pAnalysisInfo);
+}
+
 }; // namespace qcamera
diff --git a/QCamera2/HAL/QCameraParametersIntf.h b/QCamera2/HAL/QCameraParametersIntf.h
index c89f4e0..0fb60b5 100644
--- a/QCamera2/HAL/QCameraParametersIntf.h
+++ b/QCamera2/HAL/QCameraParametersIntf.h
@@ -183,7 +183,7 @@
 
     const char *getASDStateString(cam_auto_scene_t scene);
     bool isHDRThumbnailProcessNeeded();
-    void setMinPpMask(uint32_t min_pp_mask);
+    void setMinPpMask(cam_feature_mask_t min_pp_mask);
     bool setStreamConfigure(bool isCapture,
             bool previewAsPostview, bool resetConfig);
     int32_t addOnlineRotation(uint32_t rotation, uint32_t streamId,
@@ -192,7 +192,7 @@
     uint8_t getNumOfExtraBuffersForVideo();
     uint8_t getNumOfExtraBuffersForPreview();
     uint32_t getExifBufIndex(uint32_t captureIndex);
-    bool needThumbnailReprocess(uint32_t *pFeatureMask);
+    bool needThumbnailReprocess(cam_feature_mask_t *pFeatureMask);
     bool isUbiFocusEnabled();
     bool isChromaFlashEnabled();
     bool isHighQualityNoiseReductionMode();
@@ -228,7 +228,7 @@
     int32_t setIntEvent(cam_int_evt_params_t params);
     bool getofflineRAW();
     int32_t updatePpFeatureMask(cam_stream_type_t stream_type);
-    int32_t getStreamPpMask(cam_stream_type_t stream_type, uint32_t &pp_mask);
+    int32_t getStreamPpMask(cam_stream_type_t stream_type, cam_feature_mask_t &pp_mask);
     int32_t getSharpness();
     int32_t getEffect();
     int32_t updateFlashMode(cam_flash_mode_t flash_mode);
@@ -291,6 +291,12 @@
 
     int32_t checkFeatureConcurrency();
     int32_t setInstantAEC(uint8_t enable, bool initCommit);
+
+    int32_t getAnalysisInfo(
+        bool fdVideoEnabled,
+        bool hal3,
+        uint32_t featureMask,
+        cam_analysis_info_t *pAnalysisInfo);
 private:
     QCameraParameters *mImpl;
     mutable Mutex mLock;
diff --git a/QCamera2/HAL/QCameraPostProc.cpp b/QCamera2/HAL/QCameraPostProc.cpp
index 22413a1..8eddccf 100644
--- a/QCamera2/HAL/QCameraPostProc.cpp
+++ b/QCamera2/HAL/QCameraPostProc.cpp
@@ -97,6 +97,7 @@
     memset(mPPChannels, 0, sizeof(mPPChannels));
     m_DataMem = NULL;
     mOfflineDataBufs = NULL;
+    pthread_mutex_init(&m_reprocess_lock,NULL);
 }
 
 /*===========================================================================
@@ -124,6 +125,7 @@
         }
     }
     mPPChannelCount = 0;
+    pthread_mutex_destroy(&m_reprocess_lock);
 }
 
 /*===========================================================================
@@ -1435,9 +1437,11 @@
         }
 
         // free pp job buf
+        pthread_mutex_lock(&m_reprocess_lock);
         if (job) {
             free(job);
         }
+        pthread_mutex_unlock(&m_reprocess_lock);
     }
 
     LOGD("");
@@ -2455,6 +2459,12 @@
                     jpg_job.encode_job.cam_exif_params.debug_params->asd_debug_params_valid;
             jpg_job.encode_job.p_metadata->is_statsdebug_stats_params_valid =
                     jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_bestats_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->bestats_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_bhist_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->bhist_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_3a_tuning_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->q3a_tuning_debug_params_valid;
 
             if (jpg_job.encode_job.cam_exif_params.debug_params->ae_debug_params_valid) {
                 jpg_job.encode_job.p_metadata->statsdebug_ae_data =
@@ -2476,6 +2486,18 @@
                 jpg_job.encode_job.p_metadata->statsdebug_stats_buffer_data =
                         jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params;
             }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->bestats_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_bestats_buffer_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->bestats_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->bhist_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_bhist_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->bhist_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->q3a_tuning_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_3a_tuning_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->q3a_tuning_debug_params;
+            }
         }
 
     }
@@ -3108,9 +3130,11 @@
         if ((m_parent->isRegularCapture()) || (ppreq_job->offline_buffer)) {
             m_bufCountPPQ++;
             if (m_ongoingPPQ.enqueue((void *)ppreq_job)) {
+                pthread_mutex_lock(&m_reprocess_lock);
                 ret = mPPChannels[mCurChannelIdx]->doReprocessOffline(ppInputFrame,
                         meta_buf, m_parent->mParameters);
                 if (ret != NO_ERROR) {
+                    pthread_mutex_unlock(&m_reprocess_lock);
                     goto end;
                 }
 
@@ -3119,6 +3143,7 @@
                     mPPChannels[mCurChannelIdx]->doReprocessOffline(
                             ppreq_job->offline_reproc_buf, meta_buf);
                 }
+                pthread_mutex_unlock(&m_reprocess_lock);
             } else {
                 LOGW("m_ongoingPPQ is not active!!!");
                 ret = UNKNOWN_ERROR;
diff --git a/QCamera2/HAL/QCameraPostProc.h b/QCamera2/HAL/QCameraPostProc.h
index 58f8865..5c56214 100644
--- a/QCamera2/HAL/QCameraPostProc.h
+++ b/QCamera2/HAL/QCameraPostProc.h
@@ -239,6 +239,7 @@
     int32_t m_bufCountPPQ;
     Vector<mm_camera_buf_def_t *> m_InputMetadata; // store input metadata buffers for AOST cases
     size_t m_PPindex;                   // counter for each incoming AOST buffer
+    pthread_mutex_t m_reprocess_lock;   // lock to ensure reprocess job is not freed early.
 
 public:
     cam_dimension_t m_dst_dim;
diff --git a/QCamera2/HAL/QCameraStateMachine.cpp b/QCamera2/HAL/QCameraStateMachine.cpp
index a177010..f9e85b8 100644
--- a/QCamera2/HAL/QCameraStateMachine.cpp
+++ b/QCamera2/HAL/QCameraStateMachine.cpp
@@ -1834,6 +1834,18 @@
             switch (cam_evt->server_event_type) {
             case CAM_EVENT_TYPE_DAEMON_DIED:
                 {
+                    // Send internal events to stop indefinite wait on prepare
+                    // snapshot done event.
+                    result.status = rc;
+                    result.request_api = QCAMERA_SM_EVT_PREPARE_SNAPSHOT;
+                    result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+                    m_parent->signalAPIResult(&result);
+
+                    result.status = rc;
+                    result.request_api = QCAMERA_SM_EVT_TAKE_PICTURE;
+                    result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+                    m_parent->signalAPIResult(&result);
+
                     m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
                                             CAMERA_ERROR_SERVER_DIED,
                                             0);
@@ -2217,8 +2229,8 @@
             switch (cam_evt->server_event_type) {
             case CAM_EVENT_TYPE_DAEMON_DIED:
                 {
-                    // Send internal events to release statemachine
-                    // thread to process CAMERA_ERROR_SERVER_DIED error
+                    // Send internal events to stop indefinite wait on prepare
+                    // snapshot done event.
                     result.status = rc;
                     result.request_api = QCAMERA_SM_EVT_PREPARE_SNAPSHOT;
                     result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
@@ -3579,6 +3591,18 @@
             switch (cam_evt->server_event_type) {
             case CAM_EVENT_TYPE_DAEMON_DIED:
                 {
+                    // Send internal events to stop indefinite wait on prepare
+                    // snapshot done event.
+                    result.status = rc;
+                    result.request_api = QCAMERA_SM_EVT_PREPARE_SNAPSHOT;
+                    result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+                    m_parent->signalAPIResult(&result);
+
+                    result.status = rc;
+                    result.request_api = QCAMERA_SM_EVT_TAKE_PICTURE;
+                    result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+                    m_parent->signalAPIResult(&result);
+
                     m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
                                             CAMERA_ERROR_SERVER_DIED,
                                             0);
diff --git a/QCamera2/HAL3/QCamera3Channel.cpp b/QCamera2/HAL3/QCamera3Channel.cpp
index f4c8ab0..930577c 100644
--- a/QCamera2/HAL3/QCamera3Channel.cpp
+++ b/QCamera2/HAL3/QCamera3Channel.cpp
@@ -68,7 +68,7 @@
                                mm_camera_ops_t *cam_ops,
                                channel_cb_routine cb_routine,
                                cam_padding_info_t *paddingInfo,
-                               uint32_t postprocess_mask,
+                               cam_feature_mask_t postprocess_mask,
                                void *userData, uint32_t numBuffers)
 {
     m_camHandle = cam_handle;
@@ -156,7 +156,7 @@
                                   cam_dimension_t streamDim,
                                   cam_rotation_t streamRotation,
                                   uint8_t minStreamBufNum,
-                                  uint32_t postprocessMask,
+                                  cam_feature_mask_t postprocessMask,
                                   cam_is_type_t isType,
                                   uint32_t batchSize)
 {
@@ -708,7 +708,7 @@
         void *userData,
         camera3_stream_t *stream,
         cam_stream_type_t stream_type,
-        uint32_t postprocess_mask,
+        cam_feature_mask_t postprocess_mask,
         QCamera3Channel *metadataChannel,
         uint32_t numBuffers) :
             QCamera3Channel(cam_handle, channel_handle, cam_ops, cb_routine,
@@ -733,7 +733,7 @@
     property_get("persist.debug.sf.showfps", prop, "0");
     mDebugFPS = (uint8_t) atoi(prop);
 
-    int32_t rc = m_postprocessor.init(&mMemory, mPostProcMask);
+    int32_t rc = m_postprocessor.init(&mMemory);
     if (rc != 0) {
         LOGE("Init Postprocessor failed");
     }
@@ -1643,7 +1643,7 @@
         void *userData,
         camera3_stream_t *stream,
         cam_stream_type_t stream_type,
-        uint32_t postprocess_mask,
+        cam_feature_mask_t postprocess_mask,
         QCamera3Channel *metadataChannel,
         uint32_t numBuffers) :
             QCamera3ProcessingChannel(cam_handle, channel_handle, cam_ops,
@@ -1911,7 +1911,7 @@
                     mm_camera_ops_t *cam_ops,
                     channel_cb_routine cb_routine,
                     cam_padding_info_t *paddingInfo,
-                    uint32_t postprocess_mask,
+                    cam_feature_mask_t postprocess_mask,
                     void *userData, uint32_t numBuffers) :
                         QCamera3Channel(cam_handle, channel_handle, cam_ops,
                                 cb_routine, paddingInfo, postprocess_mask,
@@ -2019,7 +2019,7 @@
                     cam_padding_info_t *paddingInfo,
                     void *userData,
                     camera3_stream_t *stream,
-                    uint32_t postprocess_mask,
+                    cam_feature_mask_t postprocess_mask,
                     QCamera3Channel *metadataChannel,
                     bool raw_16, uint32_t numBuffers) :
                         QCamera3RegularChannel(cam_handle, channel_handle, cam_ops,
@@ -2240,7 +2240,7 @@
                     cam_dimension_t rawDumpSize,
                     cam_padding_info_t *paddingInfo,
                     void *userData,
-                    uint32_t postprocess_mask, uint32_t numBuffers) :
+                    cam_feature_mask_t postprocess_mask, uint32_t numBuffers) :
                         QCamera3Channel(cam_handle, channel_handle, cam_ops, NULL,
                                 paddingInfo, postprocess_mask,
                                 userData, numBuffers),
@@ -2478,7 +2478,7 @@
         void *userData,
         camera3_stream_t *stream,
         cam_stream_type_t stream_type,
-        uint32_t postprocess_mask,
+        cam_feature_mask_t postprocess_mask,
         QCamera3Channel *metadataChannel) :
             QCamera3ProcessingChannel(cam_handle, channel_handle, cam_ops,
                     cb_routine, paddingInfo, userData, stream, stream_type,
@@ -3123,7 +3123,7 @@
                     cam_padding_info_t *paddingInfo,
                     void *userData,
                     camera3_stream_t *stream,
-                    uint32_t postprocess_mask,
+                    cam_feature_mask_t postprocess_mask,
                     __unused bool is4KVideo,
                     bool isInputStreamConfigured,
                     QCamera3Channel *metadataChannel,
@@ -3597,7 +3597,7 @@
                                                  mm_camera_ops_t *cam_ops,
                                                  channel_cb_routine cb_routine,
                                                  cam_padding_info_t *paddingInfo,
-                                                 uint32_t postprocess_mask,
+                                                 cam_feature_mask_t postprocess_mask,
                                                  void *userData, void *ch_hdl) :
     /* In case of framework reprocessing, pproc and jpeg operations could be
      * parallelized by allowing 1 extra buffer for reprocessing output:
@@ -4657,7 +4657,7 @@
                     uint32_t channel_handle,
                     mm_camera_ops_t *cam_ops,
                     cam_padding_info_t *paddingInfo,
-                    uint32_t postprocess_mask,
+                    cam_feature_mask_t postprocess_mask,
                     cam_stream_type_t streamType,
                     cam_dimension_t *dim,
                     cam_format_t streamFormat,
diff --git a/QCamera2/HAL3/QCamera3Channel.h b/QCamera2/HAL3/QCamera3Channel.h
index 939101c..1c0e9c0 100644
--- a/QCamera2/HAL3/QCamera3Channel.h
+++ b/QCamera2/HAL3/QCamera3Channel.h
@@ -77,7 +77,7 @@
                    mm_camera_ops_t *cam_ops,
                    channel_cb_routine cb_routine,
                    cam_padding_info_t *paddingInfo,
-                   uint32_t postprocess_mask,
+                   cam_feature_mask_t postprocess_mask,
                    void *userData, uint32_t numBuffers);
     virtual ~QCamera3Channel();
 
@@ -131,7 +131,7 @@
                       cam_dimension_t streamDim,
                       cam_rotation_t streamRotation,
                       uint8_t minStreamBufnum,
-                      uint32_t postprocessMask,
+                      cam_feature_mask_t postprocessMask,
                       cam_is_type_t isType,
                       uint32_t batchSize = 0);
 
@@ -150,7 +150,7 @@
     QCamera3HeapMemory *mStreamInfoBuf;
     channel_cb_routine mChannelCB;
     //cam_padding_info_t *mPaddingInfo;
-    uint32_t mPostProcMask;
+    cam_feature_mask_t mPostProcMask;
     uint32_t mYUVDump;
     cam_is_type_t mIsType;
     uint32_t mNumBuffers;
@@ -179,7 +179,7 @@
            void *userData,
            camera3_stream_t *stream,
            cam_stream_type_t stream_type,
-           uint32_t postprocess_mask,
+           cam_feature_mask_t postprocess_mask,
            QCamera3Channel *metadataChannel,
            uint32_t numBuffers = MAX_INFLIGHT_REQUESTS);
 
@@ -273,7 +273,7 @@
                     void *userData,
                     camera3_stream_t *stream,
                     cam_stream_type_t stream_type,
-                    uint32_t postprocess_mask,
+                    cam_feature_mask_t postprocess_mask,
                     QCamera3Channel *metadataChannel,
                     uint32_t numBuffers = MAX_INFLIGHT_REQUESTS);
 
@@ -303,7 +303,7 @@
                     mm_camera_ops_t *cam_ops,
                     channel_cb_routine cb_routine,
                     cam_padding_info_t *paddingInfo,
-                    uint32_t postprocess_mask,
+                    cam_feature_mask_t postprocess_mask,
                     void *userData,
                     uint32_t numBuffers = MIN_STREAMING_BUFFER_NUM);
     virtual ~QCamera3MetadataChannel();
@@ -335,7 +335,7 @@
                     cam_padding_info_t *paddingInfo,
                     void *userData,
                     camera3_stream_t *stream,
-                    uint32_t postprocess_mask,
+                    cam_feature_mask_t postprocess_mask,
                     QCamera3Channel *metadataChannel,
                     bool raw_16 = false,
                     uint32_t numBuffers = MAX_INFLIGHT_REQUESTS);
@@ -371,7 +371,7 @@
                     cam_dimension_t rawDumpSize,
                     cam_padding_info_t *paddingInfo,
                     void *userData,
-                    uint32_t postprocess_mask, uint32_t numBuffers = 3U);
+                    cam_feature_mask_t postprocess_mask, uint32_t numBuffers = 3U);
     virtual ~QCamera3RawDumpChannel();
     virtual int32_t initialize(cam_is_type_t isType);
     virtual void streamCbRoutine(mm_camera_super_buf_t *super_frame,
@@ -405,7 +405,7 @@
             void *userData,
             camera3_stream_t *stream,
             cam_stream_type_t stream_type,
-            uint32_t postprocess_mask,
+            cam_feature_mask_t postprocess_mask,
             QCamera3Channel *metadataChannel);
     ~QCamera3YUVChannel();
     virtual int32_t initialize(cam_is_type_t isType);
@@ -464,7 +464,7 @@
             cam_padding_info_t *paddingInfo,
             void *userData,
             camera3_stream_t *stream,
-            uint32_t postprocess_mask,
+            cam_feature_mask_t postprocess_mask,
             bool is4KVideo,
             bool isInputStreamConfigured,
             QCamera3Channel *metadataChannel,
@@ -522,7 +522,7 @@
                             mm_camera_ops_t *cam_ops,
                             channel_cb_routine cb_routine,
                             cam_padding_info_t *paddingInfo,
-                            uint32_t postprocess_mask,
+                            cam_feature_mask_t postprocess_mask,
                             void *userData, void *ch_hdl);
     QCamera3ReprocessChannel();
     virtual ~QCamera3ReprocessChannel();
@@ -593,7 +593,7 @@
                     uint32_t channel_handle,
                     mm_camera_ops_t *cam_ops,
                     cam_padding_info_t *paddingInfo,
-                    uint32_t postprocess_mask,
+                    cam_feature_mask_t postprocess_mask,
                     cam_stream_type_t streamType,
                     cam_dimension_t *dim,
                     cam_format_t streamFormat,
diff --git a/QCamera2/HAL3/QCamera3HWI.cpp b/QCamera2/HAL3/QCamera3HWI.cpp
index 02ebe18..d1949e8 100644
--- a/QCamera2/HAL3/QCamera3HWI.cpp
+++ b/QCamera2/HAL3/QCamera3HWI.cpp
@@ -313,6 +313,9 @@
     .reserved                           = {0},
 };
 
+// initialise to some default value
+uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
+
 /*===========================================================================
  * FUNCTION   : QCamera3HardwareInterface
  *
@@ -336,6 +339,8 @@
       mAnalysisChannel(NULL),
       mRawDumpChannel(NULL),
       mDummyBatchChannel(NULL),
+      m_perfLock(),
+      mCommon(),
       mChannelHandle(0),
       mFirstConfiguration(true),
       mFlush(false),
@@ -366,10 +371,16 @@
       mLdafCalibExist(false),
       mPowerHintEnabled(false),
       mLastCustIntentFrmNum(-1),
-      mState(CLOSED)
+      mState(CLOSED),
+      mIsDeviceLinked(false),
+      mIsMainCamera(true),
+      mLinkedCameraId(0),
+      m_pRelCamSyncHeap(NULL),
+      m_pRelCamSyncBuf(NULL)
 {
     getLogLevel();
     m_perfLock.lock_init();
+    mCommon.init(gCamCapability[cameraId]);
     mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
     mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
     mCameraDevice.common.close = close_camera_device;
@@ -738,6 +749,43 @@
         pthread_mutex_unlock(&gCamLock);
     }
 
+    //fill the session id needed while linking dual cam
+    pthread_mutex_lock(&gCamLock);
+    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
+        &sessionId[mCameraId]);
+    pthread_mutex_unlock(&gCamLock);
+
+    if (rc < 0) {
+        LOGE("Error, failed to get sessiion id");
+        return UNKNOWN_ERROR;
+    } else {
+        //Allocate related cam sync buffer
+        //this is needed for the payload that goes along with bundling cmd for related
+        //camera use cases
+        m_pRelCamSyncHeap = new QCamera3HeapMemory(1);
+        rc = m_pRelCamSyncHeap->allocate(sizeof(cam_sync_related_sensors_event_info_t));
+        if(rc != OK) {
+            rc = NO_MEMORY;
+            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
+            return NO_MEMORY;
+        }
+
+        //Map memory for related cam sync buffer
+        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
+                CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF,
+                m_pRelCamSyncHeap->getFd(0),
+                sizeof(cam_sync_related_sensors_event_info_t));
+        if(rc < 0) {
+            LOGE("Dualcam: failed to map Related cam sync buffer");
+            rc = FAILED_TRANSACTION;
+            return NO_MEMORY;
+        }
+        m_pRelCamSyncBuf =
+                (cam_sync_related_sensors_event_info_t*) DATA_PTR(m_pRelCamSyncHeap,0);
+    }
+
+    LOGH("mCameraId=%d",mCameraId);
+
     return NO_ERROR;
 }
 
@@ -763,6 +811,11 @@
     rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
     mCameraHandle = NULL;
 
+    //reset session id to some invalid id
+    pthread_mutex_lock(&gCamLock);
+    sessionId[mCameraId] = 0xDEADBEEF;
+    pthread_mutex_unlock(&gCamLock);
+
     //Notify display HAL that there is no active camera session
     //but avoid calling the same during bootup. Refer to openCamera
     //for more details.
@@ -775,6 +828,13 @@
         pthread_mutex_unlock(&gCamLock);
     }
 
+    if (NULL != m_pRelCamSyncHeap) {
+        m_pRelCamSyncHeap->deallocate();
+        delete m_pRelCamSyncHeap;
+        m_pRelCamSyncHeap = NULL;
+        m_pRelCamSyncBuf = NULL;
+    }
+
     if (mExifParams.debug_params) {
         free(mExifParams.debug_params);
         mExifParams.debug_params = NULL;
@@ -814,13 +874,6 @@
         case OPENED:
             /* valid state */
             break;
-
-        case ERROR:
-            pthread_mutex_unlock(&mMutex);
-            handleCameraDeviceError();
-            rc = -ENODEV;
-            goto err2;
-
         default:
             LOGE("Invalid state %d", mState);
             rc = -ENODEV;
@@ -851,7 +904,6 @@
 
 err1:
     pthread_mutex_unlock(&mMutex);
-err2:
     return rc;
 }
 
@@ -1132,7 +1184,7 @@
         uint32_t stream_idx)
 {
     char feature_mask_value[PROPERTY_VALUE_MAX];
-    uint32_t feature_mask;
+    cam_feature_mask_t feature_mask;
     int args_converted;
     int property_len;
 
@@ -1141,9 +1193,9 @@
             feature_mask_value, "0");
     if ((property_len > 2) && (feature_mask_value[0] == '0') &&
             (feature_mask_value[1] == 'x')) {
-        args_converted = sscanf(feature_mask_value, "0x%x", &feature_mask);
+        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
     } else {
-        args_converted = sscanf(feature_mask_value, "%d", &feature_mask);
+        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
     }
     if (1 != args_converted) {
         feature_mask = 0;
@@ -1168,7 +1220,7 @@
     default:
         break;
     }
-    LOGD("PP feature mask %x",
+    LOGD("PP feature mask %llx",
             mStreamConfigInfo.postprocess_mask[stream_idx]);
 }
 
@@ -1320,12 +1372,6 @@
         case STARTED:
             /* valid state */
             break;
-
-        case ERROR:
-            pthread_mutex_unlock(&mMutex);
-            handleCameraDeviceError();
-            return -ENODEV;
-
         default:
             LOGE("Invalid state %d", mState);
             pthread_mutex_unlock(&mMutex);
@@ -1351,7 +1397,7 @@
     cam_dimension_t maxViewfinderSize = {0, 0};
     bool bJpegExceeds4K = false;
     bool bUseCommonFeatureMask = false;
-    uint32_t commonFeatureMask = 0;
+    cam_feature_mask_t commonFeatureMask = 0;
     bool bSmallJpegSize = false;
     uint32_t width_ratio;
     uint32_t height_ratio;
@@ -1449,7 +1495,6 @@
                 stallStreamCnt++;
                 if (isOnEncoder(maxViewfinderSize, newStream->width,
                         newStream->height)) {
-                    commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
                     numStreamsOnEncoder++;
                 }
                 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
@@ -1473,10 +1518,8 @@
                 processedStreamCnt++;
                 if (isOnEncoder(maxViewfinderSize, newStream->width,
                         newStream->height)) {
-                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
-                            IS_USAGE_ZSL(newStream->usage)) {
-                        commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
-                    } else {
+                    if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
+                            !IS_USAGE_ZSL(newStream->usage)) {
                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
                     }
                     numStreamsOnEncoder++;
@@ -1492,8 +1535,6 @@
                     if (newStream->width <= VIDEO_4K_WIDTH &&
                             newStream->height <= VIDEO_4K_HEIGHT) {
                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
-                    } else {
-                        commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
                     }
                     numStreamsOnEncoder++;
                     numYuv888OnEncoder++;
@@ -1570,7 +1611,7 @@
         commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
     }
 
-    LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %x",
+    LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
             maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
             commonFeatureMask);
     LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
@@ -1693,9 +1734,12 @@
     }
 
     //Create metadata channel and initialize it
+    cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
+    setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
+            gCamCapability[mCameraId]->color_arrangement);
     mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                     mChannelHandle, mCameraHandle->ops, captureResultCb,
-                    &padding_info, CAM_QCOM_FEATURE_NONE, this);
+                    &padding_info, metadataFeatureMask, this);
     if (mMetadataChannel == NULL) {
         LOGE("failed to allocate metadata channel");
         rc = -ENOMEM;
@@ -1713,18 +1757,33 @@
 
     // Create analysis stream all the time, even when h/w support is not available
     {
+        cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
+        setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
+                gCamCapability[mCameraId]->color_arrangement);
+        cam_analysis_info_t analysisInfo;
+        rc = mCommon.getAnalysisInfo(
+                FALSE,
+                TRUE,
+                analysisFeatureMask,
+                &analysisInfo);
+        if (rc != NO_ERROR) {
+            LOGE("getAnalysisInfo failed, ret = %d", rc);
+            pthread_mutex_unlock(&mMutex);
+            return rc;
+        }
+
         mAnalysisChannel = new QCamera3SupportChannel(
                 mCameraHandle->camera_handle,
                 mChannelHandle,
                 mCameraHandle->ops,
-                &gCamCapability[mCameraId]->analysis_padding_info,
-                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
+                &analysisInfo.analysis_padding_info,
+                analysisFeatureMask,
                 CAM_STREAM_TYPE_ANALYSIS,
-                &gCamCapability[mCameraId]->analysis_recommended_res,
-                (gCamCapability[mCameraId]->analysis_recommended_format
+                &analysisInfo.analysis_max_res,
+                (analysisInfo.analysis_format
                 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
                 : CAM_FORMAT_YUV_420_NV21),
-                gCamCapability[mCameraId]->hw_analysis_supported,
+                analysisInfo.hw_analysis_supported,
                 this,
                 0); // force buffer count to 0
         if (!mAnalysisChannel) {
@@ -1847,6 +1906,7 @@
             case HAL_PIXEL_FORMAT_RAW16:
             case HAL_PIXEL_FORMAT_RAW10:
                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
+                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
                 isRawStreamRequested = true;
                 break;
             default:
@@ -1856,6 +1916,10 @@
             }
         }
 
+        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
+                (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
+                gCamCapability[mCameraId]->color_arrangement);
+
         if (newStream->priv == NULL) {
             //New stream, construct channel
             switch (newStream->stream_type) {
@@ -1975,7 +2039,8 @@
                             mCameraHandle->camera_handle, mChannelHandle,
                             mCameraHandle->ops, captureResultCb,
                             &padding_info,
-                            this, newStream, CAM_QCOM_FEATURE_NONE,
+                            this, newStream,
+                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                             mMetadataChannel,
                             (newStream->format == HAL_PIXEL_FORMAT_RAW16));
                     if (mRawChannel == NULL) {
@@ -2054,12 +2119,16 @@
     if (mEnableRawDump && isRawStreamRequested == false){
         cam_dimension_t rawDumpSize;
         rawDumpSize = getMaxRawSize(mCameraId);
+        cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
+        setPAAFSupport(rawDumpFeatureMask,
+                CAM_STREAM_TYPE_RAW,
+                gCamCapability[mCameraId]->color_arrangement);
         mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
                                   mChannelHandle,
                                   mCameraHandle->ops,
                                   rawDumpSize,
                                   &padding_info,
-                                  this, CAM_QCOM_FEATURE_NONE);
+                                  this, rawDumpFeatureMask);
         if (!mRawDumpChannel) {
             LOGE("Raw Dump channel cannot be created");
             pthread_mutex_unlock(&mMutex);
@@ -2069,26 +2138,51 @@
 
 
     if (mAnalysisChannel) {
-        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
-                gCamCapability[mCameraId]->analysis_recommended_res;
+        cam_analysis_info_t analysisInfo;
+        memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
                 CAM_STREAM_TYPE_ANALYSIS;
         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
+        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
+                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
+                gCamCapability[mCameraId]->color_arrangement);
+        rc = mCommon.getAnalysisInfo(FALSE, TRUE,
+                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
+                &analysisInfo);
+        if (rc != NO_ERROR) {
+            LOGE("getAnalysisInfo failed, ret = %d", rc);
+            pthread_mutex_unlock(&mMutex);
+            return rc;
+        }
+        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
+                analysisInfo.analysis_max_res;
         mStreamConfigInfo.num_streams++;
     }
 
     if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
+        cam_analysis_info_t supportInfo;
+        memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
+        cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
+        setPAAFSupport(callbackFeatureMask,
+                CAM_STREAM_TYPE_CALLBACK,
+                gCamCapability[mCameraId]->color_arrangement);
+        rc = mCommon.getAnalysisInfo(FALSE, TRUE, callbackFeatureMask, &supportInfo);
+        if (rc != NO_ERROR) {
+            LOGE("getAnalysisInfo failed, ret = %d", rc);
+            pthread_mutex_unlock(&mMutex);
+            return rc;
+        }
         mSupportChannel = new QCamera3SupportChannel(
                 mCameraHandle->camera_handle,
                 mChannelHandle,
                 mCameraHandle->ops,
                 &gCamCapability[mCameraId]->padding_info,
-                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
+                callbackFeatureMask,
                 CAM_STREAM_TYPE_CALLBACK,
                 &QCamera3SupportChannel::kDim,
                 CAM_FORMAT_YUV_420_NV21,
-                gCamCapability[mCameraId]->hw_analysis_supported,
+                supportInfo.hw_analysis_supported,
                 this);
         if (!mSupportChannel) {
             LOGE("dummy channel cannot be created");
@@ -2104,6 +2198,9 @@
                 CAM_STREAM_TYPE_CALLBACK;
         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
+        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
+                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
+                gCamCapability[mCameraId]->color_arrangement);
         mStreamConfigInfo.num_streams++;
     }
 
@@ -2116,6 +2213,9 @@
                 CAM_STREAM_TYPE_RAW;
         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                 CAM_QCOM_FEATURE_NONE;
+        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
+                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
+                gCamCapability[mCameraId]->color_arrangement);
         mStreamConfigInfo.num_streams++;
     }
     /* In HFR mode, if video stream is not added, create a dummy channel so that
@@ -2123,6 +2223,10 @@
      * never 'start'ed (no stream-on), it is only 'initialized'  */
     if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
             !m_bIsVideo) {
+        cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
+        setPAAFSupport(dummyFeatureMask,
+                CAM_STREAM_TYPE_VIDEO,
+                gCamCapability[mCameraId]->color_arrangement);
         mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                 mChannelHandle,
                 mCameraHandle->ops, captureResultCb,
@@ -2130,7 +2234,7 @@
                 this,
                 &mDummyBatchStream,
                 CAM_STREAM_TYPE_VIDEO,
-                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
+                dummyFeatureMask,
                 mMetadataChannel);
         if (NULL == mDummyBatchChannel) {
             LOGE("creation of mDummyBatchChannel failed."
@@ -2146,6 +2250,9 @@
                 CAM_STREAM_TYPE_VIDEO;
         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
+        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
+                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
+                gCamCapability[mCameraId]->color_arrangement);
         mStreamConfigInfo.num_streams++;
     }
 
@@ -3515,6 +3622,63 @@
             goto error_exit;
         }
 
+        //update settings from app here
+        if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
+            mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
+            LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
+        }
+        if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
+            mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
+            LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
+        }
+        if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
+            mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
+            LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
+
+            if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
+                (mLinkedCameraId != mCameraId) ) {
+                LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
+                    mLinkedCameraId, mCameraId);
+                goto error_exit;
+            }
+        }
+
+        // add bundle related cameras
+        LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
+        if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
+            if (mIsDeviceLinked)
+                m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
+            else
+                m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
+
+            pthread_mutex_lock(&gCamLock);
+
+            if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
+                LOGE("Dualcam: Invalid Session Id ");
+                pthread_mutex_unlock(&gCamLock);
+                goto error_exit;
+            }
+
+            if (mIsMainCamera == 1) {
+                m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
+                m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
+                // related session id should be session id of linked session
+                m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
+            } else {
+                m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
+                m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
+                m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
+            }
+            pthread_mutex_unlock(&gCamLock);
+
+            rc = mCameraHandle->ops->sync_related_sensors(
+                    mCameraHandle->camera_handle, m_pRelCamSyncBuf);
+            if (rc < 0) {
+                LOGE("Dualcam: link failed");
+                goto error_exit;
+            }
+        }
+
         //Then start them.
         LOGH("Start META Channel");
         rc = mMetadataChannel->start();
@@ -3596,7 +3760,6 @@
             }
         }
 
-
         goto no_error;
 error_exit:
         m_perfLock.lock_rel();
@@ -4106,6 +4269,30 @@
     pthread_mutex_unlock(&mMutex);
 
     rc = stopAllChannels();
+    // unlink of dualcam
+    if (mIsDeviceLinked) {
+        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
+        pthread_mutex_lock(&gCamLock);
+
+        if (mIsMainCamera == 1) {
+            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
+            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
+            // related session id should be session id of linked session
+            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
+        } else {
+            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
+            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
+            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
+        }
+        pthread_mutex_unlock(&gCamLock);
+
+        rc = mCameraHandle->ops->sync_related_sensors(
+                mCameraHandle->camera_handle, m_pRelCamSyncBuf);
+        if (rc < 0) {
+            LOGE("Dualcam: Unlink failed, but still proceed to close");
+        }
+    }
+
     if (rc < 0) {
         LOGE("stopAllChannels failed");
         return rc;
@@ -5378,6 +5565,27 @@
             mExifParams.debug_params->stats_debug_params_valid = TRUE;
         }
     }
+    IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
+            CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
+        if (mExifParams.debug_params) {
+            mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
+            mExifParams.debug_params->bestats_debug_params_valid = TRUE;
+        }
+    }
+    IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
+            CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
+        if (mExifParams.debug_params) {
+            mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
+            mExifParams.debug_params->bhist_debug_params_valid = TRUE;
+        }
+    }
+    IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
+            CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
+        if (mExifParams.debug_params) {
+            mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
+            mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
+        }
+    }
 }
 
 /*===========================================================================
@@ -5937,8 +6145,14 @@
     }
     memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
                                         sizeof(cam_capability_t));
-    gCamCapability[cameraId]->analysis_padding_info.offset_info.offset_x = 0;
-    gCamCapability[cameraId]->analysis_padding_info.offset_info.offset_y = 0;
+
+    int index;
+    for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
+        cam_analysis_info_t *p_analysis_info =
+                &gCamCapability[cameraId]->analysis_info[index];
+        p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
+        p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
+    }
     rc = 0;
 
 query_failed:
@@ -6581,29 +6795,32 @@
         /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
         if (fps >= MIN_FPS_FOR_BATCH_MODE) {
             /* For each HFR frame rate, need to advertise one variable fps range
-             * and one fixed fps range. Eg: for 120 FPS, advertise [30, 120] and
-             * [120, 120]. While camcorder preview alone is running [30, 120] is
+             * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
+             * and [120, 120]. While camcorder preview alone is running [30, 120] is
              * set by the app. When video recording is started, [120, 120] is
              * set. This way sensor configuration does not change when recording
              * is started */
 
             /* (width, height, fps_min, fps_max, batch_size_max) */
-            available_hfr_configs.add(
-                    gCamCapability[cameraId]->hfr_tbl[i].dim.width);
-            available_hfr_configs.add(
-                    gCamCapability[cameraId]->hfr_tbl[i].dim.height);
-            available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
-            available_hfr_configs.add(fps);
-            available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
+            for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
+                j < MAX_SIZES_CNT; j++) {
+                available_hfr_configs.add(
+                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
+                available_hfr_configs.add(
+                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
+                available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
+                available_hfr_configs.add(fps);
+                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
 
-            /* (width, height, fps_min, fps_max, batch_size_max) */
-            available_hfr_configs.add(
-                    gCamCapability[cameraId]->hfr_tbl[i].dim.width);
-            available_hfr_configs.add(
-                    gCamCapability[cameraId]->hfr_tbl[i].dim.height);
-            available_hfr_configs.add(fps);
-            available_hfr_configs.add(fps);
-            available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
+                /* (width, height, fps_min, fps_max, batch_size_max) */
+                available_hfr_configs.add(
+                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
+                available_hfr_configs.add(
+                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
+                available_hfr_configs.add(fps);
+                available_hfr_configs.add(fps);
+                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
+            }
        }
     }
     //Advertise HFR capability only if the property is set
@@ -7968,6 +8185,15 @@
                              tnr_enable, tnr_process_type, type);
     }
 
+    //Update Link tags to default
+    int32_t sync_type = CAM_TYPE_STANDALONE;
+    settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
+
+    int32_t is_main = 0; //this doesn't matter as app should overwrite
+    settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
+
+    settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
+
     /* CDS default */
     char prop[PROPERTY_VALUE_MAX];
     memset(prop, 0, sizeof(prop));
@@ -9556,7 +9782,7 @@
  * RETURN     : true: needed
  *              false: no need
  *==========================================================================*/
-bool QCamera3HardwareInterface::needReprocess(uint32_t postprocess_mask)
+bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
 {
     if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
         // TODO: add for ZSL HDR later
@@ -10273,4 +10499,45 @@
             get_num_overall_buffers());
 }
 
+/*===========================================================================
+ * FUNCTION   : setPAAFSupport
+ *
+ * DESCRIPTION: Set the preview-assisted auto focus support bit in
+ *              feature mask according to stream type and filter
+ *              arrangement
+ *
+ * PARAMETERS : @feature_mask: current feature mask, which may be modified
+ *              @stream_type: stream type
+ *              @filter_arrangement: filter arrangement
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera3HardwareInterface::setPAAFSupport(
+        cam_feature_mask_t& feature_mask,
+        cam_stream_type_t stream_type,
+        cam_color_filter_arrangement_t filter_arrangement)
+{
+    LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
+            feature_mask, stream_type, filter_arrangement);
+
+    switch (filter_arrangement) {
+    case CAM_FILTER_ARRANGEMENT_RGGB:
+    case CAM_FILTER_ARRANGEMENT_GRBG:
+    case CAM_FILTER_ARRANGEMENT_GBRG:
+    case CAM_FILTER_ARRANGEMENT_BGGR:
+        if ((stream_type == CAM_STREAM_TYPE_CALLBACK) ||
+                (stream_type == CAM_STREAM_TYPE_PREVIEW) ||
+                (stream_type == CAM_STREAM_TYPE_VIDEO)) {
+            feature_mask |= CAM_QCOM_FEATURE_PAAF;
+        }
+        break;
+    case CAM_FILTER_ARRANGEMENT_Y:
+        if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
+            feature_mask |= CAM_QCOM_FEATURE_PAAF;
+        }
+        break;
+    default:
+        break;
+    }
+}
 }; //end namespace qcamera
diff --git a/QCamera2/HAL3/QCamera3HWI.h b/QCamera2/HAL3/QCamera3HWI.h
index c6859c2..d211193 100644
--- a/QCamera2/HAL3/QCamera3HWI.h
+++ b/QCamera2/HAL3/QCamera3HWI.h
@@ -43,6 +43,7 @@
 #include "QCamera3HALHeader.h"
 #include "QCamera3Mem.h"
 #include "QCameraPerf.h"
+#include "QCameraCommon.h"
 
 extern "C" {
 #include "mm_camera_interface.h"
@@ -121,6 +122,8 @@
 public:
     /* static variable and functions accessed by camera service */
     static camera3_device_ops_t mCameraOps;
+    //Id of each session in bundle/link
+    static uint32_t sessionId[MM_CAMERA_MAX_NUM_SENSORS];
     static int initialize(const struct camera3_device *,
                 const camera3_callback_ops_t *callback_ops);
     static int configure_streams(const struct camera3_device *,
@@ -196,8 +199,9 @@
     QCamera3ReprocessChannel *addOfflineReprocChannel(const reprocess_config_t &config,
             QCamera3ProcessingChannel *inputChHandle);
     bool needRotationReprocess();
-    bool needReprocess(uint32_t postprocess_mask);
     bool needJpegExifRotation();
+    bool needReprocess(cam_feature_mask_t postprocess_mask);
+    bool needJpegRotation();
     cam_denoise_process_type_t getWaveletDenoiseProcessPlate();
     cam_denoise_process_type_t getTemporalDenoiseProcessPlate();
 
@@ -324,6 +328,10 @@
     static bool supportBurstCapture(uint32_t cameraId);
     int32_t setBundleInfo();
 
+    static void setPAAFSupport(cam_feature_mask_t& feature_mask,
+            cam_stream_type_t stream_type,
+            cam_color_filter_arrangement_t filter_arrangement);
+
     camera3_device_t   mCameraDevice;
     uint32_t           mCameraId;
     mm_camera_vtbl_t  *mCameraHandle;
@@ -339,6 +347,7 @@
     QCamera3RawDumpChannel *mRawDumpChannel;
     QCamera3RegularChannel *mDummyBatchChannel;
     QCameraPerfLock m_perfLock;
+    QCameraCommon   mCommon;
 
     uint32_t mChannelHandle;
 
@@ -506,6 +515,14 @@
     uint32_t mSurfaceStridePadding;
 
     State mState;
+    //Dual camera related params
+    bool mIsDeviceLinked;
+    bool mIsMainCamera;
+    uint8_t mLinkedCameraId;
+    QCamera3HeapMemory *m_pRelCamSyncHeap;
+    cam_sync_related_sensors_event_info_t *m_pRelCamSyncBuf;
+    cam_sync_related_sensors_event_info_t m_relCamSyncInfo;
+
 };
 
 }; // namespace qcamera
diff --git a/QCamera2/HAL3/QCamera3PostProc.cpp b/QCamera2/HAL3/QCamera3PostProc.cpp
index 6daadc7..e582d9a 100644
--- a/QCamera2/HAL3/QCamera3PostProc.cpp
+++ b/QCamera2/HAL3/QCamera3PostProc.cpp
@@ -110,18 +110,15 @@
  *
  * PARAMETERS :
  *   @memory              : output buffer memory
- *   @postprocess_mask    : postprocess mask for the buffer
  *
  * RETURN     : int32_t type of status
  *              NO_ERROR  -- success
  *              none-zero failure code
  *==========================================================================*/
-int32_t QCamera3PostProcessor::init(QCamera3StreamMem *memory,
-        uint32_t postprocess_mask)
+int32_t QCamera3PostProcessor::init(QCamera3StreamMem *memory)
 {
     ATRACE_CALL();
     mOutputMem = memory;
-    mPostProcMask = postprocess_mask;
     m_dataProcTh.launch(dataProcessRoutine, this);
 
     return NO_ERROR;
@@ -1630,6 +1627,12 @@
                     jpg_job.encode_job.cam_exif_params.debug_params->asd_debug_params_valid;
             jpg_job.encode_job.p_metadata->is_statsdebug_stats_params_valid =
                     jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_bestats_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->bestats_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_bhist_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->bhist_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_3a_tuning_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->q3a_tuning_debug_params_valid;
 
             if (jpg_job.encode_job.cam_exif_params.debug_params->ae_debug_params_valid) {
                 jpg_job.encode_job.p_metadata->statsdebug_ae_data =
@@ -1651,6 +1654,18 @@
                 jpg_job.encode_job.p_metadata->statsdebug_stats_buffer_data =
                         jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params;
             }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->bestats_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_bestats_buffer_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->bestats_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->bhist_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_bhist_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->bhist_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->q3a_tuning_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_3a_tuning_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->q3a_tuning_debug_params;
+            }
         }
     } else {
        LOGW("Metadata is null");
diff --git a/QCamera2/HAL3/QCamera3PostProc.h b/QCamera2/HAL3/QCamera3PostProc.h
index 9114b49..9fcd497 100644
--- a/QCamera2/HAL3/QCamera3PostProc.h
+++ b/QCamera2/HAL3/QCamera3PostProc.h
@@ -111,8 +111,7 @@
     QCamera3PostProcessor(QCamera3ProcessingChannel *ch_ctrl);
     virtual ~QCamera3PostProcessor();
 
-    int32_t init(QCamera3StreamMem *mMemory,
-            uint32_t postprocess_mask);
+    int32_t init(QCamera3StreamMem *mMemory);
     int32_t initJpeg(jpeg_encode_callback_t jpeg_cb,
             cam_dimension_t *m_max_pic_dim,
             void *user_data);
@@ -167,7 +166,6 @@
     mm_jpeg_ops_t              mJpegHandle;
     uint32_t                   mJpegClientHandle;
     uint32_t                   mJpegSessionId;
-    uint32_t                   mPostProcMask;
 
     uint32_t                   m_bThumbnailNeeded;
     QCamera3StreamMem          *mOutputMem;
diff --git a/QCamera2/HAL3/QCamera3Stream.cpp b/QCamera2/HAL3/QCamera3Stream.cpp
index 434e071..71935e8 100644
--- a/QCamera2/HAL3/QCamera3Stream.cpp
+++ b/QCamera2/HAL3/QCamera3Stream.cpp
@@ -324,7 +324,7 @@
                             cam_rotation_t streamRotation,
                             cam_stream_reproc_config_t* reprocess_config,
                             uint8_t minNumBuffers,
-                            uint32_t postprocess_mask,
+                            cam_feature_mask_t postprocess_mask,
                             cam_is_type_t is_type,
                             uint32_t batchSize,
                             hal3_stream_cb_routine stream_cb,
@@ -366,7 +366,7 @@
     mStreamInfo->pp_config.feature_mask = postprocess_mask;
     mStreamInfo->is_type = is_type;
     mStreamInfo->pp_config.rotation = streamRotation;
-    LOGD("stream_type is %d, feature_mask is %d",
+    LOGD("stream_type is %d, feature_mask is %Ld",
            mStreamInfo->stream_type, mStreamInfo->pp_config.feature_mask);
 
     bufSize = mStreamInfoBuf->getSize(0);
diff --git a/QCamera2/HAL3/QCamera3Stream.h b/QCamera2/HAL3/QCamera3Stream.h
index 101e3f3..5825be0 100644
--- a/QCamera2/HAL3/QCamera3Stream.h
+++ b/QCamera2/HAL3/QCamera3Stream.h
@@ -67,7 +67,7 @@
                          cam_rotation_t streamRotation,
                          cam_stream_reproc_config_t* reprocess_config,
                          uint8_t minStreamBufNum,
-                         uint32_t postprocess_mask,
+                         cam_feature_mask_t postprocess_mask,
                          cam_is_type_t is_type,
                          uint32_t batchSize,
                          hal3_stream_cb_routine stream_cb,
diff --git a/QCamera2/stack/common/cam_intf.h b/QCamera2/stack/common/cam_intf.h
index e57c6f2..95c2efd 100644
--- a/QCamera2/stack/common/cam_intf.h
+++ b/QCamera2/stack/common/cam_intf.h
@@ -61,6 +61,97 @@
     CAM_PRIV_FLUSH
 } cam_private_ioctl_enum_t;
 
+typedef enum {
+    /* start syncing for related cameras */
+    CAM_SYNC_RELATED_SENSORS_ON = 1,
+    /* stop syncing for related cameras */
+    CAM_SYNC_RELATED_SENSORS_OFF
+} cam_sync_related_sensors_control_t;
+
+typedef enum {
+    /* Driving camera of the related camera sub-system */
+    /* Certain features are enabled only for primary camera
+       such as display mode for preview, autofocus etc
+       In certain configurations for eg. when optical zoom
+       limit is reached, Aux Camera would become
+       the driving camera and there will be role switch.*/
+    CAM_MODE_PRIMARY = 0,
+    /* Non-driving camera of the related camera sub-system
+       no display mode set for secondary camera */
+    CAM_MODE_SECONDARY
+} cam_sync_mode_t;
+
+/* Payload for sending bundling info to backend */
+typedef struct {
+    cam_sync_related_sensors_control_t sync_control;
+    cam_sync_type_t type;
+    cam_sync_mode_t mode;
+    /* session Id of the other camera session
+       Linking will be done with this session in the
+       backend */
+    uint32_t related_sensor_session_id;
+    uint8_t is_frame_sync_enabled;
+}cam_sync_related_sensors_event_info_t;
+
+/* Related camera sensor specific calibration data */
+typedef struct {
+    /* Focal length in pixels @ calibration resolution.*/
+    float       normalized_focal_length;
+    /* Native sensor resolution W that was used to capture calibration image */
+    uint16_t    native_sensor_resolution_width;
+    /* Native sensor resolution H that was used to capture calibration image */
+    uint16_t    native_sensor_resolution_height;
+    /* Image size W used internally by calibration tool */
+    uint16_t    calibration_sensor_resolution_width;
+    /* Image size H used internally by calibration tool */
+    uint16_t    calibration_sensor_resolution_height;
+    /* Focal length ratio @ Calibration */
+    float       focal_length_ratio;
+}cam_related_sensor_calibration_data_t;
+
+/* Related Camera System Calibration data
+   Calibration data for the entire related cam sub-system is
+   in a shared EEPROM. We have 2 fields which are specific to
+   each sensor followed by a set of common calibration of the
+   entire related cam system*/
+typedef struct {
+    /* Version information */
+    uint32_t    calibration_format_version;
+    /* Main Camera Sensor specific calibration */
+    cam_related_sensor_calibration_data_t  main_cam_specific_calibration;
+    /* Aux Camera Sensor specific calibration */
+    cam_related_sensor_calibration_data_t  aux_cam_specific_calibration;
+    /* Relative viewpoint matching matrix w.r.t Main */
+    float      relative_rotation_matrix[RELCAM_CALIB_ROT_MATRIX_MAX];
+    /* Relative geometric surface description parameters */
+    float      relative_geometric_surface_parameters[
+            RELCAM_CALIB_SURFACE_PARMS_MAX];
+    /* Relative offset of sensor center from optical axis along horizontal dimension */
+    float      relative_principle_point_x_offset;
+    /* Relative offset of sensor center from optical axis along vertical dimension */
+    float      relative_principle_point_y_offset;
+    /* 0=Main Camera is on the left of Aux; 1=Main Camera is on the right of Aux */
+    uint16_t   relative_position_flag;
+    /* Camera separation in mm */
+    float      relative_baseline_distance;
+    /* main sensor setting during cal: 0-none, 1-hor-mirror, 2-ver-flip, 3-both */
+    uint16_t   main_sensor_mirror_flip_setting;
+    /* aux sensor setting during cal: 0-none, 1-hor-mirror, 2-ver-flip, 3-both */
+    uint16_t   aux_sensor_mirror_flip_setting;
+    /* module orientation during cal: 0-sensors in landscape, 1-sensors in portrait */
+    uint16_t   module_orientation_during_calibration;
+    /* cal images required rotation: 0-no, 1-90 degrees right, 2-90 degrees left */
+    uint16_t   rotation_flag;
+    /* Reserved for future use */
+    float      reserved[RELCAM_CALIB_RESERVED_MAX];
+} cam_related_system_calibration_data_t;
+
+typedef struct {
+  uint32_t default_sensor_flip;
+  uint32_t sensor_mount_angle;
+  cam_related_system_calibration_data_t otp_calibration_data;
+} cam_jpeg_metadata_t;
+
 /* capability struct definition for HAL 1*/
 typedef struct{
     cam_hal_version_t version;
@@ -218,8 +309,8 @@
     /* QCOM HDR specific control. Indicates number of frames and exposure needs for the frames */
     cam_hdr_bracketing_info_t hdr_bracketing_setting;
 
-    uint32_t qcom_supported_feature_mask; /* mask of qcom specific features supported:
-                                           * such as CAM_QCOM_FEATURE_SUPPORTED_FACE_DETECTION*/
+    cam_feature_mask_t qcom_supported_feature_mask; /* mask of qcom specific features supported:
+                                                     * such as CAM_QCOM_FEATURE_SUPPORTED_FACE_DETECTION*/
     cam_padding_info_t padding_info;      /* padding information from PP */
     uint32_t min_num_pp_bufs;             /* minimum number of buffers needed by postproc module */
     cam_format_t rdi_mode_stream_fmt;  /* stream format supported in rdi mode */
@@ -306,7 +397,6 @@
     cam_format_t supported_scalar_fmts[CAM_FORMAT_MAX];
 
     uint32_t max_face_detection_count;
-    uint8_t hw_analysis_supported;
 
     uint8_t histogram_supported;
     /* Number of histogram buckets supported */
@@ -385,18 +475,11 @@
      * timestamps from other sub-systems (gyro, accelerometer etc.) */
     uint8_t isTimestampCalibrated;
 
-    /* Analysis stream max supported size */
-    cam_dimension_t analysis_max_res;
-    /* Analysis stream padding info */
-    cam_padding_info_t analysis_padding_info;
     /* Max size supported by ISP viewfinder path */
     cam_dimension_t max_viewfinder_size;
 
-    /* Analysis recommended size */
-    cam_dimension_t analysis_recommended_res;
-
-    /* Analysis recommended format */
-    cam_format_t analysis_recommended_format;
+    /* Analysis buffer requirements */
+    cam_analysis_info_t analysis_info[CAM_ANALYSIS_INFO_MAX];
 
     /* This is set to 'true' if sensor cannot guarantee per frame control */
     /* Default value of this capability is 'false' indicating per-frame */
@@ -437,6 +520,9 @@
     /* supported instant capture/AEC convergence modes */
     size_t supported_instant_aec_modes_cnt;
     cam_aec_convergence_type supported_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
+
+    /* Dual cam calibration data */
+    cam_related_system_calibration_data_t related_cam_calibration;
 } cam_capability_t;
 
 typedef enum {
@@ -469,105 +555,6 @@
     uint32_t flip_mask;
 } cam_flip_mode_t;
 
-typedef enum {
-    /* start syncing for related cameras */
-    CAM_SYNC_RELATED_SENSORS_ON = 1,
-    /* stop syncing for related cameras */
-    CAM_SYNC_RELATED_SENSORS_OFF
-} cam_sync_related_sensors_control_t;
-
-typedef enum {
-    /* Driving camera of the related camera sub-system */
-    /* Certain features are enabled only for primary camera
-       such as display mode for preview, autofocus etc
-       In certain configurations for eg. when optical zoom
-       limit is reached, Aux Camera would become
-       the driving camera and there will be role switch.*/
-    CAM_MODE_PRIMARY = 0,
-    /* Non-driving camera of the related camera sub-system
-       no display mode set for secondary camera */
-    CAM_MODE_SECONDARY
-} cam_sync_mode_t;
-
-typedef enum {
-    /* Main camera of the related cam subsystem which controls
-       HW sync at sensor level*/
-    CAM_TYPE_MAIN = 0,
-    /* Aux camera of the related cam subsystem */
-    CAM_TYPE_AUX
-} cam_sync_type_t;
-
-/* Payload for sending bundling info to backend */
-typedef struct {
-    cam_sync_related_sensors_control_t sync_control;
-    cam_sync_type_t type;
-    cam_sync_mode_t mode;
-    /* session Id of the other camera session
-       Linking will be done with this session in the
-       backend */
-    uint32_t related_sensor_session_id;
-    uint8_t reserved;
-}cam_sync_related_sensors_event_info_t;
-
-/* Related camera sensor specific calibration data */
-typedef struct {
-    /* Focal length in pixels @ calibration resolution.*/
-    float       normalized_focal_length;
-    /* Native sensor resolution W that was used to capture calibration image */
-    uint16_t    native_sensor_resolution_width;
-    /* Native sensor resolution H that was used to capture calibration image */
-    uint16_t    native_sensor_resolution_height;
-    /* Image size W used internally by calibration tool */
-    uint16_t    calibration_sensor_resolution_width;
-    /* Image size H used internally by calibration tool */
-    uint16_t    calibration_sensor_resolution_height;
-    /* Focal length ratio @ Calibration */
-    float       focal_length_ratio;
-}cam_related_sensor_calibration_data_t;
-
-/* Related Camera System Calibration data
-   Calibration data for the entire related cam sub-system is
-   in a shared EEPROM. We have 2 fields which are specific to
-   each sensor followed by a set of common calibration of the
-   entire related cam system*/
-typedef struct {
-    /* Version information */
-    uint32_t    calibration_format_version;
-    /* Main Camera Sensor specific calibration */
-    cam_related_sensor_calibration_data_t  main_cam_specific_calibration;
-    /* Aux Camera Sensor specific calibration */
-    cam_related_sensor_calibration_data_t  aux_cam_specific_calibration;
-    /* Relative viewpoint matching matrix w.r.t Main */
-    float      relative_rotation_matrix[RELCAM_CALIB_ROT_MATRIX_MAX];
-    /* Relative geometric surface description parameters */
-    float      relative_geometric_surface_parameters[
-            RELCAM_CALIB_SURFACE_PARMS_MAX];
-    /* Relative offset of sensor center from optical axis along horizontal dimension */
-    float      relative_principle_point_x_offset;
-    /* Relative offset of sensor center from optical axis along vertical dimension */
-    float      relative_principle_point_y_offset;
-    /* 0=Main Camera is on the left of Aux; 1=Main Camera is on the right of Aux */
-    uint16_t   relative_position_flag;
-    /* Camera separation in mm */
-    float      relative_baseline_distance;
-    /* main sensor setting during cal: 0-none, 1-hor-mirror, 2-ver-flip, 3-both */
-    uint16_t   main_sensor_mirror_flip_setting;
-    /* aux sensor setting during cal: 0-none, 1-hor-mirror, 2-ver-flip, 3-both */
-    uint16_t   aux_sensor_mirror_flip_setting;
-    /* module orientation during cal: 0-sensors in landscape, 1-sensors in portrait */
-    uint16_t   module_orientation_during_calibration;
-    /* cal images required rotation: 0-no, 1-90 degrees right, 2-90 degrees left */
-    uint16_t   rotation_flag;
-    /* Reserved for future use */
-    float      reserved[RELCAM_CALIB_RESERVED_MAX];
-} cam_related_system_calibration_data_t;
-
-typedef struct {
-  uint32_t default_sensor_flip;
-  uint32_t sensor_mount_angle;
-  cam_related_system_calibration_data_t otp_calibration_data;
-} cam_jpeg_metadata_t;
-
 #define IMG_NAME_SIZE 32
 typedef struct {
     cam_rect_t crop;  /* crop info for the image */
@@ -833,7 +820,11 @@
     INCLUDE(CAM_INTF_META_EXIF_DEBUG_AWB,               cam_awb_exif_debug_t,        1);
     INCLUDE(CAM_INTF_META_EXIF_DEBUG_AF,                cam_af_exif_debug_t,         1);
     INCLUDE(CAM_INTF_META_EXIF_DEBUG_ASD,               cam_asd_exif_debug_t,        1);
-    INCLUDE(CAM_INTF_META_EXIF_DEBUG_STATS,             cam_stats_buffer_exif_debug_t, 1);
+    INCLUDE(CAM_INTF_META_EXIF_DEBUG_STATS,             cam_stats_buffer_exif_debug_t,   1);
+    INCLUDE(CAM_INTF_META_EXIF_DEBUG_BESTATS,           cam_bestats_buffer_exif_debug_t, 1);
+    INCLUDE(CAM_INTF_META_EXIF_DEBUG_BHIST,             cam_bhist_buffer_exif_debug_t,   1);
+    INCLUDE(CAM_INTF_META_EXIF_DEBUG_3A_TUNING,         cam_q3a_tuning_info_t,       1);
+    INCLUDE(CAM_INTF_META_ASD_SCENE_CAPTURE_TYPE,       cam_auto_scene_t,            1);
     INCLUDE(CAM_INTF_PARM_EFFECT,                       uint32_t,                    1);
     /* Defining as int32_t so that this array is 4 byte aligned */
     INCLUDE(CAM_INTF_META_PRIVATE_DATA,                 int32_t,
@@ -1003,6 +994,16 @@
 
     uint8_t is_statsdebug_stats_params_valid;
     cam_stats_buffer_exif_debug_t statsdebug_stats_buffer_data;
+
+    uint8_t is_statsdebug_bestats_params_valid;
+    cam_bestats_buffer_exif_debug_t statsdebug_bestats_buffer_data;
+
+    uint8_t is_statsdebug_bhist_params_valid;
+    cam_bhist_buffer_exif_debug_t statsdebug_bhist_data;
+
+    uint8_t is_statsdebug_3a_tuning_params_valid;
+    cam_q3a_tuning_info_t statsdebug_3a_tuning_data;
+
 } metadata_buffer_t;
 
 typedef metadata_buffer_t parm_buffer_t;
@@ -1024,6 +1025,9 @@
       meta->is_statsdebug_af_params_valid = 0;
       meta->is_statsdebug_asd_params_valid = 0;
       meta->is_statsdebug_stats_params_valid = 0;
+      meta->is_statsdebug_bestats_params_valid = 0;
+      meta->is_statsdebug_bhist_params_valid = 0;
+      meta->is_statsdebug_3a_tuning_params_valid = 0;
     }
 }
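The related-sensor sync enums and the bundling payload now sit near the top of cam_intf.h, and the payload carries an is_frame_sync_enabled flag. A hypothetical sketch of how a client could fill it to link two camera sessions; the helper name and the frame-sync choice are assumptions, not part of this patch:

#include <cstring>
#include "cam_intf.h"  // cam_sync_related_sensors_event_info_t (from this patch)

static cam_sync_related_sensors_event_info_t
make_link_info(uint32_t other_session_id, bool is_main)
{
    cam_sync_related_sensors_event_info_t info;
    memset(&info, 0, sizeof(info));
    info.sync_control = CAM_SYNC_RELATED_SENSORS_ON;              // start syncing
    info.type = is_main ? CAM_TYPE_MAIN : CAM_TYPE_AUX;           // role of this session
    info.mode = is_main ? CAM_MODE_PRIMARY : CAM_MODE_SECONDARY;
    info.related_sensor_session_id = other_session_id;            // peer session to link with
    info.is_frame_sync_enabled = 1;                               // assumption: frame sync on
    return info;
}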
 
diff --git a/QCamera2/stack/common/cam_types.h b/QCamera2/stack/common/cam_types.h
index 9539ef1..72ddba6 100644
--- a/QCamera2/stack/common/cam_types.h
+++ b/QCamera2/stack/common/cam_types.h
@@ -38,10 +38,12 @@
 #define MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES 8096
 #define AWB_DEBUG_DATA_SIZE               (45000)
 #define AEC_DEBUG_DATA_SIZE               (5000)
-#define AF_DEBUG_DATA_SIZE                (10000)
-#define AF_STATS_DEBUG_DATA_SIZE          (40000)
+#define AF_DEBUG_DATA_SIZE                (50000)
 #define ASD_DEBUG_DATA_SIZE               (100)
 #define STATS_BUFFER_DEBUG_DATA_SIZE      (75000)
+#define BESTATS_BUFFER_DEBUG_DATA_SIZE    (150000)
+#define BHIST_STATS_DEBUG_DATA_SIZE       (70000)
+#define TUNING_INFO_DEBUG_DATA_SIZE       (4)
 
 #define CEILING64(X) (((X) + 0x0003F) & 0xFFFFFFC0)
 #define CEILING32(X) (((X) + 0x0001F) & 0xFFFFFFE0)
@@ -142,6 +144,8 @@
 /* Defines the number of columns in the color correction matrix (CCM) */
 #define AWB_NUM_CCM_COLS (3)
 
+typedef uint64_t cam_feature_mask_t;
+
 typedef enum {
     CAM_HAL_V1 = 1,
     CAM_HAL_V3 = 3
@@ -556,9 +560,9 @@
 
 typedef struct {
     cam_hfr_mode_t mode;
-    cam_dimension_t dim;
-    uint8_t frame_skip;
-    uint8_t livesnapshot_sizes_tbl_cnt;                     /* livesnapshot sizes table size */
+    uint8_t dim_cnt;                                        /* hfr sizes table count */
+    cam_dimension_t dim[MAX_SIZES_CNT];                     /* hfr sizes table */
+    uint8_t livesnapshot_sizes_tbl_cnt;                     /* livesnapshot sizes table count */
     cam_dimension_t livesnapshot_sizes_tbl[MAX_SIZES_CNT];  /* livesnapshot sizes table */
 } cam_hfr_info_t;
 
@@ -1499,15 +1503,10 @@
     char awb_private_debug_data[AWB_DEBUG_DATA_SIZE];
 } cam_awb_exif_debug_t;
 
+/* AF debug data for exif*/
 typedef struct {
     int32_t af_debug_data_size;
-    int32_t haf_debug_data_size;
-    int32_t tof_debug_data_size;
-    int32_t dciaf_debug_data_size;
-    int32_t pdaf_debug_data_size;
     char af_private_debug_data[AF_DEBUG_DATA_SIZE];
-    int32_t af_stats_buffer_size;
-    char af_stats_private_debug_data[AF_STATS_DEBUG_DATA_SIZE];
 } cam_af_exif_debug_t;
 
 typedef struct {
@@ -1517,11 +1516,21 @@
 
 typedef struct {
     int32_t bg_stats_buffer_size;
-    int32_t bhist_stats_buffer_size;
     int32_t bg_config_buffer_size;
     char stats_buffer_private_debug_data[STATS_BUFFER_DEBUG_DATA_SIZE];
 } cam_stats_buffer_exif_debug_t;
 
+typedef struct {
+    int32_t be_stats_buffer_size;
+    int32_t be_config_buffer_size;
+    char bestats_buffer_private_debug_data[BESTATS_BUFFER_DEBUG_DATA_SIZE];
+} cam_bestats_buffer_exif_debug_t;
+
+typedef struct {
+    int32_t bhist_stats_buffer_size;
+    char bhist_private_debug_data[BHIST_STATS_DEBUG_DATA_SIZE];
+} cam_bhist_buffer_exif_debug_t;
+
 /* 3A version*/
 typedef struct {
     uint16_t major_version;
@@ -1531,6 +1540,11 @@
 } cam_q3a_version_t;
 
 typedef struct {
+    int32_t tuning_info_buffer_size;
+    char tuning_info_private_debug_data[TUNING_INFO_DEBUG_DATA_SIZE];
+} cam_q3a_tuning_info_t;
+
+typedef struct {
     uint32_t tuning_data_version;
     size_t tuning_sensor_data_size;
     size_t tuning_vfe_data_size;
@@ -1578,11 +1592,21 @@
    uint32_t max_buffers;
 } cam_buffer_info_t;
 
+typedef enum {
+    /* Standalone camera (won't be linked) */
+    CAM_TYPE_STANDALONE=0,
+    /* Main camera of the related cam subsystem which controls
+       HW sync at sensor level*/
+    CAM_TYPE_MAIN,
+    /* Aux camera of the related cam subsystem */
+    CAM_TYPE_AUX
+} cam_sync_type_t;
+
 typedef struct {
     cam_dimension_t stream_sizes[MAX_NUM_STREAMS];
     uint32_t num_streams;
     cam_stream_type_t type[MAX_NUM_STREAMS];
-    uint32_t postprocess_mask[MAX_NUM_STREAMS];
+    cam_feature_mask_t postprocess_mask[MAX_NUM_STREAMS];
     cam_buffer_info_t buffer_info;
     cam_is_type_t is_type;
     cam_hfr_mode_t hfr_mode;
@@ -1591,6 +1615,7 @@
     uint32_t min_stride;
     uint32_t min_scanline;
     uint8_t batch_size;
+    cam_sync_type_t sync_type;
 } cam_stream_size_info_t;
 
 
@@ -1709,6 +1734,14 @@
     uint8_t is_stats_buffer_exif_debug_valid;
     cam_stats_buffer_exif_debug_t stats_buffer_exif_debug_params;
 
+    /* BE stats buffer exif debug parameters */
+    uint8_t is_bestats_buffer_exif_debug_valid;
+    cam_bestats_buffer_exif_debug_t bestats_buffer_exif_debug_params;
+
+    /* Bhist exif debug parameters. */
+    uint8_t is_bhist_exif_debug_valid;
+    cam_bhist_buffer_exif_debug_t bhist_exif_debug_params;
+
     /* AWB parameters */
     uint8_t is_awb_params_valid;
     cam_awb_params_t awb_params;
@@ -1837,6 +1870,9 @@
     CAM_INTF_META_EXIF_DEBUG_AF,
     CAM_INTF_META_EXIF_DEBUG_ASD,
     CAM_INTF_META_EXIF_DEBUG_STATS,
+    CAM_INTF_META_EXIF_DEBUG_BESTATS,
+    CAM_INTF_META_EXIF_DEBUG_BHIST,
+    CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
     CAM_INTF_PARM_GET_CHROMATIX,
     CAM_INTF_PARM_SET_RELOAD_CHROMATIX,
     CAM_INTF_PARM_SET_AUTOFOCUSTUNING, /* 80 */
@@ -2314,40 +2350,40 @@
     int32_t step;
 } cam_control_range_t;
 
-#define CAM_QCOM_FEATURE_NONE            0U
-#define CAM_QCOM_FEATURE_FACE_DETECTION (1U<<0)
-#define CAM_QCOM_FEATURE_DENOISE2D      (1U<<1)
-#define CAM_QCOM_FEATURE_CROP           (1U<<2)
-#define CAM_QCOM_FEATURE_ROTATION       (1U<<3)
-#define CAM_QCOM_FEATURE_FLIP           (1U<<4)
-#define CAM_QCOM_FEATURE_HDR            (1U<<5)
-#define CAM_QCOM_FEATURE_REGISTER_FACE  (1U<<6)
-#define CAM_QCOM_FEATURE_SHARPNESS      (1U<<7)
-#define CAM_QCOM_FEATURE_VIDEO_HDR      (1U<<8)
-#define CAM_QCOM_FEATURE_CAC            (1U<<9)
-#define CAM_QCOM_FEATURE_SCALE          (1U<<10)
-#define CAM_QCOM_FEATURE_EFFECT         (1U<<11)
-#define CAM_QCOM_FEATURE_UBIFOCUS       (1U<<12)
-#define CAM_QCOM_FEATURE_CHROMA_FLASH   (1U<<13)
-#define CAM_QCOM_FEATURE_OPTIZOOM       (1U<<14)
-#define CAM_QCOM_FEATURE_SENSOR_HDR     (1U<<15)
-#define CAM_QCOM_FEATURE_REFOCUS        (1U<<16)
-#define CAM_QCOM_FEATURE_CPP_TNR        (1U<<17)
-#define CAM_QCOM_FEATURE_RAW_PROCESSING (1U<<18)
-#define CAM_QCOM_FEATURE_TRUEPORTRAIT   (1U<<19)
-#define CAM_QCOM_FEATURE_LLVD           (1U<<20)
-#define CAM_QCOM_FEATURE_DIS20          (1U<<21)
-#define CAM_QCOM_FEATURE_STILLMORE      (1U<<22)
-#define CAM_QCOM_FEATURE_DCRF           (1U<<23)
-#define CAM_QCOM_FEATURE_CDS            (1U<<24)
-#define CAM_QCOM_FEATURE_EZTUNE         (1U<<25)
-#define CAM_QCOM_FEATURE_DSDN           (1U<<26) //Special CDS in CPP block
-#define CAM_QCOM_FEATURE_SW2D           (1U<<27)
-#define CAM_OEM_FEATURE_1               (1U<<28)
-#define CAM_OEM_FEATURE_2               (1U<<29)
-#define CAM_QTI_FEATURE_SW_TNR          (1U<<30)
-#define CAM_QCOM_FEATURE_METADATA_PROCESSING (1U<<31)
-#define CAM_QCOM_FEATURE_MAX            32
+#define CAM_QCOM_FEATURE_NONE            (cam_feature_mask_t)0UL
+#define CAM_QCOM_FEATURE_FACE_DETECTION ((cam_feature_mask_t)1UL<<0)
+#define CAM_QCOM_FEATURE_DENOISE2D      ((cam_feature_mask_t)1UL<<1)
+#define CAM_QCOM_FEATURE_CROP           ((cam_feature_mask_t)1UL<<2)
+#define CAM_QCOM_FEATURE_ROTATION       ((cam_feature_mask_t)1UL<<3)
+#define CAM_QCOM_FEATURE_FLIP           ((cam_feature_mask_t)1UL<<4)
+#define CAM_QCOM_FEATURE_HDR            ((cam_feature_mask_t)1UL<<5)
+#define CAM_QCOM_FEATURE_REGISTER_FACE  ((cam_feature_mask_t)1UL<<6)
+#define CAM_QCOM_FEATURE_SHARPNESS      ((cam_feature_mask_t)1UL<<7)
+#define CAM_QCOM_FEATURE_VIDEO_HDR      ((cam_feature_mask_t)1UL<<8)
+#define CAM_QCOM_FEATURE_CAC            ((cam_feature_mask_t)1UL<<9)
+#define CAM_QCOM_FEATURE_SCALE          ((cam_feature_mask_t)1UL<<10)
+#define CAM_QCOM_FEATURE_EFFECT         ((cam_feature_mask_t)1UL<<11)
+#define CAM_QCOM_FEATURE_UBIFOCUS       ((cam_feature_mask_t)1UL<<12)
+#define CAM_QCOM_FEATURE_CHROMA_FLASH   ((cam_feature_mask_t)1UL<<13)
+#define CAM_QCOM_FEATURE_OPTIZOOM       ((cam_feature_mask_t)1UL<<14)
+#define CAM_QCOM_FEATURE_SENSOR_HDR     ((cam_feature_mask_t)1UL<<15)
+#define CAM_QCOM_FEATURE_REFOCUS        ((cam_feature_mask_t)1UL<<16)
+#define CAM_QCOM_FEATURE_CPP_TNR        ((cam_feature_mask_t)1UL<<17)
+#define CAM_QCOM_FEATURE_RAW_PROCESSING ((cam_feature_mask_t)1UL<<18)
+#define CAM_QCOM_FEATURE_TRUEPORTRAIT   ((cam_feature_mask_t)1UL<<19)
+#define CAM_QCOM_FEATURE_LLVD           ((cam_feature_mask_t)1UL<<20)
+#define CAM_QCOM_FEATURE_DIS20          ((cam_feature_mask_t)1UL<<21)
+#define CAM_QCOM_FEATURE_STILLMORE      ((cam_feature_mask_t)1UL<<22)
+#define CAM_QCOM_FEATURE_DCRF           ((cam_feature_mask_t)1UL<<23)
+#define CAM_QCOM_FEATURE_CDS            ((cam_feature_mask_t)1UL<<24)
+#define CAM_QCOM_FEATURE_EZTUNE         ((cam_feature_mask_t)1UL<<25)
+#define CAM_QCOM_FEATURE_DSDN           ((cam_feature_mask_t)1UL<<26) //Special CDS in CPP block
+#define CAM_QCOM_FEATURE_SW2D           ((cam_feature_mask_t)1UL<<27)
+#define CAM_OEM_FEATURE_1               ((cam_feature_mask_t)1UL<<28)
+#define CAM_OEM_FEATURE_2               ((cam_feature_mask_t)1UL<<29)
+#define CAM_QTI_FEATURE_SW_TNR          ((cam_feature_mask_t)1UL<<30)
+#define CAM_QCOM_FEATURE_METADATA_PROCESSING ((cam_feature_mask_t)1UL<<31)
+#define CAM_QCOM_FEATURE_PAAF           (((cam_feature_mask_t)1UL)<<32)
 #define CAM_QCOM_FEATURE_PP_SUPERSET    (CAM_QCOM_FEATURE_DENOISE2D|CAM_QCOM_FEATURE_CROP|\
                                          CAM_QCOM_FEATURE_ROTATION|CAM_QCOM_FEATURE_SHARPNESS|\
                                          CAM_QCOM_FEATURE_SCALE|CAM_QCOM_FEATURE_CAC|\
@@ -2465,7 +2501,7 @@
 
 typedef struct {
     /* reprocess feature mask */
-    uint32_t feature_mask;
+    cam_feature_mask_t feature_mask;
 
     /* individual setting for features to be reprocessed */
     cam_denoise_param_t denoise2d;
@@ -2657,6 +2693,33 @@
     CAM_MANUAL_CAPTURE_TYPE_4    /*Offline RAW processing with multiple RAW*/
 } cam_manual_capture_type;
 
+typedef enum {
+    CAM_ANALYSIS_INFO_FD_STILL,   /*Analysis requirements for STILL PREVIEW*/
+    CAM_ANALYSIS_INFO_FD_VIDEO,   /*Analysis requirements for VIDEO*/
+    CAM_ANALYSIS_INFO_PAAF,       /*Analysis requirements for PAAF*/
+    CAM_ANALYSIS_INFO_MAX,     /*Max number*/
+} cam_analysis_info_type;
+
+typedef struct {
+    /* Whether the information here is valid or not */
+    uint8_t valid;
+
+    /* Whether analysis supported by hw */
+    uint8_t hw_analysis_supported;
+
+    /* Analysis stream max supported size */
+    cam_dimension_t analysis_max_res;
+
+    /* Analysis stream padding info */
+    cam_padding_info_t analysis_padding_info;
+
+    /* Analysis format */
+    cam_format_t analysis_format;
+
+    /* Analysis recommended size */
+    cam_dimension_t analysis_recommended_res;
+} cam_analysis_info_t;
+
 /***********************************
 * ENUM definition for custom parameter type
 ************************************/
@@ -2664,4 +2727,5 @@
     CAM_CUSTOM_PARM_EXAMPLE,
     CAM_CUSTOM_PARM_MAX,
 } cam_custom_parm_type;
+
 #endif /* __QCAMERA_TYPES_H__ */
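The feature bits are rebuilt on top of the new 64-bit cam_feature_mask_t so that CAM_QCOM_FEATURE_PAAF can occupy bit 32; shifting a plain 32-bit 1 that far would be undefined. A standalone sketch of the idea, with the relevant definitions repeated locally:

#include <cstdint>
#include <cstdio>

typedef uint64_t cam_feature_mask_t;  // as typedef'd in cam_types.h above
#define CAM_QCOM_FEATURE_CROP  ((cam_feature_mask_t)1UL << 2)
#define CAM_QCOM_FEATURE_PAAF  (((cam_feature_mask_t)1UL) << 32)  // bit 32 needs a 64-bit base

int main()
{
    // Casting to cam_feature_mask_t before the shift keeps every bit position valid;
    // (1U << 32) on a 32-bit unsigned int would be undefined behaviour.
    cam_feature_mask_t mask = CAM_QCOM_FEATURE_CROP | CAM_QCOM_FEATURE_PAAF;
    printf("PAAF requested: %s\n", (mask & CAM_QCOM_FEATURE_PAAF) ? "yes" : "no");
    return 0;
}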
diff --git a/QCamera2/stack/common/mm_jpeg_interface.h b/QCamera2/stack/common/mm_jpeg_interface.h
index a00d940..73fc7ad 100644
--- a/QCamera2/stack/common/mm_jpeg_interface.h
+++ b/QCamera2/stack/common/mm_jpeg_interface.h
@@ -59,11 +59,17 @@
   cam_af_exif_debug_t af_debug_params;
   cam_asd_exif_debug_t asd_debug_params;
   cam_stats_buffer_exif_debug_t stats_debug_params;
+  cam_bestats_buffer_exif_debug_t bestats_debug_params;
+  cam_bhist_buffer_exif_debug_t bhist_debug_params;
+  cam_q3a_tuning_info_t q3a_tuning_debug_params;
   uint8_t ae_debug_params_valid;
   uint8_t awb_debug_params_valid;
   uint8_t af_debug_params_valid;
   uint8_t asd_debug_params_valid;
   uint8_t stats_debug_params_valid;
+  uint8_t bestats_debug_params_valid;
+  uint8_t bhist_debug_params_valid;
+  uint8_t q3a_tuning_debug_params_valid;
 } mm_jpeg_debug_exif_params_t;
 
 typedef struct {
diff --git a/QCamera2/stack/mm-camera-interface/inc/mm_camera.h b/QCamera2/stack/mm-camera-interface/inc/mm_camera.h
index 106af22..971ae74 100644
--- a/QCamera2/stack/mm-camera-interface/inc/mm_camera.h
+++ b/QCamera2/stack/mm-camera-interface/inc/mm_camera.h
@@ -361,6 +361,8 @@
     uint8_t matched;
     uint8_t expected_frame;
     uint32_t frame_idx;
+    /* unmatched meta idx needed in case of low priority queue */
+    uint32_t unmatched_meta_idx;
 } mm_channel_queue_node_t;
 
 typedef struct {
diff --git a/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c b/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
index a358dc6..8878514 100644
--- a/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
+++ b/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
@@ -2382,6 +2382,8 @@
     struct cam_list *pos = NULL;
     mm_channel_queue_node_t* super_buf = NULL;
 
+    (void)ch_obj;
+
     /* comp */
     pthread_mutex_lock(&queue->que.lock);
     head = &queue->que.head.list;
@@ -2745,7 +2747,7 @@
                 /* find a matched super buf, move to next one */
                 pos = pos->next;
                 continue;
-            } else if ( buf_info->frame_idx == super_buf->frame_idx
+            } else if (( buf_info->frame_idx == super_buf->frame_idx )
                     /*Pick metadata greater than available frameID*/
                     || ((queue->attr.priority == MM_CAMERA_SUPER_BUF_PRIORITY_LOW)
                     && (super_buf->super_buf[buf_s_idx].frame_idx == 0)
@@ -2755,11 +2757,20 @@
                     || ((queue->attr.priority == MM_CAMERA_SUPER_BUF_PRIORITY_LOW)
                     && (buf_info->buf->stream_type != CAM_STREAM_TYPE_METADATA)
                     && (super_buf->super_buf[buf_s_idx].frame_idx == 0)
-                    && (super_buf->frame_idx > buf_info->frame_idx))){
+                    && (super_buf->unmatched_meta_idx > buf_info->frame_idx))){
                /*super buffer frame IDs matching OR in low priority bundling
                metadata frameID greater than available super buffer frameID OR
                metadata frame closest to incoming frameID will be bundled*/
                 found_super_buf = 1;
+                /* If we are filling into a 'meta only' superbuf, make sure to reset
+                the super_buf frame_idx so that missing streams in this superbuf
+                are filled as per the matching frame id logic. Note that in the low
+                priority queue only the meta frame id need not match (closest
+                suffices); the other streams in this superbuf must have the same frame id. */
+                if (super_buf->unmatched_meta_idx > 0) {
+                    super_buf->unmatched_meta_idx = 0;
+                    super_buf->frame_idx = buf_info->frame_idx;
+                }
                 break;
             } else {
                 unmatched_bundles++;
@@ -2935,6 +2946,13 @@
                         pthread_mutex_unlock(&fs_lock);
                     }
                 }
+                /* In the low priority queue, this will become a 'meta only' superbuf. Set
+                unmatched_meta_idx so that the upcoming stream buffers (other than meta)
+                that are nearest to this idx can be filled into it. */
+                if ((queue->attr.priority == MM_CAMERA_SUPER_BUF_PRIORITY_LOW)
+                    && (buf_info->buf->stream_type == CAM_STREAM_TYPE_METADATA)) {
+                    new_buf->unmatched_meta_idx = buf_info->frame_idx;
+                }
             } else {
                 /* No memory */
                 if (NULL != new_buf) {
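The unmatched_meta_idx handling above lets a metadata-only superbuf in the low priority queue adopt the frame id of the first non-metadata buffer that arrives with a smaller frame id, after which the remaining streams must match exactly. A simplified standalone sketch of that rule; the struct and function are illustrative, not the channel code:

#include <cstdint>

struct superbuf_sketch {
    uint32_t frame_idx;           // frame id the bundle is pinned to
    uint32_t unmatched_meta_idx;  // non-zero while the bundle holds only metadata
};

static bool try_fill_low_priority(superbuf_sketch *sb, uint32_t incoming_frame_idx)
{
    if (sb->frame_idx == incoming_frame_idx) {
        return true;  // exact match, the usual case
    }
    if (sb->unmatched_meta_idx > incoming_frame_idx) {
        // Meta-only bundle whose metadata is ahead of this buffer: accept it,
        // clear the marker and pin the bundle to this buffer's frame id.
        sb->unmatched_meta_idx = 0;
        sb->frame_idx = incoming_frame_idx;
        return true;
    }
    return false;
}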
diff --git a/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c b/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
index 2e62858..632b7df 100644
--- a/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
+++ b/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
@@ -1251,8 +1251,10 @@
     struct msm_camera_user_buf_cont_t *cont_buf = NULL;
 
     if (buf->buf_type == CAM_STREAM_BUF_TYPE_USERPTR) {
+        pthread_mutex_lock(&my_obj->buf_lock);
         my_obj->buf_status[buf->buf_idx].buf_refcnt--;
         if (0 == my_obj->buf_status[buf->buf_idx].buf_refcnt) {
+            pthread_mutex_unlock(&my_obj->buf_lock);
             cont_buf = (struct msm_camera_user_buf_cont_t *)my_obj->buf[buf->buf_idx].buffer;
             cont_buf->buf_cnt = my_obj->buf[buf->buf_idx].user_buf.bufs_used;
             for (i = 0; i < (int32_t)cont_buf->buf_cnt; i++) {
@@ -1272,6 +1274,7 @@
         } else {
             LOGD("<DEBUG> : ref count pending count :%d idx = %d",
                  my_obj->buf_status[buf->buf_idx].buf_refcnt, buf->buf_idx);
+            pthread_mutex_unlock(&my_obj->buf_lock);
         }
         return rc;
     }
@@ -1308,8 +1311,10 @@
 
     if (my_obj->cur_bufs_staged
             == my_obj->buf[index].user_buf.bufs_used){
+        pthread_mutex_lock(&my_obj->buf_lock);
         my_obj->buf_status[index].buf_refcnt--;
         if (0 == my_obj->buf_status[index].buf_refcnt) {
+            pthread_mutex_unlock(&my_obj->buf_lock);
             cont_buf = (struct msm_camera_user_buf_cont_t *)my_obj->buf[index].buffer;
             cont_buf->buf_cnt = my_obj->buf[index].user_buf.bufs_used;
             for (i = 0; i < (int32_t)cont_buf->buf_cnt; i++) {
@@ -1331,6 +1336,7 @@
         }else{
             LOGD("<DEBUG> : ref count pending count :%d idx = %d",
                  my_obj->buf_status[index].buf_refcnt, index);
+            pthread_mutex_unlock(&my_obj->buf_lock);
         }
     }
 
@@ -1467,6 +1473,7 @@
             LOGH("Stopped poll on stream %p type: %d",
                 my_obj, my_obj->stream_info->stream_type);
         }
+        pthread_mutex_unlock(&my_obj->buf_lock);
         uint32_t idx = vb.index;
         buf_info->buf = &my_obj->buf[idx];
         buf_info->frame_idx = vb.sequence;
@@ -1492,7 +1499,6 @@
         if(buf_info->buf->buf_type == CAM_STREAM_BUF_TYPE_USERPTR) {
             mm_stream_read_user_buf(my_obj, buf_info);
         }
-        pthread_mutex_unlock(&my_obj->buf_lock);
 
         if ( NULL != my_obj->mem_vtbl.clean_invalidate_buf ) {
             rc = my_obj->mem_vtbl.clean_invalidate_buf(idx,
@@ -1685,6 +1691,7 @@
         LOGE("Cache invalidate op not added");
     }
 
+    pthread_mutex_lock(&my_obj->buf_lock);
     my_obj->queued_buffer_count++;
     if (1 == my_obj->queued_buffer_count) {
         /* Add fd to data poll thread */
@@ -1701,8 +1708,10 @@
                 my_obj, my_obj->stream_info->stream_type);
         }
     }
+    pthread_mutex_unlock(&my_obj->buf_lock);
 
     rc = ioctl(my_obj->fd, VIDIOC_QBUF, &buffer);
+    pthread_mutex_lock(&my_obj->buf_lock);
     if (0 > rc) {
         LOGE("VIDIOC_QBUF ioctl call failed on stream type %d (rc=%d): %s",
              my_obj->stream_info->stream_type, rc, strerror(errno));
@@ -1723,6 +1732,7 @@
                  buffer.index, buf->frame_idx, my_obj->stream_info->stream_type, rc,
                 my_obj->queued_buffer_count, buf->buf_type);
     }
+    pthread_mutex_unlock(&my_obj->buf_lock);
 
     return rc;
 }
@@ -2143,7 +2153,6 @@
         return rc;
     }
 
-    pthread_mutex_lock(&my_obj->buf_lock);
     my_obj->queued_buffer_count = 0;
     for(i = 0; i < my_obj->buf_num; i++){
         /* check if need to qbuf initially */
@@ -2162,7 +2171,6 @@
             my_obj->buf_status[i].in_kernel = 0;
         }
     }
-    pthread_mutex_unlock(&my_obj->buf_lock);
 
     return rc;
 }
@@ -2294,6 +2302,18 @@
     case CAM_FORMAT_Y_ONLY:
         val= V4L2_PIX_FMT_GREY;
         break;
+    case CAM_FORMAT_Y_ONLY_10_BPP:
+        val= V4L2_PIX_FMT_Y10;
+        break;
+    case CAM_FORMAT_Y_ONLY_12_BPP:
+        val= V4L2_PIX_FMT_Y12;
+        break;
+    case CAM_FORMAT_Y_ONLY_14_BPP:
+        /* No v4l2 format is defined yet for CAM_FORMAT_Y_ONLY_14_BPP */
+        /* val= V4L2_PIX_FMT_Y14; */
+        val = 0;
+        LOGE("Unknown fmt=%d", fmt);
+        break;
     case CAM_FORMAT_MAX:
         /* CAM_STREAM_TYPE_DEFAULT,
          * CAM_STREAM_TYPE_OFFLINE_PROC,
@@ -2339,6 +2359,10 @@
     switch (stream_info->fmt) {
     case CAM_FORMAT_YUV_420_NV12:
     case CAM_FORMAT_YUV_420_NV21:
+    case CAM_FORMAT_Y_ONLY:
+    case CAM_FORMAT_Y_ONLY_10_BPP:
+    case CAM_FORMAT_Y_ONLY_12_BPP:
+    case CAM_FORMAT_Y_ONLY_14_BPP:
         /* 2 planes: Y + CbCr */
         buf_planes->plane_info.num_planes = 2;
 
@@ -2675,6 +2699,10 @@
     switch (fmt) {
     case CAM_FORMAT_YUV_420_NV12:
     case CAM_FORMAT_YUV_420_NV21:
+    case CAM_FORMAT_Y_ONLY:
+    case CAM_FORMAT_Y_ONLY_10_BPP:
+    case CAM_FORMAT_Y_ONLY_12_BPP:
+    case CAM_FORMAT_Y_ONLY_14_BPP:
         /* 2 planes: Y + CbCr */
         buf_planes->plane_info.num_planes = 2;
 
@@ -2984,6 +3012,10 @@
     switch (fmt) {
     case CAM_FORMAT_YUV_420_NV12:
     case CAM_FORMAT_YUV_420_NV21:
+    case CAM_FORMAT_Y_ONLY:
+    case CAM_FORMAT_Y_ONLY_10_BPP:
+    case CAM_FORMAT_Y_ONLY_12_BPP:
+    case CAM_FORMAT_Y_ONLY_14_BPP:
         /* 2 planes: Y + CbCr */
         buf_planes->plane_info.num_planes = 2;
 
@@ -3343,18 +3375,22 @@
     case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GRBG:
     case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_RGGB:
     case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_BGGR:
+    case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GREY:
     case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG:
     case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GRBG:
     case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_RGGB:
     case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_BGGR:
+    case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GREY:
     case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GBRG:
     case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GRBG:
     case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_RGGB:
     case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GREY:
     case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GBRG:
     case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GRBG:
     case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_RGGB:
     case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GREY:
     case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GBRG:
     case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GRBG:
     case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_RGGB:
@@ -3382,10 +3418,12 @@
     case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GRBG:
     case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_RGGB:
     case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_BGGR:
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GREY:
     case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GBRG:
     case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GRBG:
     case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_RGGB:
     case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GREY:
         /* Every 12 pixels occupy 16 bytes */
         stride = (dim->width + 11)/12 * 12;
         stride_in_bytes = stride * 8 / 6;
@@ -3408,10 +3446,12 @@
     case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GRBG:
     case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_RGGB:
     case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_BGGR:
+    case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GREY:
     case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GBRG:
     case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GRBG:
     case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_RGGB:
     case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GREY:
         /* Every 10 pixels occupy 16 bytes */
         stride = (dim->width + 9)/10 * 10;
         stride_in_bytes = stride * 8 / 5;
@@ -3434,10 +3474,12 @@
     case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GRBG:
     case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_RGGB:
     case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_BGGR:
+    case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GREY:
     case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GBRG:
     case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GRBG:
     case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_RGGB:
     case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GREY:
         /* Every 64 pixels occupy 80 bytes */
         stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_4);
         stride_in_bytes = PAD_TO_SIZE(stride * 5 / 4, CAM_PAD_TO_8);
@@ -3460,10 +3502,12 @@
     case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GRBG:
     case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_RGGB:
     case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_BGGR:
+    case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GREY:
     case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GBRG:
     case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GRBG:
     case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_RGGB:
     case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GREY:
         /* Every 32 pixels occupy 48 bytes */
         stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_32);
         stride_in_bytes = stride * 3 / 2;
@@ -3520,10 +3564,12 @@
     case CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GRBG:
     case CAM_FORMAT_BAYER_MIPI_RAW_14BPP_RGGB:
     case CAM_FORMAT_BAYER_MIPI_RAW_14BPP_BGGR:
+    case CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GREY:
     case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GBRG:
     case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GRBG:
     case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_RGGB:
     case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GREY:
         /* Every 64 pixels occupy 112 bytes */
         stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_64);
         stride_in_bytes = stride * 7 / 4;
@@ -3546,10 +3592,12 @@
     case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GRBG:
     case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_RGGB:
     case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_BGGR:
+    case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GREY:
     case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GBRG:
     case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GRBG:
     case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_RGGB:
     case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GREY:
         /* Every 16 pixels occupy 32 bytes */
         stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
         stride_in_bytes = stride * 2;
@@ -3598,11 +3646,18 @@
 {
     int32_t rc = 0;
     int stride = 0, scanline = 0;
+
+    #ifdef UBWC_PRESENT
     int meta_stride = 0,meta_scanline = 0;
+    #endif
 
 
     switch (fmt) {
         case CAM_FORMAT_YUV_420_NV12:
+        case CAM_FORMAT_Y_ONLY:
+        case CAM_FORMAT_Y_ONLY_10_BPP:
+        case CAM_FORMAT_Y_ONLY_12_BPP:
+        case CAM_FORMAT_Y_ONLY_14_BPP:
             buf_planes->plane_info.num_planes = 2;
 
             stride = dim->width;
@@ -3965,6 +4020,9 @@
             CAM_PAD_TO_4K);
         break;
     case CAM_FORMAT_Y_ONLY:
+    case CAM_FORMAT_Y_ONLY_10_BPP:
+    case CAM_FORMAT_Y_ONLY_12_BPP:
+    case CAM_FORMAT_Y_ONLY_14_BPP:
         buf_planes->plane_info.num_planes = 1;
 
         buf_planes->plane_info.mp[0].len =
@@ -4383,16 +4441,22 @@
           my_obj->my_hdl, my_obj->fd, my_obj->state);
 
     pthread_mutex_lock(&my_obj->buf_lock);
-    if (my_obj->stream_info->streaming_mode == CAM_STREAMING_MODE_BATCH) {
-        rc = mm_stream_write_user_buf(my_obj, frame);
-    } else if(my_obj->buf_status[frame->buf_idx].buf_refcnt == 0) {
-        LOGD("Error Trying to free second time?(idx=%d) count=%d\n",
+    if(my_obj->buf_status[frame->buf_idx].buf_refcnt == 0) {
+        LOGE("Error Trying to free second time?(idx=%d) count=%d\n",
                     frame->buf_idx,
                    my_obj->buf_status[frame->buf_idx].buf_refcnt);
+        pthread_mutex_unlock(&my_obj->buf_lock);
         rc = -1;
+        return rc;
+    }
+    pthread_mutex_unlock(&my_obj->buf_lock);
+    if (my_obj->stream_info->streaming_mode == CAM_STREAMING_MODE_BATCH) {
+        rc = mm_stream_write_user_buf(my_obj, frame);
     } else {
+        pthread_mutex_lock(&my_obj->buf_lock);
         my_obj->buf_status[frame->buf_idx].buf_refcnt--;
         if (0 == my_obj->buf_status[frame->buf_idx].buf_refcnt) {
+            pthread_mutex_unlock(&my_obj->buf_lock);
             LOGD("<DEBUG> : Buf done for buffer:%d, stream:%d", frame->buf_idx, frame->stream_type);
             rc = mm_stream_qbuf(my_obj, frame);
             if(rc < 0) {
@@ -4406,9 +4470,9 @@
                  my_obj->buf_status[frame->buf_idx].buf_refcnt);
             LOGD("<DEBUG> : for buffer:%p:%d",
                  my_obj, frame->buf_idx);
+            pthread_mutex_unlock(&my_obj->buf_lock);
         }
     }
-    pthread_mutex_unlock(&my_obj->buf_lock);
     return rc;
 }
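The mm_camera_stream.c hunks converge on one locking rule: buf_lock protects only the refcount and queued-buffer bookkeeping and is released before any blocking call such as VIDIOC_QBUF. A minimal sketch of that pattern with illustrative names, not the HAL's own types:

#include <pthread.h>

struct buf_entry { int refcnt; };

static int requeue_when_free(struct buf_entry *b, pthread_mutex_t *buf_lock,
                             int (*qbuf)(struct buf_entry *))
{
    pthread_mutex_lock(buf_lock);
    if (b->refcnt == 0) {              // double buf-done: report it under the lock
        pthread_mutex_unlock(buf_lock);
        return -1;
    }
    b->refcnt--;
    int requeue_now = (b->refcnt == 0);
    pthread_mutex_unlock(buf_lock);    // never hold buf_lock across the ioctl

    return requeue_now ? qbuf(b) : 0;
}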
 
diff --git a/QCamera2/stack/mm-camera-test/src/mm_qcamera_preview.c b/QCamera2/stack/mm-camera-test/src/mm_qcamera_preview.c
index 5a5da5d..2072f20 100644
--- a/QCamera2/stack/mm-camera-test/src/mm_qcamera_preview.c
+++ b/QCamera2/stack/mm-camera-test/src/mm_qcamera_preview.c
@@ -562,7 +562,8 @@
                                                const cam_dimension_t* preview_dim)
 {
     cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
-    cam_dimension_t max_analysis_dim = cam_cap->analysis_max_res;
+    cam_dimension_t max_analysis_dim =
+        cam_cap->analysis_info[CAM_ANALYSIS_INFO_FD_STILL].analysis_max_res;
     cam_dimension_t analysis_dim = {0, 0};
 
     if (preview_dim->width > max_analysis_dim.width ||
@@ -637,7 +638,8 @@
     stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
     stream->s_config.stream_info->fmt = DEFAULT_PREVIEW_FORMAT;
     stream->s_config.stream_info->dim = analysis_dim;
-    stream->s_config.padding_info = cam_cap->analysis_padding_info;
+    stream->s_config.padding_info =
+        cam_cap->analysis_info[CAM_ANALYSIS_INFO_FD_STILL].analysis_padding_info;
 
     rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
     if (MM_CAMERA_OK != rc) {
diff --git a/QCamera2/util/QCameraCommon.cpp b/QCamera2/util/QCameraCommon.cpp
new file mode 100644
index 0000000..0cc2654
--- /dev/null
+++ b/QCamera2/util/QCameraCommon.cpp
@@ -0,0 +1,226 @@
+/* Copyright (c) 2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCameraCommon"
+
+// System dependencies
+#include <utils/Errors.h>
+#include <stdlib.h>
+#include <string.h>
+#include <utils/Log.h>
+
+// Camera dependencies
+#include "QCameraCommon.h"
+
+using namespace android;
+
+namespace qcamera {
+
+#ifndef TRUE
+#define TRUE 1
+#endif
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+/*===========================================================================
+ * FUNCTION   : QCameraCommon
+ *
+ * DESCRIPTION: default constructor of QCameraCommon
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraCommon::QCameraCommon() :
+    m_pCapability(NULL)
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraCommon
+ *
+ * DESCRIPTION: destructor of QCameraCommon
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraCommon::~QCameraCommon()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : init
+ *
+ * DESCRIPTION: Init function for QCameraCommon
+ *
+ * PARAMETERS :
+ *   @pCapability : Capabilities
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCommon::init(cam_capability_t *pCapability)
+{
+    m_pCapability = pCapability;
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : calculateLCM
+ *
+ * DESCRIPTION: Get the LCM of 2 numbers
+ *
+ * PARAMETERS :
+ *   @num1   : First number
+ *   @num2   : Second number
+ *
+ * RETURN     : uint32_t type (LCM)
+ *
+ *==========================================================================*/
+uint32_t QCameraCommon::calculateLCM(int32_t num1, int32_t num2)
+{
+   uint32_t lcm = 0;
+   uint32_t temp = 0;
+
+   if ((num1 < 1) && (num2 < 1)) {
+       return 0;
+   } else if (num1 < 1) {
+       return num2;
+   } else if (num2 < 1) {
+       return num1;
+   }
+
+   if (num1 > num2) {
+       lcm = num1;
+   } else {
+       lcm = num2;
+   }
+   temp = lcm;
+
+   while (1) {
+       if (((lcm % num1) == 0) && ((lcm % num2) == 0)) {
+           break;
+       }
+       lcm += temp;
+   }
+   return lcm;
+}
+
+/*===========================================================================
+ * FUNCTION   : getAnalysisInfo
+ *
+ * DESCRIPTION: Get the Analysis information based on
+ *     current mode and feature mask
+ *
+ * PARAMETERS :
+ *   @fdVideoEnabled : Whether fdVideo enabled currently
+ *   @hal3           : Whether hal3 or hal1
+ *   @featureMask    : Feature mask
+ *   @pAnalysis_info : Analysis info to be filled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCommon::getAnalysisInfo(
+        bool fdVideoEnabled,
+        bool hal3,
+        cam_feature_mask_t featureMask,
+        cam_analysis_info_t *pAnalysisInfo)
+{
+    if (!pAnalysisInfo) {
+        return BAD_VALUE;
+    }
+
+    pAnalysisInfo->valid = 0;
+
+    if ((fdVideoEnabled == TRUE) && (hal3 == FALSE) &&
+            (m_pCapability->analysis_info[CAM_ANALYSIS_INFO_FD_VIDEO].hw_analysis_supported) &&
+            (m_pCapability->analysis_info[CAM_ANALYSIS_INFO_FD_VIDEO].valid)) {
+        *pAnalysisInfo =
+                m_pCapability->analysis_info[CAM_ANALYSIS_INFO_FD_VIDEO];
+    } else if (m_pCapability->analysis_info[CAM_ANALYSIS_INFO_FD_STILL].valid) {
+        *pAnalysisInfo =
+                m_pCapability->analysis_info[CAM_ANALYSIS_INFO_FD_STILL];
+        if (hal3 == TRUE) {
+            pAnalysisInfo->analysis_max_res = pAnalysisInfo->analysis_recommended_res;
+        }
+    }
+
+    if ((featureMask & CAM_QCOM_FEATURE_PAAF) &&
+      (m_pCapability->analysis_info[CAM_ANALYSIS_INFO_PAAF].valid)) {
+        cam_analysis_info_t *pPaafInfo =
+          &m_pCapability->analysis_info[CAM_ANALYSIS_INFO_PAAF];
+
+        if (!pAnalysisInfo->valid) {
+            *pAnalysisInfo = *pPaafInfo;
+        } else {
+            pAnalysisInfo->analysis_max_res.width =
+                MAX(pAnalysisInfo->analysis_max_res.width,
+                pPaafInfo->analysis_max_res.width);
+            pAnalysisInfo->analysis_max_res.height =
+                MAX(pAnalysisInfo->analysis_max_res.height,
+                pPaafInfo->analysis_max_res.height);
+            pAnalysisInfo->analysis_padding_info.height_padding =
+                calculateLCM(pAnalysisInfo->analysis_padding_info.height_padding,
+                pPaafInfo->analysis_padding_info.height_padding);
+            pAnalysisInfo->analysis_padding_info.width_padding =
+                calculateLCM(pAnalysisInfo->analysis_padding_info.width_padding,
+                pPaafInfo->analysis_padding_info.width_padding);
+            pAnalysisInfo->analysis_padding_info.plane_padding =
+                calculateLCM(pAnalysisInfo->analysis_padding_info.plane_padding,
+                pPaafInfo->analysis_padding_info.plane_padding);
+            pAnalysisInfo->analysis_padding_info.min_stride =
+                MAX(pAnalysisInfo->analysis_padding_info.min_stride,
+                pPaafInfo->analysis_padding_info.min_stride);
+            pAnalysisInfo->analysis_padding_info.min_stride =
+                ALIGN(pAnalysisInfo->analysis_padding_info.min_stride,
+                pAnalysisInfo->analysis_padding_info.width_padding);
+
+            pAnalysisInfo->analysis_padding_info.min_scanline =
+                MAX(pAnalysisInfo->analysis_padding_info.min_scanline,
+                pPaafInfo->analysis_padding_info.min_scanline);
+            pAnalysisInfo->analysis_padding_info.min_scanline =
+                ALIGN(pAnalysisInfo->analysis_padding_info.min_scanline,
+                pAnalysisInfo->analysis_padding_info.height_padding);
+
+            pAnalysisInfo->hw_analysis_supported |=
+                pPaafInfo->hw_analysis_supported;
+        }
+    }
+
+    return pAnalysisInfo->valid ? NO_ERROR : BAD_VALUE;
+}
+
+}; // namespace qcamera
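A hypothetical caller of the new helper class, assuming a cam_capability_t already populated by the backend; it only shows the intended call pattern. Inside getAnalysisInfo(), padding requirements from the FD and PAAF entries are merged with calculateLCM(), e.g. calculateLCM(32, 48) == 96.

#include <cstdint>
#include "QCameraCommon.h"

using namespace qcamera;

static int32_t query_fd_analysis(cam_capability_t *caps, cam_analysis_info_t *out)
{
    QCameraCommon common;
    common.init(caps);

    // HAL1 path (hal3 == false), face detection on video disabled, PAAF requested.
    return common.getAnalysisInfo(false /*fdVideoEnabled*/, false /*hal3*/,
                                  CAM_QCOM_FEATURE_PAAF, out);
}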
diff --git a/QCamera2/util/QCameraCommon.h b/QCamera2/util/QCameraCommon.h
new file mode 100644
index 0000000..844b087
--- /dev/null
+++ b/QCamera2/util/QCameraCommon.h
@@ -0,0 +1,61 @@
+/* Copyright (c) 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_COMMON_H__
+#define __QCAMERA_COMMON_H__
+
+// Camera dependencies
+#include "cam_types.h"
+#include "cam_intf.h"
+
+namespace qcamera {
+
+#define ALIGN(a, b) (((a) + (b)) & ~(b))
+#define MAX(a, b) ((a) > (b) ? (a) : (b))
+
+class QCameraCommon {
+public:
+    QCameraCommon();
+    ~QCameraCommon();
+
+    int32_t init(cam_capability_t *cap);
+
+    int32_t getAnalysisInfo(
+        bool fdVideoEnabled, bool hal3, cam_feature_mask_t featureMask,
+        cam_analysis_info_t *pAnalysisInfo);
+    static uint32_t calculateLCM(int32_t num1, int32_t num2);
+
+private:
+    cam_capability_t *m_pCapability;
+
+};
+
+}; // namespace qcamera
+#endif /* __QCAMERA_COMMON_H__ */
+