msmcobalt: Update to LA.UM.5.7_RB1.07.00.00.251.006

msmcobalt: from hardware/qcom/camera
  ca24979 Merge "Merge AU_LINUX_ANDROID_LA.UM.5.7.R1.07.00.00.253.042 on remote branch" into LA.UM.5.7_rb1.4
  03dce79 HAL-1: Set the HAL state appropriately when backend notifies error
  a8a2835 Merge AU_LINUX_ANDROID_LA.UM.5.7.R1.07.00.00.253.042 on remote branch
  5b0c894 Promotion of camera.lnx.1.0-00154.
  860df7d QCamera2: FOV-control interface with Spatial alignment block
  7d39636 Merge "QCamera2:test: Bug fix to display current saturation value" into camera.lnx.1.0-dev.1.0
  f082eac Merge "Add msm8998 to eventually replace msmcobalt. CRs-Fixed: 1087377" into camera.lnx.1.0-dev.1.0
  ed09f86 Merge "QCamera2: HAL1: Provide rotation info in meta info." into camera.lnx.1.0-dev.1.0
  b7d34e1 Merge "QCamera2: Add new metadata for Spatial Alignment Compute output" into camera.lnx.1.0-dev.1.0
  06957fb Merge "QCamera2: HAL1: Changes to enable Low Power mode in dual camera." into camera.lnx.1.0-dev.1.0
  918496a Merge "QCamera2: Changes to clean-up dual camera snapshot." into camera.lnx.1.0-dev.1.0
  0afbb04 Merge "QCamera2: HAL: Add Spatial transform feature mask" into camera.lnx.1.0-dev.1.0
  b5cafe4 Merge "camera: HAL: Add margins to stream size definition" into camera.lnx.1.0-dev.1.0
  a983ed9 Promotion of camera.lnx.1.0-00152.
  77e0147 Merge "QCamera2: HAL1: Enable CDS feature mask for postview Stream" into camera.lnx.1.0-dev.1.0
  5aa82cd Merge "QCamera: Enable IR Mode Auto" into camera.lnx.1.0-dev.1.0
  cddb024 Merge "QCamera2: HAL3: Adding SVHDR to PP feature mask." into camera.lnx.1.0-dev.1.0
  1b5dbba Merge "QCamera2: Restrict template to maximum of 30 fps." into camera.lnx.1.0-dev.1.0
  282f572 Merge "QCamera2: Changes to release video duped FD's." into camera.lnx.1.0-dev.1.0
  c3103ed Merge "QCamera2: HAF2.0 : VM related change" into camera.lnx.1.0-dev.1.0
  3d6c99b Merge "QCamera2: Add Camscope functionality to the HAL" into camera.lnx.1.0-dev.1.0
  4ee41c5 QCamera2: Add new metadata for Spatial Alignment Compute output
  6da4fb5 QCamera2: HAL: Add Spatial transform feature mask
  05477d4 Merge "QCamera2: Add Setprop to disable Preview TNR" into camera.lnx.1.0-dev.1.0
  b59f5fc Add msm8998 to eventually replace msmcobalt. CRs-Fixed: 1087377
  234bd8d QCamera2: HAL1: Provide rotation info in meta info.
  15945c2 Promotion of camera.lnx.1.0-00146.
  3d9d245 QCamera2: Add Camscope functionality to the HAL
  5588917 Merge "QCamera2: HAL3: Update the max size for CPP bypass" into camera.lnx.1.0-dev.1.0
  161e7c1 Merge "QCamera2: HAL3: Enable HDR snapshot support" into camera.lnx.1.0-dev.1.0
  721908c Merge "QCamera2:HAL1: Deleting channel if add stream fails." into camera.lnx.1.0-dev.1.0
  d0cda11 Merge "QCamera2:HAL1.0: Add validation to cleanup gralloc memory." into camera.lnx.1.0-dev.1.0
  7bac37e Merge "QCamera2: HAL3: Disable PAAF for Callback stream" into camera.lnx.1.0-dev.1.0
  482bfc3 Merge "QCamera2: Fix race condition with handleCameraDeviceError." into camera.lnx.1.0-dev.1.0
  4173056 camera: HAL: Add margins to stream size definition
  77116b5 QCamera2: HAL1: Changes to enable Low Power mode in dual camera.
  c9dd09c QCamera2: Changes to clean-up dual camera snapshot.
  89114ea QCamera2: HAL3: Update the max size for CPP bypass
  66866b1 QCamera2:test: Bug fix to display current saturation value
  4187881 QCamera2: HAL3: Enable HDR snapshot support
  dc16aa2 QCamera2: HAL3: Adding SVHDR to PP feature mask.
  414a270 QCamera2: Add Setprop to disable Preview TNR
  aff499c QCamera2:HAL1: Deleting channel if add stream fails.
  93ced79 QCamera2: Restrict template to maximum of 30 fps.
  d7a9681 QCamera2: HAL1: Enable CDS feature mask for postview Stream
  c6c8955 QCamera: Enable IR Mode Auto
  4f36254 QCamera2:HAL1.0: Add validation to cleanup gralloc memory.
  17b45a0 QCamera2: Changes to release video duped FD's.
  75cf1fa QCamera2: HAF2.0 : VM related change
  5ed32fc QCamera2: Fix race condition with handleCameraDeviceError.
  c234939 QCamera2: HAL3: Disable PAAF for Callback stream

Bug: 33405777
Change-Id: Idf806f076e3e587f21fb63a9f553086c64504910
Signed-off-by: Thierry Strudel <tstrudel@google.com>
diff --git a/msmcobalt/Android.mk b/msmcobalt/Android.mk
new file mode 100644
index 0000000..2c330c3
--- /dev/null
+++ b/msmcobalt/Android.mk
@@ -0,0 +1,28 @@
+MM_V4L2_DRIVER_LIST += msm8960
+MM_V4L2_DRIVER_LIST += msm8974
+MM_V4L2_DRIVER_LIST += msm8916
+MM_V4L2_DRIVER_LIST += msm8226
+MM_V4L2_DRIVER_LIST += msm8610
+MM_V4L2_DRIVER_LIST += apq8084
+MM_V4L2_DRIVER_LIST += mpq8092
+MM_V4L2_DRIVER_LIST += msm_bronze
+MM_V4L2_DRIVER_LIST += msm8916
+MM_V4L2_DRIVER_LIST += msm8994
+MM_V4L2_DRIVER_LIST += msm8084
+MM_V4L2_DRIVER_LIST += msm8909
+MM_V4L2_DRIVER_LIST += msm8952
+MM_V4L2_DRIVER_LIST += msm8996
+MM_V4L2_DRIVER_LIST += msm8992
+MM_V4L2_DRIVER_LIST += msm8937
+MM_V4L2_DRIVER_LIST += msm8953
+MM_V4L2_DRIVER_LIST += msm8998
+MM_V4L2_DRIVER_LIST += msmcobalt
+MM_V4L2_DRIVER_LIST += msmfalcon
+
+ifneq (,$(filter $(MM_V4L2_DRIVER_LIST),$(TARGET_BOARD_PLATFORM)))
+  ifneq ($(strip $(USE_CAMERA_STUB)),true)
+    ifneq ($(BUILD_TINY_ANDROID),true)
+      include $(call all-subdir-makefiles)
+    endif
+  endif
+endif
diff --git a/msmcobalt/QCamera2/Android.mk b/msmcobalt/QCamera2/Android.mk
index 935ea6f..60be149 100644
--- a/msmcobalt/QCamera2/Android.mk
+++ b/msmcobalt/QCamera2/Android.mk
@@ -22,6 +22,8 @@
         util/QCameraPerf.cpp \
         util/QCameraQueue.cpp \
         util/QCameraCommon.cpp \
+        util/QCameraTrace.cpp \
+        util/camscope_packet_type.cpp \
         QCamera2Hal.cpp \
         QCamera2Factory.cpp
 
@@ -73,9 +75,6 @@
 LOCAL_CFLAGS += -DUSE_HAL_3_3
 endif
 
-#Enable to simulate B+B snapshot use case. Will be removed later
-#LOCAL_CFLAGS += -DDUAL_CAM_TEST
-
 #use media extension
 ifeq ($(TARGET_USES_MEDIA_EXTENSIONS), true)
 LOCAL_CFLAGS += -DUSE_MEDIA_EXTENSIONS
@@ -112,11 +111,11 @@
 LOCAL_CFLAGS += -DTARGET_TS_MAKEUP
 LOCAL_C_INCLUDES += $(LOCAL_PATH)/HAL/tsMakeuplib/include
 endif
-ifneq (,$(filter msm8974 msm8916 msm8226 msm8610 msm8916 apq8084 msm8084 msm8994 msm8992 msm8952 msm8937 msm8953 msm8996 msmcobalt msmfalcon, $(TARGET_BOARD_PLATFORM)))
+ifneq (,$(filter msm8974 msm8916 msm8226 msm8610 msm8916 apq8084 msm8084 msm8994 msm8992 msm8952 msm8937 msm8953 msm8996 msmcobalt msmfalcon msm8998, $(TARGET_BOARD_PLATFORM)))
     LOCAL_CFLAGS += -DVENUS_PRESENT
 endif
 
-ifneq (,$(filter msm8996 msmcobalt msmfalcon,$(TARGET_BOARD_PLATFORM)))
+ifneq (,$(filter msm8996 msmcobalt msmfalcon msm8998,$(TARGET_BOARD_PLATFORM)))
     LOCAL_CFLAGS += -DUBWC_PRESENT
 endif
 
diff --git a/msmcobalt/QCamera2/HAL/QCamera2HWI.cpp b/msmcobalt/QCamera2/HAL/QCamera2HWI.cpp
index 5d24915..b5735ab 100644
--- a/msmcobalt/QCamera2/HAL/QCamera2HWI.cpp
+++ b/msmcobalt/QCamera2/HAL/QCamera2HWI.cpp
@@ -132,7 +132,7 @@
 int QCamera2HardwareInterface::set_preview_window(struct camera_device *device,
         struct preview_stream_ops *window)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_SET_PREVIEW_WINDOW);
     int rc = NO_ERROR;
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
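The ATRACE_CALL() to ATRACE_CAMSCOPE_CALL(<event id>) substitution above is repeated for every HAL1 entry point in this file, tagging each trace span with a CamScope event ID. A minimal sketch of what such a scoped macro could expand to is shown below; the begin/end hooks and the *_SKETCH macro name are illustrative stand-ins, since the real QCameraTrace/camscope_packet_type implementation is not part of this hunk.

    // Sketch only: a RAII helper that emits a begin event on entry and the
    // matching end event when the scope unwinds, so one macro per function
    // is enough. The stub hooks below are not HAL APIs.
    #include <cstdio>

    static void camscope_begin_stub(int id) { std::printf("B %d\n", id); }
    static void camscope_end_stub(int id)   { std::printf("E %d\n", id); }

    class ScopedCamscopeTrace {
    public:
        explicit ScopedCamscopeTrace(int id) : mId(id) { camscope_begin_stub(mId); }
        ~ScopedCamscopeTrace()                          { camscope_end_stub(mId);  }
    private:
        int mId;
    };

    // One possible expansion: begin now, end automatically on any return path.
    #define ATRACE_CAMSCOPE_CALL_SKETCH(id) ScopedCamscopeTrace _camscope_tracer(id)
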
@@ -177,7 +177,7 @@
         camera_request_memory get_memory,
         void *user)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_SET_CALLBACKS);
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
     if (!hw) {
@@ -217,7 +217,7 @@
  *==========================================================================*/
 void QCamera2HardwareInterface::enable_msg_type(struct camera_device *device, int32_t msg_type)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_ENABLE_MSG_TYPE);
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
     if (!hw) {
@@ -250,7 +250,7 @@
  *==========================================================================*/
 void QCamera2HardwareInterface::disable_msg_type(struct camera_device *device, int32_t msg_type)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_DISABLE_MSG_TYPE);
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
     if (!hw) {
@@ -284,7 +284,7 @@
  *==========================================================================*/
 int QCamera2HardwareInterface::msg_type_enabled(struct camera_device *device, int32_t msg_type)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_MSG_TYPE_ENABLED);
     int ret = NO_ERROR;
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
@@ -321,7 +321,7 @@
  *==========================================================================*/
 int QCamera2HardwareInterface::prepare_preview(struct camera_device *device)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_PREPARE_PREVIEW);
     int ret = NO_ERROR;
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
@@ -359,7 +359,7 @@
  *==========================================================================*/
 int QCamera2HardwareInterface::start_preview(struct camera_device *device)
 {
-    KPI_ATRACE_CALL();
+    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_START_PREVIEW);
     int ret = NO_ERROR;
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
@@ -400,7 +400,7 @@
  *==========================================================================*/
 void QCamera2HardwareInterface::stop_preview(struct camera_device *device)
 {
-    KPI_ATRACE_CALL();
+    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_STOP_PREVIEW);
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
     if (!hw) {
@@ -438,7 +438,7 @@
  *==========================================================================*/
 int QCamera2HardwareInterface::preview_enabled(struct camera_device *device)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_PREVIEW_ENABLED);
     int ret = NO_ERROR;
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
@@ -518,7 +518,7 @@
  *==========================================================================*/
 int QCamera2HardwareInterface::restart_start_preview(struct camera_device *device)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_RESTART_START_PREVIEW);
     int ret = NO_ERROR;
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
@@ -562,7 +562,7 @@
  *==========================================================================*/
 int QCamera2HardwareInterface::restart_stop_preview(struct camera_device *device)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_RESTART_STOP_PREVIEW);
     int ret = NO_ERROR;
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
@@ -605,7 +605,7 @@
  *==========================================================================*/
 int QCamera2HardwareInterface::pre_start_recording(struct camera_device *device)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_PRE_START_RECORDING);
     int ret = NO_ERROR;
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
@@ -641,7 +641,7 @@
  *==========================================================================*/
 int QCamera2HardwareInterface::start_recording(struct camera_device *device)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_START_RECORDING);
     int ret = NO_ERROR;
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
@@ -691,7 +691,7 @@
  *==========================================================================*/
 void QCamera2HardwareInterface::stop_recording(struct camera_device *device)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_STOP_RECORDING);
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
     if (!hw) {
@@ -727,7 +727,7 @@
  *==========================================================================*/
 int QCamera2HardwareInterface::recording_enabled(struct camera_device *device)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_RECORDING_ENABLED);
     int ret = NO_ERROR;
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
@@ -763,7 +763,7 @@
 void QCamera2HardwareInterface::release_recording_frame(
             struct camera_device *device, const void *opaque)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_REL_REC_FRAME);
     int32_t ret = NO_ERROR;
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
@@ -838,7 +838,7 @@
  *==========================================================================*/
 int QCamera2HardwareInterface::cancel_auto_focus(struct camera_device *device)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_CANCEL_AF);
     int ret = NO_ERROR;
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
@@ -874,7 +874,7 @@
  *==========================================================================*/
 int QCamera2HardwareInterface::pre_take_picture(struct camera_device *device)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_PRE_TAKE_PICTURE);
     int ret = NO_ERROR;
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
@@ -910,7 +910,7 @@
  *==========================================================================*/
 int QCamera2HardwareInterface::take_picture(struct camera_device *device)
 {
-    KPI_ATRACE_CALL();
+    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_TAKE_PICTURE);
     int ret = NO_ERROR;
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
@@ -1027,7 +1027,7 @@
  *==========================================================================*/
 int QCamera2HardwareInterface::cancel_picture(struct camera_device *device)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_CANCEL_PICTURE);
     int ret = NO_ERROR;
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
@@ -1066,7 +1066,7 @@
 int QCamera2HardwareInterface::set_parameters(struct camera_device *device,
                                               const char *parms)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_SET_PARAMETERS);
     int ret = NO_ERROR;
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
@@ -1134,7 +1134,7 @@
  *==========================================================================*/
 int QCamera2HardwareInterface::stop_after_set_params(struct camera_device *device)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_STOP_AFTER_SET_PARAMS);
     int ret = NO_ERROR;
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
@@ -1177,7 +1177,7 @@
  *==========================================================================*/
 int QCamera2HardwareInterface::commit_params(struct camera_device *device)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_COMMIT_PARAMS);
     int ret = NO_ERROR;
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
@@ -1220,7 +1220,7 @@
  *==========================================================================*/
 int QCamera2HardwareInterface::restart_after_set_params(struct camera_device *device)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_RESTART_AFTER_SET_PARAMS);
     int ret = NO_ERROR;
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
@@ -1260,7 +1260,7 @@
  *==========================================================================*/
 char* QCamera2HardwareInterface::get_parameters(struct camera_device *device)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_GET_PARAMETERS);
     char *ret = NULL;
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
@@ -1296,7 +1296,7 @@
 void QCamera2HardwareInterface::put_parameters(struct camera_device *device,
                                                char *parm)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_PUT_PARAMETERS);
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
     if (!hw) {
@@ -1334,7 +1334,7 @@
                                             int32_t arg1,
                                             int32_t arg2)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_SEND_COMMAND);
     int ret = NO_ERROR;
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
@@ -1382,7 +1382,7 @@
         int32_t arg1,
         int32_t arg2)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_SEND_COMMAND_RESTART);
     int ret = NO_ERROR;
     QCamera2HardwareInterface *hw =
             reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
@@ -1421,7 +1421,7 @@
  *==========================================================================*/
 void QCamera2HardwareInterface::release(struct camera_device *device)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_RELEASE);
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
     if (!hw) {
@@ -1494,7 +1494,7 @@
  *==========================================================================*/
 int QCamera2HardwareInterface::close_camera_device(hw_device_t *hw_dev)
 {
-    KPI_ATRACE_CALL();
+    KPI_ATRACE_CAMSCOPE_BEGIN(CAMSCOPE_HAL1_CLOSECAMERA);
     int ret = NO_ERROR;
 
     QCamera2HardwareInterface *hw =
@@ -1507,6 +1507,8 @@
     LOGI("[KPI Perf]: E camera id %d", hw->getCameraId());
     delete hw;
     LOGI("[KPI Perf]: X");
+    KPI_ATRACE_CAMSCOPE_END(CAMSCOPE_HAL1_CLOSECAMERA);
+    CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
     return ret;
 }
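close_camera_device() uses the explicit BEGIN/END pair instead of the scoped *_CALL form so the trace span can close before CAMSCOPE_DESTROY tears the HAL tracing section down. A self-contained sketch of that shape, with stand-in trace functions (assumptions, not HAL APIs):

    // Sketch of the explicit begin/end pairing: the end marker fires before
    // the tracing section itself is destroyed, because nothing should trace
    // after the device has closed.
    #include <cstdio>

    static void trace_begin_stub(const char *tag) { std::printf("B %s\n", tag); }
    static void trace_end_stub(const char *tag)   { std::printf("E %s\n", tag); }
    static void trace_destroy_stub()              { std::printf("destroy section\n"); }

    int close_camera_sketch() {
        trace_begin_stub("HAL1_CLOSECAMERA");
        int ret = 0;                     // real close/delete work goes here
        trace_end_stub("HAL1_CLOSECAMERA");
        trace_destroy_stub();
        return ret;
    }
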
 
@@ -1528,7 +1530,7 @@
                                                    void *img_ptr,
                                                    cam_pp_offline_src_config_t *config)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_REGISTER_FACE_IMAGE);
     int ret = NO_ERROR;
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
@@ -1568,7 +1570,7 @@
  *==========================================================================*/
 int QCamera2HardwareInterface::prepare_snapshot(struct camera_device *device)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_PREPARE_SNAPSHOT);
     int ret = NO_ERROR;
     QCamera2HardwareInterface *hw =
         reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
@@ -1688,7 +1690,7 @@
     memset(&mFaceRect, -1, sizeof(mFaceRect));
 #endif
     getLogLevel();
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_QCAMERA2HWI);
     mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
     mCameraDevice.common.version = HARDWARE_DEVICE_API_VERSION(1, 0);
     mCameraDevice.common.close = close_camera_device;
@@ -1830,7 +1832,7 @@
  *==========================================================================*/
 int QCamera2HardwareInterface::openCamera(struct hw_device_t **hw_device)
 {
-    KPI_ATRACE_CALL();
+    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_OPENCAMERA);
     int rc = NO_ERROR;
     if (mCameraOpened) {
         *hw_device = NULL;
@@ -2355,6 +2357,7 @@
 cam_capability_t *QCamera2HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
         uint32_t cam_handle)
 {
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_GET_CAP);
     int rc = NO_ERROR;
     QCameraHeapMemory *capabilityHeap = NULL;
     cam_capability_t *cap_ptr = NULL;
@@ -2450,7 +2453,7 @@
 int QCamera2HardwareInterface::initCapabilities(uint32_t cameraId,
         mm_camera_vtbl_t *cameraHandle)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_INIT_CAP);
     int rc = 0;
     uint32_t handle = 0;
 
@@ -2516,7 +2519,7 @@
 int QCamera2HardwareInterface::getCapabilities(uint32_t cameraId,
         struct camera_info *info, cam_sync_type_t *p_cam_type)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_GET_CAP);
     int rc = NO_ERROR;
     struct  camera_info *p_info = NULL;
     pthread_mutex_lock(&gCamLock);
@@ -3787,7 +3790,7 @@
  *==========================================================================*/
 int QCamera2HardwareInterface::startPreview()
 {
-    KPI_ATRACE_CALL();
+    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_STARTPREVIEW);
     int32_t rc = NO_ERROR;
 
     LOGI("E ZSL = %d Recording Hint = %d", mParameters.isZSLMode(),
@@ -3864,7 +3867,7 @@
  *==========================================================================*/
 int QCamera2HardwareInterface::stopPreview()
 {
-    KPI_ATRACE_CALL();
+    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_STOPPREVIEW);
     LOGI("E");
     mNumPreviewFaces = -1;
     mActiveAF = false;
@@ -3889,7 +3892,6 @@
     unpreparePreview();
 
     m_perfLockMgr.releasePerfLock(PERF_LOCK_STOP_PREVIEW);
-
     LOGI("X");
     return NO_ERROR;
 }
@@ -4811,11 +4813,10 @@
         return rc;
     }
 
-#ifdef DUAL_CAM_TEST //Temporary macro. Added to simulate B+B snapshot. Will be removed
-    if(mActiveCamera == (MM_CAMERA_TYPE_MAIN | MM_CAMERA_TYPE_AUX)) {
-        numSnapshots = 1;
+    if(mActiveCamera == MM_CAMERA_DUAL_CAM) {
+        /*Need to remove once we have dual camera fusion*/
+        numSnapshots = numSnapshots/MM_CAMERA_MAX_CAM_CNT;
     }
-#endif
 
     if (mAdvancedCaptureConfigured) {
         numSnapshots = mParameters.getBurstCountForAdvancedCapture();
@@ -6965,7 +6966,7 @@
 
     fovControlResult = m_pFovControl->getFovControlResult();
 
-    camState = fovControlResult.camState;
+    camState = fovControlResult.activeCamState;
 
     if (camState != mActiveCamera) {
         processCameraControl(camState);
@@ -7362,6 +7363,7 @@
     rc = pChannel->init(NULL, NULL, NULL);
     if (rc != NO_ERROR) {
         LOGE("init preview channel failed, ret = %d", rc);
+        delete pChannel;
         return rc;
     }
 
@@ -7370,6 +7372,7 @@
             metadata_stream_cb_routine, this);
     if (rc != NO_ERROR) {
         LOGE("add metadata stream failed, ret = %d", rc);
+        delete pChannel;
         return rc;
     }
 
@@ -7382,7 +7385,7 @@
                     nodisplay_preview_stream_cb_routine, this);
         } else {
             rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
-                                    preview_stream_cb_routine, this);
+                    preview_stream_cb_routine, this);
             if (needSyncCB(CAM_STREAM_TYPE_PREVIEW) == TRUE) {
                 pChannel->setStreamSyncCB(CAM_STREAM_TYPE_PREVIEW,
                         synchronous_stream_cb_routine);
@@ -7390,6 +7393,12 @@
         }
     }
 
+    if (rc != NO_ERROR) {
+        LOGE("add raw/preview stream failed, ret = %d", rc);
+        delete pChannel;
+        return rc;
+    }
+
     if (((mParameters.fdModeInVideo())
             || (mParameters.getDcrf() == true)
             || (mParameters.getRecordingHintValue() != true))
@@ -7398,6 +7407,7 @@
                 NULL, this);
         if (rc != NO_ERROR) {
             LOGE("add Analysis stream failed, ret = %d", rc);
+            delete pChannel;
             return rc;
         }
     }
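The added delete pChannel calls close a leak: once the channel object has been allocated, every failure return from init()/addStreamToChannel() must free it. A self-contained sketch of the pattern, using a stand-in Channel type rather than the real QCameraChannel:

    // Minimal sketch of the cleanup-on-error rule these hunks enforce.
    struct Channel {
        int init()      { return 0;  }   // 0 == NO_ERROR in this sketch
        int addStream() { return -1; }   // simulate a failure
    };

    int addChannelSketch(Channel *&out) {
        Channel *ch = new Channel();
        int rc = ch->init();
        if (rc != 0) { delete ch; return rc; }   // free the half-built channel
        rc = ch->addStream();
        if (rc != 0) { delete ch; return rc; }   // previously the leaked path
        out = ch;
        return 0;
    }
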
@@ -7703,7 +7713,7 @@
                 nodisplay_preview_stream_cb_routine, this);
     } else {
         rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
-                                preview_stream_cb_routine, this);
+                preview_stream_cb_routine, this);
         if (needSyncCB(CAM_STREAM_TYPE_PREVIEW) == TRUE) {
             pChannel->setStreamSyncCB(CAM_STREAM_TYPE_PREVIEW,
                     synchronous_stream_cb_routine);
@@ -7801,6 +7811,7 @@
                         this);
     if (rc != NO_ERROR) {
         LOGE("init capture channel failed, ret = %d", rc);
+        delete pChannel;
         return rc;
     }
 
@@ -7809,6 +7820,7 @@
             metadata_stream_cb_routine, this);
     if (rc != NO_ERROR) {
         LOGE("add metadata stream failed, ret = %d", rc);
+        delete pChannel;
         return rc;
     }
 
@@ -7817,6 +7829,7 @@
                 preview_stream_cb_routine, this);
         if (rc != NO_ERROR) {
             LOGE("add preview stream failed, ret = %d", rc);
+            delete pChannel;
             return rc;
         }
         if (needSyncCB(CAM_STREAM_TYPE_PREVIEW) == TRUE) {
@@ -7829,6 +7842,7 @@
                                 NULL, this);
         if (rc != NO_ERROR) {
             LOGE("add postview stream failed, ret = %d", rc);
+            delete pChannel;
             return rc;
         }
     }
@@ -7838,6 +7852,7 @@
                 NULL, this);
         if (rc != NO_ERROR) {
             LOGE("add snapshot stream failed, ret = %d", rc);
+            delete pChannel;
             return rc;
         }
     }
@@ -7855,6 +7870,7 @@
                 CAM_STREAM_TYPE_RAW, stream_cb, this);
         if (rc != NO_ERROR) {
             LOGE("add raw stream failed, ret = %d", rc);
+            delete pChannel;
             return rc;
         }
     }
@@ -8339,11 +8355,10 @@
         pChannel->setReprocCount(1);
     }
 
-#ifdef DUAL_CAM_TEST //Temporary macro. Added to simulate B+B snapshot. Will be removed
     if (isDualCamera()) {
         minStreamBufNum += 1;
     }
-#endif
+
     // Add non inplace image lib buffers only when ppproc is present,
     // becuase pproc is non inplace and input buffers for img lib
     // are output for pproc and this number of extra buffers is required
@@ -8593,7 +8608,7 @@
  *==========================================================================*/
 int32_t QCamera2HardwareInterface::preparePreview()
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_PREPAREPREVIEW);
     int32_t rc = NO_ERROR;
 
     LOGI("E");
@@ -10023,7 +10038,7 @@
  *==========================================================================*/
 int32_t QCamera2HardwareInterface::prepareHardwareForSnapshot(int32_t afNeeded)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_PREPARE_HW_FOR_SNAPSHOT);
     LOGI("[KPI Perf]: Send PREPARE SANSPHOT event");
     return mCameraHandle->ops->prepare_snapshot(mCameraHandle->camera_handle,
                                                 afNeeded);
@@ -10815,9 +10830,6 @@
     }
 #endif
 
-    if (isDualCamera() == TRUE) {
-        return FALSE;
-    }
     char value[PROPERTY_VALUE_MAX];
     property_get("persist.camera.preview.sync_cb", value, "1");
     if ((atoi(value) == 1) && (stream_type == CAM_STREAM_TYPE_PREVIEW)) {
diff --git a/msmcobalt/QCamera2/HAL/QCamera2HWICallbacks.cpp b/msmcobalt/QCamera2/HAL/QCamera2HWICallbacks.cpp
index 8c004ef..94b4d3d 100644
--- a/msmcobalt/QCamera2/HAL/QCamera2HWICallbacks.cpp
+++ b/msmcobalt/QCamera2/HAL/QCamera2HWICallbacks.cpp
@@ -66,7 +66,7 @@
 void QCamera2HardwareInterface::zsl_channel_cb(mm_camera_super_buf_t *recvd_frame,
                                                void *userdata)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_ZSL_CH_CB);
     LOGH("[KPI Perf]: E");
     char value[PROPERTY_VALUE_MAX];
     bool dump_raw = false;
@@ -75,16 +75,16 @@
 
     if (pme == NULL ||
         pme->mCameraHandle == 0 ||
-        !validate_handle(pme->mCameraHandle->camera_handle,
-        recvd_frame->camera_handle)) {
+        (!validate_handle(pme->mCameraHandle->camera_handle,
+        recvd_frame->camera_handle))) {
        LOGE("camera obj not valid");
        return;
     }
 
     QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_ZSL];
     if (pChannel == NULL ||
-            !validate_handle(pChannel->getMyHandle(),
-            recvd_frame->ch_id)) {
+            (!validate_handle(pChannel->getMyHandle(),
+            recvd_frame->ch_id))) {
         LOGE("ZSL channel doesn't exist, return here");
         return;
     }
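The callback hunks in this file replace direct camera_handle comparisons with validate_handle(), presumably so a frame arriving from either session of a dual-camera pair still matches the combined handle. A sketch of one plausible check, assuming a combined handle is the bitwise OR of per-camera handles (an assumption; the real mm-camera helper may differ):

    // Sketch only: two handles "match" when they are equal or share at least
    // one per-camera component; zero handles never match.
    #include <cstdint>

    static bool validate_handle_sketch(uint32_t expected, uint32_t received) {
        if (expected == 0 || received == 0) {
            return false;
        }
        return (expected == received) || ((expected & received) != 0);
    }
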
@@ -381,13 +381,14 @@
 void QCamera2HardwareInterface::capture_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
                                                            void *userdata)
 {
-    KPI_ATRACE_CALL();
+    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_CAPTURE_CH_CB);
     char value[PROPERTY_VALUE_MAX];
     LOGH("[KPI Perf]: E PROFILE_YUV_CB_TO_HAL");
     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
     if (pme == NULL ||
         pme->mCameraHandle == NULL ||
-        pme->mCameraHandle->camera_handle != recvd_frame->camera_handle){
+       !validate_handle(pme->mCameraHandle->camera_handle,
+        recvd_frame->camera_handle)){
         LOGE("camera obj not valid");
         return;
     }
@@ -640,7 +641,7 @@
 void QCamera2HardwareInterface::postproc_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
                                                             void *userdata)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_PP_CH_CB);
     LOGH("[KPI Perf]: E");
     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
 
@@ -696,7 +697,7 @@
     nsecs_t frameTime = 0, mPreviewTimestamp = 0;
     int err = NO_ERROR;
 
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_SYNC_STRM_CB);
     LOGH("[KPI Perf] : BEGIN");
     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
 
@@ -794,7 +795,8 @@
                                                           QCameraStream * stream,
                                                           void *userdata)
 {
-    KPI_ATRACE_CALL();
+    CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
+    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_PREVIEW_STRM_CB);
     LOGH("[KPI Perf] : BEGIN");
     int err = NO_ERROR;
     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
@@ -1147,12 +1149,13 @@
                                                           QCameraStream *stream,
                                                           void * userdata)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_NODIS_PREVIEW_STRMCB);
     LOGH("[KPI Perf] E");
     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
     if (pme == NULL ||
         pme->mCameraHandle == NULL ||
-        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        !validate_handle(pme->mCameraHandle->camera_handle,
+        super_frame->camera_handle)){
         LOGE("camera obj not valid");
         // simply free super frame
         free(super_frame);
@@ -1231,12 +1234,13 @@
   QCameraStream *stream,
   void * userdata)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_RDI_MODE_STRM_CB);
     LOGH("RDI_DEBUG Enter");
     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
     if (pme == NULL ||
         pme->mCameraHandle == NULL ||
-        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        !validate_handle(pme->mCameraHandle->camera_handle,
+        super_frame->camera_handle)){
         LOGE("camera obj not valid");
         free(super_frame);
         return;
@@ -1352,7 +1356,7 @@
                                                            QCameraStream *stream,
                                                            void *userdata)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_POSTVIEW_STRM_CB);
     int err = NO_ERROR;
     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
     QCameraGrallocMemory *memory = (QCameraGrallocMemory *)super_frame->bufs[0]->mem_info;
@@ -1415,7 +1419,7 @@
                                                         QCameraStream *stream,
                                                         void *userdata)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_VIDEO_STRM_CB);
     QCameraVideoMemory *videoMemObj = NULL;
     camera_memory_t *video_mem = NULL;
     nsecs_t timeStamp = 0;
@@ -1628,7 +1632,7 @@
 void QCamera2HardwareInterface::snapshot_channel_cb_routine(mm_camera_super_buf_t *super_frame,
        void *userdata)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_SNAPSHOT_CH_CB);
     char value[PROPERTY_VALUE_MAX];
     QCameraChannel *pChannel = NULL;
 
@@ -1636,7 +1640,8 @@
     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
     if (pme == NULL ||
         pme->mCameraHandle == NULL ||
-        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        !validate_handle(pme->mCameraHandle->camera_handle,
+        super_frame->camera_handle)){
         LOGE("camera obj not valid");
         // simply free super frame
         free(super_frame);
@@ -1725,12 +1730,13 @@
                                                       QCameraStream * /*stream*/,
                                                       void * userdata)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_RAW_STRM_CB);
     LOGH("[KPI Perf] : BEGIN");
     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
     if (pme == NULL ||
         pme->mCameraHandle == NULL ||
-        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        !validate_handle(pme->mCameraHandle->camera_handle,
+        super_frame->camera_handle)){
         LOGE("camera obj not valid");
         // simply free super frame
         free(super_frame);
@@ -1761,14 +1767,15 @@
         void *userdata)
 
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_RAW_CH_CB);
     char value[PROPERTY_VALUE_MAX];
 
     LOGH("[KPI Perf]: E");
     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
     if (pme == NULL ||
         pme->mCameraHandle == NULL ||
-        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        !validate_handle(pme->mCameraHandle->camera_handle,
+        super_frame->camera_handle)){
         LOGE("camera obj not valid");
         // simply free super frame
         free(super_frame);
@@ -1855,7 +1862,7 @@
                                                               QCameraStream * stream,
                                                               void * userdata)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_PREVIEW_RAW_STRM_CB);
     LOGH("[KPI Perf] : BEGIN");
     char value[PROPERTY_VALUE_MAX];
     bool dump_preview_raw = false, dump_video_raw = false;
@@ -1863,7 +1870,8 @@
     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
     if (pme == NULL ||
         pme->mCameraHandle == NULL ||
-        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        !validate_handle(pme->mCameraHandle->camera_handle,
+        super_frame->camera_handle)){
         LOGE("camera obj not valid");
         // simply free super frame
         free(super_frame);
@@ -1907,7 +1915,7 @@
                                                                QCameraStream * stream,
                                                                void * userdata)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_SNAPSHOT_RAW_STRM_CB);
     LOGH("[KPI Perf] : BEGIN");
     char value[PROPERTY_VALUE_MAX];
     bool dump_raw = false;
@@ -1915,7 +1923,8 @@
     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
     if (pme == NULL ||
         pme->mCameraHandle == NULL ||
-        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        !validate_handle(pme->mCameraHandle->camera_handle,
+        super_frame->camera_handle)){
         LOGE("camera obj not valid");
         // simply free super frame
         free(super_frame);
@@ -2056,7 +2065,7 @@
                                                            QCameraStream * stream,
                                                            void * userdata)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_METADATA_STRM_CB);
     LOGD("[KPI Perf] : BEGIN");
     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
 
@@ -2544,12 +2553,13 @@
                                                             QCameraStream * /*stream*/,
                                                             void * userdata)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_REPROC_STRM_CB);
     LOGH("[KPI Perf]: E");
     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
     if (pme == NULL ||
         pme->mCameraHandle == NULL ||
-        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        !validate_handle(pme->mCameraHandle->camera_handle,
+        super_frame->camera_handle)){
         LOGE("camera obj not valid");
         // simply free super frame
         free(super_frame);
@@ -2577,7 +2587,7 @@
 void QCamera2HardwareInterface::callback_stream_cb_routine(mm_camera_super_buf_t *super_frame,
         QCameraStream *stream, void *userdata)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_CB_STRM_CB);
     LOGH("[KPI Perf]: E");
     QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
 
diff --git a/msmcobalt/QCamera2/HAL/QCameraMem.cpp b/msmcobalt/QCamera2/HAL/QCameraMem.cpp
index ea3a9af..8f9ce50 100644
--- a/msmcobalt/QCamera2/HAL/QCameraMem.cpp
+++ b/msmcobalt/QCamera2/HAL/QCameraMem.cpp
@@ -1602,7 +1602,7 @@
 /*===========================================================================
  * FUNCTION   : closeNativeHandle
  *
- * DESCRIPTION: close video native handle and update cached ptrs
+ * DESCRIPTION: static function to close video native handle.
  *
  * PARAMETERS :
  *   @data  : ptr to video frame to be returned
@@ -1611,6 +1611,39 @@
  *              NO_ERROR  -- success
  *              none-zero failure code
  *==========================================================================*/
+int QCameraVideoMemory::closeNativeHandle(const void *data)
+{
+    int32_t rc = NO_ERROR;
+
+#ifdef USE_MEDIA_EXTENSIONS
+    const media_metadata_buffer *packet =
+            (const media_metadata_buffer *)data;
+    if ((packet != NULL) && (packet->eType ==
+            kMetadataBufferTypeNativeHandleSource)
+            && (packet->pHandle)) {
+        native_handle_close(packet->pHandle);
+        native_handle_delete(packet->pHandle);
+    } else {
+        LOGE("Invalid Data. Could not release");
+        return BAD_VALUE;
+    }
+#endif
+   return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : closeNativeHandle
+ *
+ * DESCRIPTION: close video native handle and update cached ptrs
+ *
+ * PARAMETERS :
+ *   @data     : ptr to video frame to be returned
+ *   @metadata : Flag to update metadata mode
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
 int QCameraVideoMemory::closeNativeHandle(const void *data, bool metadata)
 {
     int32_t rc = NO_ERROR;
@@ -1621,6 +1654,8 @@
         if ((packet != NULL) && (packet->eType ==
                 kMetadataBufferTypeNativeHandleSource)
                 && (packet->pHandle)) {
+            native_handle_close(packet->pHandle);
+            native_handle_delete(packet->pHandle);
             for (int i = 0; i < mMetaBufCount; i++) {
                 if(mMetadata[i]->data == data) {
                     media_metadata_buffer *mem =
@@ -2300,7 +2335,9 @@
     LOGD("E ", __FUNCTION__);
 
     for (int cnt = 0; cnt < mMappableBuffers; cnt++) {
-        mCameraMemory[cnt]->release(mCameraMemory[cnt]);
+        if (mCameraMemory[cnt] != NULL) {
+            mCameraMemory[cnt]->release(mCameraMemory[cnt]);
+        }
         struct ion_handle_data ion_handle;
         memset(&ion_handle, 0, sizeof(ion_handle));
         ion_handle.handle = mMemInfo[cnt].handle;
@@ -2309,12 +2346,14 @@
         }
         close(mMemInfo[cnt].main_ion_fd);
         if(mLocalFlag[cnt] != BUFFER_NOT_OWNED) {
-            if (mWindow) {
+            if (mWindow && (mBufferHandle[cnt] != NULL)
+                && (*mBufferHandle[cnt] != NULL)) {
+                LOGH("cancel_buffer: buffer_handle =%p",  *mBufferHandle[cnt]);
                 mWindow->cancel_buffer(mWindow, mBufferHandle[cnt]);
-                LOGH("cancel_buffer: hdl =%p", (*mBufferHandle[cnt]));
+                mBufferHandle[cnt]= NULL;
             } else {
-                LOGE("Preview window is NULL, cannot cancel_buffer: hdl =%p",
-                      (*mBufferHandle[cnt]));
+                LOGE("Cannot cancel buffer: hdl =%p window = %p local ptr = %p",
+                      (*mBufferHandle[cnt]), mWindow, mBufferHandle[cnt]);
             }
         }
         mLocalFlag[cnt] = BUFFER_NOT_OWNED;
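Two fixes land here: a static closeNativeHandle(const void *) overload that both closes and deletes the packed video native handle (so callers without a QCameraVideoMemory instance, such as the state machine, can release a stray frame), and NULL guards before release()/cancel_buffer() in deallocate(). A short sketch of the close-then-delete rule, using the real libcutils native_handle API:

    // Sketch of the release rule applied above: a metadata video buffer's
    // native handle must be closed (dropping its duped fds) and then deleted
    // (freeing the native_handle_t container itself).
    #include <cutils/native_handle.h>

    void release_video_handle(native_handle_t *h) {
        if (h == nullptr) {
            return;                    // mirror the NULL guards added above
        }
        native_handle_close(h);        // closes every fd stored in the handle
        native_handle_delete(h);       // frees the handle structure
    }
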
diff --git a/msmcobalt/QCamera2/HAL/QCameraMem.h b/msmcobalt/QCamera2/HAL/QCameraMem.h
index c450ba0..3713781 100644
--- a/msmcobalt/QCamera2/HAL/QCameraMem.h
+++ b/msmcobalt/QCamera2/HAL/QCameraMem.h
@@ -249,6 +249,7 @@
     int convCamtoOMXFormat(cam_format_t format);
     int closeNativeHandle(const void *data, bool metadata = true);
     native_handle_t *getNativeHandle(uint32_t index, bool metadata = true);
+    static int closeNativeHandle(const void *data);
 private:
     camera_memory_t *mMetadata[MM_CAMERA_MAX_NUM_FRAMES];
     uint8_t mMetaBufCount;
diff --git a/msmcobalt/QCamera2/HAL/QCameraParameters.cpp b/msmcobalt/QCamera2/HAL/QCameraParameters.cpp
index 4213658..18a14e7 100644
--- a/msmcobalt/QCamera2/HAL/QCameraParameters.cpp
+++ b/msmcobalt/QCamera2/HAL/QCameraParameters.cpp
@@ -4318,8 +4318,11 @@
     }
 
     LOGH("nBurstNum = %d, nExpnum = %d", nBurstNum, nExpnum);
-    if (!isDualCamera()) {
-        set(KEY_QC_NUM_SNAPSHOT_PER_SHUTTER, nBurstNum * nExpnum);
+    if (mActiveState == MM_CAMERA_DUAL_CAM) {
+        set(KEY_QC_NUM_SNAPSHOT_PER_SHUTTER,
+                (nBurstNum * nExpnum * MM_CAMERA_MAX_CAM_CNT));
+    } else {
+        set(KEY_QC_NUM_SNAPSHOT_PER_SHUTTER, (nBurstNum * nExpnum));
     }
     return NO_ERROR;
 }
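With both cameras active, the parameter layer now advertises nBurstNum * nExpnum * MM_CAMERA_MAX_CAM_CNT snapshots per shutter, while the HWI hunk earlier divides numSnapshots by MM_CAMERA_MAX_CAM_CNT so each session captures its own share until dual-camera fusion exists. A worked sketch of that arithmetic (assuming the usual MM_CAMERA_MAX_CAM_CNT of 2):

    // Worked sketch of the dual-camera snapshot bookkeeping implied above.
    constexpr int MAX_CAM_CNT = 2;   // assumption for illustration

    int advertisedPerShutter(int burst, int exposures, bool dualActive) {
        int n = burst * exposures;
        return dualActive ? n * MAX_CAM_CNT : n;    // what the app is told
    }

    int perCameraSnapshots(int advertised, bool dualActive) {
        return dualActive ? advertised / MAX_CAM_CNT : advertised;  // per session
    }
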
@@ -10056,6 +10059,44 @@
  *
  * PARAMETERS :
  *   @streamType        : stream type
+ *
+ * RETURN     : rotation value for stream
+ *==========================================================================*/
+cam_rotation_t QCameraParameters::getStreamRotation(cam_stream_type_t streamType)
+{
+    cam_rotation_t rotation = ROTATE_0;
+    const char *str = get(KEY_QC_VIDEO_ROTATION);
+    int rotationParam = lookupAttr(VIDEO_ROTATION_MODES_MAP,
+            PARAM_MAP_SIZE(VIDEO_ROTATION_MODES_MAP), str);
+    switch (streamType) {
+        case CAM_STREAM_TYPE_VIDEO:
+            switch(rotationParam) {
+                case 90:
+                    rotation = ROTATE_90;
+                    break;
+                case 180:
+                    rotation = ROTATE_180;
+                    break;
+                case 270:
+                    rotation = ROTATE_270;
+                    break;
+                default:
+                    rotation = ROTATE_0;
+            }
+            break;
+        default:
+            break;
+    }
+    return rotation;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamRotation
+ *
+ * DESCRIPTION: get stream rotation by its type
+ *
+ * PARAMETERS :
+ *   @streamType        : stream type
  *   @featureConfig     : stream feature config structure
  *   @dim               : stream dimension
  *
@@ -10068,24 +10109,23 @@
                                             cam_dimension_t &dim)
 {
     int32_t ret = NO_ERROR;
-    const char *str = get(KEY_QC_VIDEO_ROTATION);
-    int rotationParam = lookupAttr(VIDEO_ROTATION_MODES_MAP,
-            PARAM_MAP_SIZE(VIDEO_ROTATION_MODES_MAP), str);
+
+    cam_rotation_t rotation = getStreamRotation(streamType);
     featureConfig.rotation = ROTATE_0;
     int swapDim = 0;
     switch (streamType) {
         case CAM_STREAM_TYPE_VIDEO:
-            switch(rotationParam) {
-                case 90:
+            switch(rotation) {
+                case ROTATE_90:
                     featureConfig.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
                     featureConfig.rotation = ROTATE_90;
                     swapDim = 1;
                     break;
-                case 180:
+                case ROTATE_180:
                     featureConfig.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
                     featureConfig.rotation = ROTATE_180;
                     break;
-                case 270:
+                case ROTATE_270:
                     featureConfig.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
                     featureConfig.rotation = ROTATE_270;
                     swapDim = 1;
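The rotation lookup is factored out into getStreamRotation(streamType) so the same value feeds both the per-stream feature config here and the new stream_config_info.rotation entries added later in this file; only CAM_STREAM_TYPE_VIDEO ever reports a non-zero rotation. A sketch of the degrees-to-enum mapping the helper centralizes (rotationFromDegrees is an illustrative name, not a HAL function):

    // Sketch of the degrees-to-enum mapping; enum values mirror those in the
    // diff, but this standalone type is not the real cam_rotation_t.
    enum cam_rotation_sketch { ROTATE_0, ROTATE_90, ROTATE_180, ROTATE_270 };

    cam_rotation_sketch rotationFromDegrees(int deg) {
        switch (deg) {
            case 90:  return ROTATE_90;
            case 180: return ROTATE_180;
            case 270: return ROTATE_270;
            default:  return ROTATE_0;   // anything else means no rotation
        }
    }
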
@@ -12257,10 +12297,10 @@
                 memcpy(&m_pDualCamCmdPtr[i]->value,
                         &info[i],
                         sizeof(cam_dual_camera_perf_control_t));
-                LOGH("LPM CMD %d: cmd %d LPM Enable - %d fps = %d", i,
+                LOGH("LPM CMD %d: cmd %d LPM Enable - %d mode = %d", i,
                         m_pDualCamCmdPtr[i]->cmd_type,
                         m_pDualCamCmdPtr[i]->value.enable,
-                        m_pDualCamCmdPtr[i]->value.low_fps);
+                        m_pDualCamCmdPtr[i]->value.perf_mode);
             }
         }
         break;
@@ -12385,16 +12425,16 @@
 /*===========================================================================
  * FUNCTION   : getPointerofParam
  *
- * DESCRIPTION:
+ * DESCRIPTION: get a pointer to parameter structure
  *
  * PARAMETERS :
  *    @meta_id : parameter / meta id enum
- *    @metadata: metadata buffer pointer
+ *    @metadata : pointer to parameter buffer.
  *
  * RETURN     :Pointer of member_variable_<meta_ID>
  *
  *==========================================================================*/
-void * QCameraParameters::getPointerofParam(cam_intf_parm_type_t meta_id,
+void *QCameraParameters::getPointerofParam(cam_intf_parm_type_t meta_id,
         metadata_buffer_t* metadata)
 {
     switch(meta_id) {
@@ -12811,7 +12851,7 @@
 /*===========================================================================
  * FUNCTION   : getSizeofParam
  *
- * DESCRIPTION:
+ * DESCRIPTION: get size of parameter structure
  *
  * PARAMETERS :
  *    @meta_id : parameter / meta id enum
@@ -12821,8 +12861,8 @@
  *==========================================================================*/
 uint32_t QCameraParameters::getSizeofParam(cam_intf_parm_type_t param_id)
 {
-      metadata_buffer_t* metadata = NULL;
-      switch(param_id) {
+    metadata_buffer_t* metadata = NULL;
+    switch(param_id) {
         case CAM_INTF_META_HISTOGRAM:
           return SIZE_OF_PARAM(CAM_INTF_META_HISTOGRAM, metadata);
         case CAM_INTF_META_FACE_DETECTION:
@@ -13233,8 +13273,8 @@
         default:
           LOGE("parameter is not found");
           return 0;
-        }
-        return 0;
+    }
+    return 0;
 }
 
 /*===========================================================================
@@ -13342,7 +13382,6 @@
     }
 
     setAuxParameters();
-
     rc = m_pCamOpsTbl->ops->set_parms(
             get_aux_camera_handle(m_pCamOpsTbl->camera_handle),
             m_pParamBufAux);
@@ -14129,6 +14168,16 @@
         return rc;
     }
 
+    if(isDualCamera()) {
+        // Update FOV-control config settings due to the change in the configuration
+        rc = m_pFovControl->updateConfigSettings(m_pParamBuf, m_pParamBufAux);
+
+        if (rc != NO_ERROR) {
+            LOGE("Failed to update FOV-control config settings");
+            return rc;
+        }
+    }
+
     return rc;
 }
 
@@ -14193,6 +14242,8 @@
                 mStreamPpMask[CAM_STREAM_TYPE_PREVIEW];
         getStreamFormat(CAM_STREAM_TYPE_PREVIEW,
                 stream_config_info.format[stream_config_info.num_streams]);
+        stream_config_info.rotation[stream_config_info.num_streams] =
+                getStreamRotation(CAM_STREAM_TYPE_PREVIEW);
         stream_config_info.num_streams++;
 
         stream_config_info.type[stream_config_info.num_streams] =
@@ -14204,6 +14255,8 @@
                 mStreamPpMask[CAM_STREAM_TYPE_ANALYSIS];
         getStreamFormat(CAM_STREAM_TYPE_ANALYSIS,
                 stream_config_info.format[stream_config_info.num_streams]);
+        stream_config_info.rotation[stream_config_info.num_streams] =
+                getStreamRotation(CAM_STREAM_TYPE_ANALYSIS);
         stream_config_info.num_streams++;
 
         stream_config_info.type[stream_config_info.num_streams] =
@@ -14215,6 +14268,8 @@
                 mStreamPpMask[CAM_STREAM_TYPE_SNAPSHOT];
         getStreamFormat(CAM_STREAM_TYPE_SNAPSHOT,
                 stream_config_info.format[stream_config_info.num_streams]);
+        stream_config_info.rotation[stream_config_info.num_streams] =
+                getStreamRotation(CAM_STREAM_TYPE_SNAPSHOT);
         stream_config_info.num_streams++;
 
         if (isUBWCEnabled() && getRecordingHintValue() != true) {
@@ -14230,6 +14285,8 @@
                         mStreamPpMask[CAM_STREAM_TYPE_CALLBACK];
                 getStreamFormat(CAM_STREAM_TYPE_CALLBACK,
                         stream_config_info.format[stream_config_info.num_streams]);
+                stream_config_info.rotation[stream_config_info.num_streams] =
+                        getStreamRotation(CAM_STREAM_TYPE_CALLBACK);
                 stream_config_info.num_streams++;
             }
         }
@@ -14249,7 +14306,10 @@
                     mStreamPpMask[CAM_STREAM_TYPE_SNAPSHOT];
             getStreamFormat(CAM_STREAM_TYPE_SNAPSHOT,
                         stream_config_info.format[stream_config_info.num_streams]);
+            stream_config_info.rotation[stream_config_info.num_streams] =
+                        getStreamRotation(CAM_STREAM_TYPE_SNAPSHOT);
             stream_config_info.num_streams++;
+
             stream_config_info.is_type[stream_config_info.num_streams] = mIsTypeVideo;
             stream_config_info.type[stream_config_info.num_streams] =
                     CAM_STREAM_TYPE_VIDEO;
@@ -14260,6 +14320,8 @@
                     mStreamPpMask[CAM_STREAM_TYPE_VIDEO];
             getStreamFormat(CAM_STREAM_TYPE_VIDEO,
                     stream_config_info.format[stream_config_info.num_streams]);
+            stream_config_info.rotation[stream_config_info.num_streams] =
+                        getStreamRotation(CAM_STREAM_TYPE_VIDEO);
             stream_config_info.num_streams++;
         }
 
@@ -14276,6 +14338,8 @@
                     mStreamPpMask[CAM_STREAM_TYPE_ANALYSIS];
             getStreamFormat(CAM_STREAM_TYPE_ANALYSIS,
                     stream_config_info.format[stream_config_info.num_streams]);
+            stream_config_info.rotation[stream_config_info.num_streams] =
+                    getStreamRotation(CAM_STREAM_TYPE_ANALYSIS);
             stream_config_info.num_streams++;
         }
 
@@ -14289,6 +14353,8 @@
         getStreamFormat(CAM_STREAM_TYPE_PREVIEW,
                     stream_config_info.format[stream_config_info.num_streams]);
         stream_config_info.is_type[stream_config_info.num_streams] = mIsTypePreview;
+        stream_config_info.rotation[stream_config_info.num_streams] =
+                getStreamRotation(CAM_STREAM_TYPE_PREVIEW);
         stream_config_info.num_streams++;
 
         if (isUBWCEnabled() && getRecordingHintValue() != true) {
@@ -14305,6 +14371,8 @@
                 getStreamFormat(CAM_STREAM_TYPE_CALLBACK,
                         stream_config_info.format[stream_config_info.num_streams]);
                 stream_config_info.is_type[stream_config_info.num_streams] = IS_TYPE_NONE;
+                stream_config_info.rotation[stream_config_info.num_streams] =
+                        getStreamRotation(CAM_STREAM_TYPE_CALLBACK);
                 stream_config_info.num_streams++;
             }
         }
@@ -14322,6 +14390,8 @@
                 getStreamFormat(CAM_STREAM_TYPE_SNAPSHOT,
                         stream_config_info.format[stream_config_info.num_streams]);
                 stream_config_info.is_type[stream_config_info.num_streams] = IS_TYPE_NONE;
+                stream_config_info.rotation[stream_config_info.num_streams] =
+                        getStreamRotation(CAM_STREAM_TYPE_SNAPSHOT);
                 stream_config_info.num_streams++;
             }
 
@@ -14336,6 +14406,8 @@
                 getStreamFormat(CAM_STREAM_TYPE_PREVIEW,
                         stream_config_info.format[stream_config_info.num_streams]);
                 stream_config_info.is_type[stream_config_info.num_streams] = IS_TYPE_NONE;
+                stream_config_info.rotation[stream_config_info.num_streams] =
+                        getStreamRotation(CAM_STREAM_TYPE_PREVIEW);
                 stream_config_info.num_streams++;
             } else if(!getQuadraCfa()) {
                 stream_config_info.type[stream_config_info.num_streams] =
@@ -14348,6 +14420,8 @@
                 getStreamFormat(CAM_STREAM_TYPE_POSTVIEW,
                         stream_config_info.format[stream_config_info.num_streams]);
                 stream_config_info.is_type[stream_config_info.num_streams] = IS_TYPE_NONE;
+                stream_config_info.rotation[stream_config_info.num_streams] =
+                        getStreamRotation(CAM_STREAM_TYPE_POSTVIEW);
                 stream_config_info.num_streams++;
             }
         } else {
@@ -14362,6 +14436,8 @@
             getStreamFormat(CAM_STREAM_TYPE_RAW,
                     stream_config_info.format[stream_config_info.num_streams]);
             stream_config_info.is_type[stream_config_info.num_streams] = IS_TYPE_NONE;
+            stream_config_info.rotation[stream_config_info.num_streams] =
+                        getStreamRotation(CAM_STREAM_TYPE_RAW);
             stream_config_info.num_streams++;
         }
     }
@@ -14437,8 +14513,6 @@
                 stream_config_info.is_type[k]);
     }
 
-    rc = sendStreamConfigInfo(stream_config_info);
-
     if (rc == NO_ERROR && isDualCamera()) {
         cam_3a_sync_mode_t sync_3a_mode = CAM_3A_SYNC_FOLLOW;
         char prop[PROPERTY_VALUE_MAX];
@@ -14474,6 +14548,8 @@
                 num_cam, &bundle_info[0]);
     }
 
+    rc = sendStreamConfigInfo(stream_config_info);
+
     return rc;
 }
 
@@ -14925,6 +15001,7 @@
             ((CAM_STREAM_TYPE_PREVIEW == stream_type) ||
             (CAM_STREAM_TYPE_VIDEO == stream_type) ||
             (CAM_STREAM_TYPE_CALLBACK == stream_type) ||
+            (CAM_STREAM_TYPE_POSTVIEW == stream_type) ||
             ((CAM_STREAM_TYPE_SNAPSHOT == stream_type) &&
             getRecordingHintValue() && is4k2kVideoResolution()))) {
          if (m_nMinRequiredPpMask & CAM_QCOM_FEATURE_DSDN) {
@@ -15005,6 +15082,27 @@
         feature_mask |= CAM_QTI_FEATURE_PPEISCORE;
     }
 
+    if(isDualCamera()) {
+        char prop[PROPERTY_VALUE_MAX];
+        memset(prop, 0, sizeof(prop));
+        bool satEnabledFlag = FALSE;
+        property_get("persist.camera.sat.enable", prop, "0");
+        satEnabledFlag = atoi(prop);
+
+        if (satEnabledFlag) {
+        LOGH("SAT flag enabled");
+            if (stream_type == CAM_STREAM_TYPE_VIDEO &&
+                !is4k2kVideoResolution()) {
+                feature_mask |= CAM_QTI_FEATURE_SAT;
+                LOGH("SAT feature mask set");
+            } else if ((stream_type == CAM_STREAM_TYPE_PREVIEW)||
+                (stream_type == CAM_STREAM_TYPE_CALLBACK)) {
+                feature_mask |= CAM_QTI_FEATURE_SAT;
+                LOGH("SAT feature mask set");
+            }
+        }
+    }
+
     // Store stream feature mask
     setStreamPpMask(stream_type, feature_mask);
     LOGH("stream type: %d, pp_mask: 0x%llx", stream_type, feature_mask);
@@ -16132,14 +16230,16 @@
     cam_dual_camera_perf_control_t perf_value[MM_CAMERA_MAX_CAM_CNT];
     uint8_t num_cam = 0;
 
-    property_get("persist.dualcam.lpm.fps", prop, "0");
+    property_get("persist.dualcam.lpm.mode", prop, "0");
     value = atoi(prop);
 
-    perf_value[num_cam].low_fps = value;
+    perf_value[num_cam].perf_mode = (cam_dual_camera_perf_mode_t)value;
     perf_value[num_cam].enable = cameraControl[0] ? 0 : 1;
+    perf_value[num_cam].priority = 0;
     num_cam++;
-    perf_value[num_cam].low_fps = value;
+    perf_value[num_cam].perf_mode = (cam_dual_camera_perf_mode_t)value;
     perf_value[num_cam].enable = cameraControl[1] ? 0 : 1;
+    perf_value[num_cam].priority = 0;
     num_cam++;
 
     rc = sendDualCamCmd(CAM_DUAL_CAMERA_LOW_POWER_MODE,
@@ -16150,13 +16250,8 @@
         mActiveCamera = state;
     }
 
-#ifdef DUAL_CAM_TEST //Temporary macro. Added to simulate B+B snapshot. Will be removed
-    if (controls == MM_CAMERA_DUAL_CAM) {
-        set(KEY_QC_NUM_SNAPSHOT_PER_SHUTTER, 2);
-    } else {
-        set(KEY_QC_NUM_SNAPSHOT_PER_SHUTTER, 1);
-    }
-#endif
+    /*Need to remove once we have dual camera fusion*/
+    setNumOfSnapshot();
 
     return rc;
 }
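
The QCameraParameters.cpp hunks above move dual-camera low power control from a raw FPS value (persist.dualcam.lpm.fps) to an enumerated perf mode (persist.dualcam.lpm.mode) with an explicit per-camera priority, and gate the new CAM_QTI_FEATURE_SAT mask on persist.camera.sat.enable. A minimal standalone sketch of the property-driven payload; the enum values and struct layout are stand-ins, only the perf_mode/enable/priority fields are taken from the hunk:

#include <cstdint>
#include <cstdlib>
#include <cutils/properties.h>   // property_get(), PROPERTY_VALUE_MAX

// Illustrative stand-ins; the real definitions live in the vendor headers.
enum cam_dual_camera_perf_mode_t { PERF_MODE_NONE = 0, PERF_MODE_1, PERF_MODE_2 };

struct cam_dual_camera_perf_control_t {
    cam_dual_camera_perf_mode_t perf_mode;
    uint8_t enable;    // 1 = ask this camera to enter low power mode
    uint8_t priority;  // 0 = let the backend decide which camera to throttle
};

// Builds one perf-control entry per camera. A camera that is not in the
// active set (cameraControl[i] == 0) is the one put into low power mode.
static void buildLowPowerPayload(const uint8_t cameraControl[2],
                                 cam_dual_camera_perf_control_t out[2])
{
    char prop[PROPERTY_VALUE_MAX] = {0};
    property_get("persist.dualcam.lpm.mode", prop, "0");
    cam_dual_camera_perf_mode_t mode =
            static_cast<cam_dual_camera_perf_mode_t>(atoi(prop));

    for (int i = 0; i < 2; i++) {
        out[i].perf_mode = mode;
        out[i].enable   = cameraControl[i] ? 0 : 1;
        out[i].priority = 0;
    }
}
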
diff --git a/msmcobalt/QCamera2/HAL/QCameraParameters.h b/msmcobalt/QCamera2/HAL/QCameraParameters.h
index b186471..3ea9d05 100644
--- a/msmcobalt/QCamera2/HAL/QCameraParameters.h
+++ b/msmcobalt/QCamera2/HAL/QCameraParameters.h
@@ -641,6 +641,7 @@
 #endif
 
     int getPreviewHalPixelFormat();
+    cam_rotation_t getStreamRotation(cam_stream_type_t streamType);
     int32_t getStreamRotation(cam_stream_type_t streamType,
                                cam_pp_feature_config_t &featureConfig,
                                cam_dimension_t &dim);
diff --git a/msmcobalt/QCamera2/HAL/QCameraStateMachine.cpp b/msmcobalt/QCamera2/HAL/QCameraStateMachine.cpp
index b0751a6..47fed37 100644
--- a/msmcobalt/QCamera2/HAL/QCameraStateMachine.cpp
+++ b/msmcobalt/QCamera2/HAL/QCameraStateMachine.cpp
@@ -632,12 +632,16 @@
             m_parent->signalAPIResult(&result);
         }
         break;
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+        {
+            LOGW("Free video handle %d %d", evt, m_state);
+            QCameraVideoMemory::closeNativeHandle((const void *)payload);
+        }
     case QCAMERA_SM_EVT_PRE_START_RECORDING:
     case QCAMERA_SM_EVT_RESTART_STOP_PREVIEW:
     case QCAMERA_SM_EVT_RESTART_START_PREVIEW:
     case QCAMERA_SM_EVT_START_RECORDING:
     case QCAMERA_SM_EVT_STOP_RECORDING:
-    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
     case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
     case QCAMERA_SM_EVT_PRE_TAKE_PICTURE:
     case QCAMERA_SM_EVT_TAKE_PICTURE:
@@ -1048,6 +1052,11 @@
             m_parent->signalAPIResult(&result);
         }
         break;
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+        {
+            LOGW("Free video handle %d %d", evt, m_state);
+            QCameraVideoMemory::closeNativeHandle((const void *)payload);
+        }
     case QCAMERA_SM_EVT_PRE_START_RECORDING:
     case QCAMERA_SM_EVT_RESTART_STOP_PREVIEW:
     case QCAMERA_SM_EVT_RESTART_START_PREVIEW:
@@ -1057,7 +1066,6 @@
     case QCAMERA_SM_EVT_PRE_TAKE_PICTURE:
     case QCAMERA_SM_EVT_TAKE_PICTURE:
     case QCAMERA_SM_EVT_CANCEL_PICTURE:
-    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
     case QCAMERA_SM_EVT_RELEASE:
         {
             LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
@@ -1580,9 +1588,13 @@
             m_parent->signalAPIResult(&result);
         }
         break;
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+        {
+            LOGW("Free video handle %d %d", evt, m_state);
+            QCameraVideoMemory::closeNativeHandle((const void *)payload);
+        }
     case QCAMERA_SM_EVT_CANCEL_PICTURE:
     case QCAMERA_SM_EVT_STOP_RECORDING:
-    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
     case QCAMERA_SM_EVT_RELEASE:
         {
             LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
@@ -1842,6 +1854,8 @@
                 {
                     // Send internal events to stop indefinite wait on prepare
                     // snapshot done event.
+                    m_state = QCAMERA_SM_STATE_PREVIEWING;
+
                     result.status = rc;
                     result.request_api = QCAMERA_SM_EVT_PREPARE_SNAPSHOT;
                     result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
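
Several of the QCameraStateMachine.cpp hunks above stop leaking video native handles: in states that previously treated QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME purely as an error, the handle carried in the payload is closed first and the case then deliberately falls through into the existing error reporting. A compact sketch of that free-then-fall-through shape, with the event names and close helper reduced to placeholders:

#include <cstdio>

enum Event { EVT_RELEASE_RECORDING_FRAME, EVT_START_RECORDING };

// Placeholder for QCameraVideoMemory::closeNativeHandle() in the patch.
static void closeNativeHandle(const void *payload) {
    std::printf("closing native handle %p\n", payload);
}

static void handleEventWhilePreviewing(Event evt, const void *payload) {
    switch (evt) {
    case EVT_RELEASE_RECORDING_FRAME:
        // Free the duped encoder handle so it is not leaked ...
        closeNativeHandle(payload);
        // ... then deliberately fall through: the event is still unexpected here.
    case EVT_START_RECORDING:
    default:
        std::printf("cannot handle evt(%d) in this state\n", evt);
        break;
    }
}
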
diff --git a/msmcobalt/QCamera2/HAL/QCameraStream.cpp b/msmcobalt/QCamera2/HAL/QCameraStream.cpp
index adb3e02..d4bdeb8 100644
--- a/msmcobalt/QCamera2/HAL/QCameraStream.cpp
+++ b/msmcobalt/QCamera2/HAL/QCameraStream.cpp
@@ -772,7 +772,12 @@
     if (isDualStream()) {
         mActiveCamera |= MM_CAMERA_TYPE_AUX;
         if (needFrameSync()) {
-            mCamOps->start_stream_frame_sync(mCamHandle, mChannelHandle, mHandle);
+            mCamOps->handle_frame_sync_cb(mCamHandle, mChannelHandle,
+                    mHandle, MM_CAMERA_CB_REQ_TYPE_FRAME_SYNC);
+        }
+        if (!needCbSwitch()) {
+            mCamOps->handle_frame_sync_cb(mCamHandle, mChannelHandle,
+                    mHandle, MM_CAMERA_CB_REQ_TYPE_ALL_CB);
         }
     }
 
@@ -1490,7 +1495,6 @@
                        this);
         pthread_setname_np(mBufAllocPid, "CAM_strmBuf");
     }
-
     return NO_ERROR;
 }
 
@@ -2827,10 +2831,12 @@
 int32_t QCameraStream::switchStreamCb()
 {
     int32_t ret = NO_ERROR;
-
     if ((getMyType() != CAM_STREAM_TYPE_SNAPSHOT)
-            && (mActiveCamera == MM_CAMERA_DUAL_CAM)) {
-        ret = mCamOps->switch_stream_callback(mCamHandle, mChannelHandle, mHandle);
+            && (mActiveCamera == MM_CAMERA_DUAL_CAM)
+            && !(needFrameSync())
+            && (needCbSwitch())) {
+        ret = mCamOps->handle_frame_sync_cb(mCamHandle, mChannelHandle,
+                mHandle, MM_CAMERA_CB_REQ_TYPE_SWITCH);
     }
 
     if (get_aux_camera_handle(mHandle)
@@ -2846,6 +2852,31 @@
 }
 
 /*===========================================================================
+ * FUNCTION   : needCbSwitch
+ *
+ * DESCRIPTION: Decides whether the stream callback needs to be switched,
+ *              based on availability of spatial alignment (SAT)
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : bool
+ *              true  -- callback switch is needed
+ *              false -- callback switch is not needed
+ *==========================================================================*/
+bool QCameraStream::needCbSwitch()
+{
+    if (!isDualStream()) {
+        return false;
+    }
+
+    if (mStreamInfo->pp_config.feature_mask == CAM_QTI_FEATURE_SAT) {
+        return false;
+    } else {
+        return true;
+    }
+}
+
+/*===========================================================================
  * FUNCTION   : needFrameSync
  *
  * DESCRIPTION: Function to enable stream frame buffer sync
@@ -2906,12 +2937,12 @@
         param.bundleInfo = bundleInfo;
     }
 
+    memset(&aux_param, 0, sizeof(cam_stream_parm_buffer_t));
     if (isDualStream()) {
         active_handle = get_aux_camera_handle(mChannelHandle);
         memset(&bundleInfo, 0, sizeof(bundleInfo));
         ret = mCamOps->get_bundle_info(mCamHandle, active_handle,
                 &bundleInfo);
-        memset(&aux_param, 0, sizeof(cam_stream_parm_buffer_t));
         aux_param.type = CAM_STREAM_PARAM_TYPE_SET_BUNDLE_INFO;
         aux_param.bundleInfo = bundleInfo;
     }
diff --git a/msmcobalt/QCamera2/HAL/QCameraStream.h b/msmcobalt/QCamera2/HAL/QCameraStream.h
index ea65112..2b723de 100644
--- a/msmcobalt/QCamera2/HAL/QCameraStream.h
+++ b/msmcobalt/QCamera2/HAL/QCameraStream.h
@@ -135,6 +135,7 @@
     int32_t switchStreamCb();
     int32_t processCameraControl(uint32_t camState);
     bool isDualStream(){return mDualStream;};
+    bool needCbSwitch();
     bool needFrameSync();
     //Stream time stamp. We need this for preview stream to update display
     nsecs_t mStreamTimestamp;
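
The QCameraStream changes above fold start_stream_frame_sync/switch_stream_callback into a single handle_frame_sync_cb() call parameterized by a request type, with needFrameSync() and the new needCbSwitch() deciding how dual-camera callbacks are routed. The helper below condenses that decision; the enum mirrors the request-type names used in the hunks, but its values and the single-return shape are illustrative (in the patch, the frame-sync and ALL_CB requests are issued independently in start()):

// Names mirror the constants used above; the values are illustrative.
enum mm_camera_cb_req_type {
    MM_CAMERA_CB_REQ_TYPE_NONE = 0,
    MM_CAMERA_CB_REQ_TYPE_FRAME_SYNC,  // backend pairs frames from both sensors
    MM_CAMERA_CB_REQ_TYPE_ALL_CB,      // deliver both cameras' callbacks to the HAL
    MM_CAMERA_CB_REQ_TYPE_SWITCH       // deliver only the active camera's callback
};

// Condensed routing decision: frame-synced streams let the backend pair
// buffers, SAT streams (spatial alignment owns the blending) keep callbacks
// from both cameras, and every other dual stream switches with the active cam.
static mm_camera_cb_req_type pickCallbackMode(bool dualStream,
                                              bool frameSyncNeeded,
                                              bool satEnabledOnStream)
{
    if (!dualStream)
        return MM_CAMERA_CB_REQ_TYPE_NONE;
    if (frameSyncNeeded)
        return MM_CAMERA_CB_REQ_TYPE_FRAME_SYNC;
    if (satEnabledOnStream)          // needCbSwitch() returns false in this case
        return MM_CAMERA_CB_REQ_TYPE_ALL_CB;
    return MM_CAMERA_CB_REQ_TYPE_SWITCH;
}
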
diff --git a/msmcobalt/QCamera2/HAL3/QCamera3Channel.cpp b/msmcobalt/QCamera2/HAL3/QCamera3Channel.cpp
index e2fcd9f..c81a8e6 100644
--- a/msmcobalt/QCamera2/HAL3/QCamera3Channel.cpp
+++ b/msmcobalt/QCamera2/HAL3/QCamera3Channel.cpp
@@ -212,7 +212,7 @@
  *==========================================================================*/
 int32_t QCamera3Channel::start()
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CH_START);
     int32_t rc = NO_ERROR;
 
     if (m_numStreams > 1) {
@@ -250,7 +250,7 @@
  *==========================================================================*/
 int32_t QCamera3Channel::stop()
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CH_STOP);
     int32_t rc = NO_ERROR;
     if(!m_bIsActive) {
         LOGE("Attempt to stop inactive channel");
@@ -334,7 +334,7 @@
  *==========================================================================*/
 int32_t QCamera3Channel::flush()
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CH_FLUSH);
     return NO_ERROR;
 }
 
@@ -807,7 +807,7 @@
 void QCamera3ProcessingChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
         QCamera3Stream *stream)
 {
-     ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CH_STRM_CB);
     //FIXME Q Buf back in case of error?
     uint8_t frameIndex;
     buffer_handle_t *resultBuffer;
@@ -984,6 +984,10 @@
  * @frameNumber     : frame number of the request
  * @pInputBuffer    : pointer to input buffer if an input request
  * @metadata        : parameters associated with the request
+ * @internalRequest : boolean indicating a purely internal request that needs
+ *                    internal buffer allocation
+ * @meteringOnly    : boolean indicating a metering-only frame (a subset of
+ *                    internal requests) not consumed by the postprocessor
  *
  * RETURN     : 0 on a success start of capture
  *              -EINVAL on invalid input
@@ -993,7 +997,9 @@
         uint32_t frameNumber,
         camera3_stream_buffer_t* pInputBuffer,
         metadata_buffer_t* metadata,
-        int &indexUsed)
+        int &indexUsed,
+        __unused bool internalRequest = false,
+        __unused bool meteringOnly = false)
 {
     int32_t rc = NO_ERROR;
     int index;
@@ -1122,7 +1128,7 @@
 int32_t QCamera3ProcessingChannel::registerBuffer(buffer_handle_t *buffer,
         cam_is_type_t isType)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CH_REG_BUF);
     int rc = 0;
     mIsType = isType;
     cam_stream_type_t streamType;
@@ -1311,7 +1317,7 @@
  *==========================================================================*/
 QCamera3StreamMem* QCamera3ProcessingChannel::getStreamBufs(uint32_t /*len*/)
 {
-    KPI_ATRACE_CALL();
+    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GETSTREAMBUFS);
     return &mMemory;
 }
 
@@ -1577,6 +1583,11 @@
         LOGE("Stream %d plane info calculation failed!", mStreamType);
         return rc;
     }
+
+    IF_META_AVAILABLE(cam_hdr_param_t, hdr_info, CAM_INTF_PARM_HAL_BRACKETING_HDR, metadata) {
+        reproc_cfg.hdr_param = *hdr_info;
+    }
+
     return rc;
 }
 
@@ -1596,7 +1607,7 @@
 void QCamera3ProcessingChannel::reprocessCbRoutine(buffer_handle_t *resultBuffer,
         uint32_t resultFrameNumber)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CH_REPROC_CB);
     int rc = NO_ERROR;
 
     rc = releaseOfflineMemory(resultFrameNumber);
@@ -1795,7 +1806,7 @@
 
 int32_t QCamera3RegularChannel::initialize(cam_is_type_t isType)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REG_CH_INIT);
     int32_t rc = NO_ERROR;
 
     cam_dimension_t streamDim;
@@ -1959,7 +1970,7 @@
  *==========================================================================*/
 int32_t QCamera3RegularChannel::request(buffer_handle_t *buffer, uint32_t frameNumber, int &indexUsed)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REG_CH_REQ);
     //FIX ME: Return buffer back in case of failures below.
 
     int32_t rc = NO_ERROR;
@@ -2061,7 +2072,7 @@
 
 int32_t QCamera3MetadataChannel::initialize(cam_is_type_t isType)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_METADATA_CH_INIT);
     int32_t rc;
     cam_dimension_t streamDim;
 
@@ -2189,7 +2200,7 @@
                         mm_camera_super_buf_t *super_frame,
                         QCamera3Stream * stream)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_RAW_CH_STRM_CB);
     /* Move this back down once verified */
     if (mRawDump)
         dumpRawSnapshot(super_frame->bufs[0]);
@@ -2658,7 +2669,7 @@
  *==========================================================================*/
 int32_t QCamera3YUVChannel::initialize(cam_is_type_t isType)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_YUV_CH_INIT);
     int32_t rc = NO_ERROR;
     cam_dimension_t streamDim;
 
@@ -2734,6 +2745,10 @@
  * @frameNumber     : frame number of the request
  * @pInputBuffer    : pointer to input buffer if an input request
  * @metadata        : parameters associated with the request
+ * @internalRequest : boolean indicating a purely internal request that needs
+ *                    internal buffer allocation
+ * @meteringOnly    : boolean indicating a metering-only frame (a subset of
+ *                    internal requests) not consumed by the postprocessor
  *
  * RETURN     : 0 on a success start of capture
  *              -EINVAL on invalid input
@@ -2743,7 +2758,9 @@
         uint32_t frameNumber,
         camera3_stream_buffer_t* pInputBuffer,
         metadata_buffer_t* metadata, bool &needMetadata,
-        int &indexUsed)
+        int &indexUsed,
+        __unused bool internalRequest = false,
+        __unused bool meteringOnly = false)
 {
     int32_t rc = NO_ERROR;
     Mutex::Autolock lock(mOfflinePpLock);
@@ -2830,7 +2847,7 @@
 void QCamera3YUVChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
         QCamera3Stream *stream)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_YUV_CH_STRM_CB);
     uint8_t frameIndex;
     int32_t resultFrameNumber;
 
@@ -3137,14 +3154,14 @@
                                               mm_jpeg_output_t *p_output,
                                               void *userdata)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PIC_CH_JPEG_EVT_HANDLE);
     buffer_handle_t *resultBuffer = NULL;
     buffer_handle_t *jpegBufferHandle = NULL;
     int resultStatus = CAMERA3_BUFFER_STATUS_OK;
     camera3_stream_buffer_t result;
     camera3_jpeg_blob_t jpegHeader;
 
-    KPI_ATRACE_INT("SNAPSHOT", 0);
+    KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 0);
     QCamera3PicChannel *obj = (QCamera3PicChannel *)userdata;
     if (obj) {
         //Construct payload for process_capture_result. Call mChannelCb
@@ -3375,6 +3392,7 @@
     }
     Mutex::Autolock lock(mFreeBuffersLock);
     mFreeBufferList.clear();
+
     for (uint32_t i = 0; i < mCamera3Stream->max_buffers; i++) {
         mFreeBufferList.push_back(i);
     }
@@ -3440,6 +3458,10 @@
  * @frameNumber  : frame number of the request
  * @pInputBuffer : pointer to input buffer if an input request
  * @metadata     : parameters associated with the request
+ * @internalRequest : boolean indicating a purely internal request that needs
+ *                    internal buffer allocation
+ * @meteringOnly    : boolean indicating a metering-only frame (a subset of
+ *                    internal requests) not consumed by the postprocessor
  *
  * RETURN     : 0 on a success start of capture
  *              -EINVAL on invalid input
@@ -3448,9 +3470,10 @@
 int32_t QCamera3PicChannel::request(buffer_handle_t *buffer,
         uint32_t frameNumber,
         camera3_stream_buffer_t *pInputBuffer,
-        metadata_buffer_t *metadata, int &indexUsed)
+        metadata_buffer_t *metadata, int &indexUsed,
+        bool internalRequest, bool meteringOnly)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PIC_CH_REQ);
     //FIX ME: Return buffer back in case of failures below.
 
     int32_t rc = NO_ERROR;
@@ -3462,6 +3485,7 @@
     //and recalculate the plane info
     dim.width = (int32_t)mYuvWidth;
     dim.height = (int32_t)mYuvHeight;
+
     setReprocConfig(reproc_cfg, pInputBuffer, metadata, mStreamFormat, dim);
 
     // Picture stream has already been started before any request comes in
@@ -3470,31 +3494,37 @@
         return NO_INIT;
     }
 
-    int index = mMemory.getMatchBufIndex((void*)buffer);
-
-    if(index < 0) {
-        rc = registerBuffer(buffer, mIsType);
-        if (NO_ERROR != rc) {
-            LOGE("On-the-fly buffer registration failed %d",
-                     rc);
-            return rc;
-        }
-
-        index = mMemory.getMatchBufIndex((void*)buffer);
-        if (index < 0) {
-            LOGE("Could not find object among registered buffers");
-            return DEAD_OBJECT;
-        }
-    }
-    LOGD("buffer index %d, frameNumber: %u", index, frameNumber);
-
-    rc = mMemory.markFrameNumber((uint32_t)index, frameNumber);
-
     // Start postprocessor
     startPostProc(reproc_cfg);
 
-    // Queue jpeg settings
-    rc = queueJpegSetting((uint32_t)index, metadata);
+    if (!internalRequest) {
+        int index = mMemory.getMatchBufIndex((void*)buffer);
+
+        if(index < 0) {
+            rc = registerBuffer(buffer, mIsType);
+            if (NO_ERROR != rc) {
+                LOGE("On-the-fly buffer registration failed %d",
+                         rc);
+                return rc;
+            }
+
+            index = mMemory.getMatchBufIndex((void*)buffer);
+            if (index < 0) {
+                LOGE("Could not find object among registered buffers");
+                return DEAD_OBJECT;
+            }
+        }
+        LOGD("buffer index %d, frameNumber: %u", index, frameNumber);
+
+        rc = mMemory.markFrameNumber((uint32_t)index, frameNumber);
+
+        // Queue jpeg settings
+        rc = queueJpegSetting((uint32_t)index, metadata);
+
+    } else {
+        LOGD("Internal request @ Picchannel");
+    }
+
 
     if (pInputBuffer == NULL) {
         Mutex::Autolock lock(mFreeBuffersLock);
@@ -3512,7 +3542,11 @@
             bufIdx = *it;
             mFreeBufferList.erase(it);
         }
-        mYuvMemory->markFrameNumber(bufIdx, frameNumber);
+        if (meteringOnly) {
+            mYuvMemory->markFrameNumber(bufIdx, 0xFFFFFFFF);
+        } else {
+            mYuvMemory->markFrameNumber(bufIdx, frameNumber);
+        }
         mStreams[0]->bufDone(bufIdx);
         indexUsed = bufIdx;
     } else {
@@ -3554,7 +3588,7 @@
 void QCamera3PicChannel::dataNotifyCB(mm_camera_super_buf_t *recvd_frame,
                                  void *userdata)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PIC_CH_DATA_NOTIFY_CB);
     LOGD("E\n");
     QCamera3PicChannel *channel = (QCamera3PicChannel *)userdata;
 
@@ -3594,7 +3628,7 @@
 void QCamera3PicChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
                             QCamera3Stream *stream)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PIC_CH_STRM_CB);
     //TODO
     //Used only for getting YUV. Jpeg callback will be sent back from channel
     //directly to HWI. Refer to func jpegEvtHandle
@@ -3626,6 +3660,13 @@
          return;
     }
 
+    if ((uint32_t)mYuvMemory->getFrameNumber(frameIndex) == EMPTY_FRAMEWORK_FRAME_NUMBER) {
+        LOGD("Internal Request recycle frame");
+        Mutex::Autolock lock(mFreeBuffersLock);
+        mFreeBufferList.push_back(frameIndex);
+        return;
+    }
+
     frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
     if (frame == NULL) {
        LOGE("Error allocating memory to save received_frame structure.");
@@ -3725,6 +3766,14 @@
                 sizeof(settings->gps_processing_method));
     }
 
+    settings->hdr_snapshot = 0;
+    IF_META_AVAILABLE(cam_hdr_param_t, hdr_info, CAM_INTF_PARM_HAL_BRACKETING_HDR, metadata) {
+        if (hdr_info->hdr_enable) {
+            settings->hdr_snapshot = 1;
+        }
+    }
+
+
     // Image description
     const char *eepromVersion = hal_obj->getEepromVersionInfo();
     const uint32_t *ldafCalib = hal_obj->getLdafCalib();
@@ -3898,7 +3947,7 @@
 int32_t QCamera3ReprocessChannel::registerBuffer(buffer_handle_t *buffer,
         cam_is_type_t isType)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REPROC_CH_REG_BUF);
     int rc = 0;
     mIsType = isType;
     cam_stream_type_t streamType;
@@ -4147,7 +4196,7 @@
  *==========================================================================*/
 int32_t QCamera3ReprocessChannel::start()
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REPROC_CH_START);
     int32_t rc = NO_ERROR;
 
     rc = QCamera3Channel::start();
@@ -4177,7 +4226,7 @@
  *==========================================================================*/
 int32_t QCamera3ReprocessChannel::stop()
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REPROC_CH_STOP);
     int32_t rc = NO_ERROR;
 
     rc = QCamera3Channel::stop();
@@ -4382,6 +4431,9 @@
                 } else if (jpeg_settings->jpeg_orientation == 270) {
                    rotation_info.rotation = ROTATE_270;
                 }
+                if (jpeg_settings->hdr_snapshot) {
+                    rotation_info.rotation = ROTATE_0;
+                }
                 rotation_info.streamId = mStreams[0]->getMyServerID();
                 ADD_SET_PARAM_ENTRY_TO_BATCH(meta, CAM_INTF_PARM_ROTATION, rotation_info);
             }
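
Among the QCamera3Channel.cpp changes above, the pic channel now accepts internal, metering-only requests: their YUV buffers are tagged with a sentinel frame number (0xFFFFFFFF) so that when the frame returns in streamCbRoutine() it is simply recycled into the free list instead of being reprocessed or reported. A small standalone sketch of that sentinel-recycle bookkeeping; the struct and method names here are stand-ins, not the HAL's classes:

#include <cstdint>
#include <list>
#include <map>

// Assumed to match the sentinel passed to markFrameNumber() above.
static const uint32_t EMPTY_FRAMEWORK_FRAME_NUMBER = 0xFFFFFFFF;

// Minimal stand-in for the per-stream buffer bookkeeping in the pic channel.
struct YuvBufferTracker {
    std::map<uint32_t, uint32_t> frameNumberOfBuf;  // buf index -> frame number
    std::list<uint32_t> freeList;

    // Metering-only frames are tagged with the sentinel so no result is sent.
    void issue(uint32_t bufIdx, uint32_t frameNumber, bool meteringOnly) {
        frameNumberOfBuf[bufIdx] =
                meteringOnly ? EMPTY_FRAMEWORK_FRAME_NUMBER : frameNumber;
    }

    // Stream callback: recycle sentinel-tagged buffers instead of reprocessing.
    bool onFrameDone(uint32_t bufIdx) {
        if (frameNumberOfBuf[bufIdx] == EMPTY_FRAMEWORK_FRAME_NUMBER) {
            freeList.push_back(bufIdx);   // quietly return buffer to the pool
            return false;                 // nothing to deliver upstream
        }
        return true;                      // hand off for JPEG/reprocess as usual
    }
};
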
diff --git a/msmcobalt/QCamera2/HAL3/QCamera3Channel.h b/msmcobalt/QCamera2/HAL3/QCamera3Channel.h
index ff4d656..cbf42bd 100644
--- a/msmcobalt/QCamera2/HAL3/QCamera3Channel.h
+++ b/msmcobalt/QCamera2/HAL3/QCamera3Channel.h
@@ -107,7 +107,9 @@
                 uint32_t /*frameNumber*/,
                 camera3_stream_buffer_t* /*pInputBuffer*/,
                 metadata_buffer_t* /*metadata*/,
-                int & /*indexUsed*/){ return 0;};
+                int & /*indexUsed*/,
+                __unused bool internalRequest = false,
+                __unused bool meteringOnly = false){ return 0;};
     virtual void streamCbRoutine(mm_camera_super_buf_t *super_frame,
                             QCamera3Stream *stream) = 0;
 
@@ -202,7 +204,8 @@
     virtual int32_t request(buffer_handle_t *buffer,
             uint32_t frameNumber,
             camera3_stream_buffer_t* pInputBuffer,
-            metadata_buffer_t* metadata, int &indexUsed);
+            metadata_buffer_t* metadata, int &indexUsed,
+            __unused bool internalRequest, __unused bool meteringOnly);
     virtual void streamCbRoutine(mm_camera_super_buf_t *super_frame,
             QCamera3Stream *stream);
     virtual QCamera3StreamMem *getStreamBufs(uint32_t len);
@@ -438,7 +441,7 @@
             uint32_t frameNumber,
             camera3_stream_buffer_t* pInputBuffer,
             metadata_buffer_t* metadata, bool &needMetadata,
-            int &indexUsed);
+            int &indexUsed, bool internalRequest, bool meteringOnly);
     virtual reprocess_type_t getReprocessType();
     virtual void streamCbRoutine(mm_camera_super_buf_t *super_frame,
             QCamera3Stream *stream);
@@ -504,7 +507,7 @@
             uint32_t frameNumber,
             camera3_stream_buffer_t* pInputBuffer,
             metadata_buffer_t* metadata,
-            int &indexUsed);
+            int &indexUsed, bool internalRequest, bool meteringOnly);
     virtual void streamCbRoutine(mm_camera_super_buf_t *super_frame,
             QCamera3Stream *stream);
 
diff --git a/msmcobalt/QCamera2/HAL3/QCamera3HALHeader.h b/msmcobalt/QCamera2/HAL3/QCamera3HALHeader.h
index 062b14f..8ab9119 100644
--- a/msmcobalt/QCamera2/HAL3/QCamera3HALHeader.h
+++ b/msmcobalt/QCamera2/HAL3/QCamera3HALHeader.h
@@ -4,14 +4,14 @@
 * modification, are permitted provided that the following conditions are
 * met:
 *     * Redistributions of source code must retain the above copyright
-*	notice, this list of conditions and the following disclaimer.
+*       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above
-*	copyright notice, this list of conditions and the following
-*	disclaimer in the documentation and/or other materials provided
-*	with the distribution.
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
 *     * Neither the name of The Linux Foundation nor the names of its
-*	contributors may be used to endorse or promote products derived
-*	from this software without specific prior written permission.
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
@@ -73,6 +73,7 @@
         char gps_processing_method[GPS_PROCESSING_METHOD_SIZE];
         uint8_t image_desc_valid;
         char image_desc[EXIF_IMAGE_DESCRIPTION_SIZE];
+        bool hdr_snapshot;
     } jpeg_settings_t;
 
     typedef struct {
@@ -88,6 +89,7 @@
         cam_dimension_t output_stream_dim;
         cam_padding_info_t *padding;
         reprocess_type_t reprocess_type;
+        cam_hdr_param_t hdr_param;
         QCamera3ProcessingChannel *src_channel;
     } reprocess_config_t;
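
The new hdr_param field in reprocess_config_t and the hdr_snapshot flag in jpeg_settings_t carry the CAM_INTF_PARM_HAL_BRACKETING_HDR state from the capture metadata down to reprocess and JPEG, where (as in the QCamera3ReprocessChannel hunk above) the rotation is pinned to ROTATE_0 for bracketed shots. A reduced sketch of that propagation, with the structs trimmed to the fields the patch actually touches:

#include <cstdint>

// Trimmed stand-ins for the types referenced above.
struct cam_hdr_param_t { uint8_t hdr_enable; };
struct jpeg_settings_t { int jpeg_orientation; bool hdr_snapshot; };
enum cam_rotation_t { ROTATE_0, ROTATE_90, ROTATE_180, ROTATE_270 };

// Mirrors queueJpegSetting(): latch the HDR bracketing state onto the
// per-request JPEG settings when the metadata carries it.
static void latchHdrState(const cam_hdr_param_t *hdr_info, jpeg_settings_t &s)
{
    s.hdr_snapshot = (hdr_info != nullptr) && hdr_info->hdr_enable;
}

// Mirrors the reprocess-channel rotation selection: HDR bracketed snapshots
// are reprocessed unrotated (ROTATE_0) regardless of the JPEG orientation.
static cam_rotation_t pickReprocessRotation(const jpeg_settings_t &s)
{
    if (s.hdr_snapshot)
        return ROTATE_0;
    switch (s.jpeg_orientation) {
        case 90:  return ROTATE_90;
        case 180: return ROTATE_180;
        case 270: return ROTATE_270;
        default:  return ROTATE_0;
    }
}
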
 
diff --git a/msmcobalt/QCamera2/HAL3/QCamera3HWI.cpp b/msmcobalt/QCamera2/HAL3/QCamera3HWI.cpp
index 8fae872..abb2a34 100644
--- a/msmcobalt/QCamera2/HAL3/QCamera3HWI.cpp
+++ b/msmcobalt/QCamera2/HAL3/QCamera3HWI.cpp
@@ -44,6 +44,7 @@
 #include "sys/ioctl.h"
 #include <sync/sync.h>
 #include "gralloc_priv.h"
+#include <map>
 
 // Display dependencies
 #include "qdMetaData.h"
@@ -86,6 +87,7 @@
 #define MIN_FPS_FOR_BATCH_MODE (120)
 #define PREVIEW_FPS_FOR_HFR    (30)
 #define DEFAULT_VIDEO_FPS      (30.0)
+#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
 #define MAX_HFR_BATCH_SIZE     (8)
 #define REGIONS_TUPLE_COUNT    5
 #define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
@@ -446,6 +448,9 @@
     memset(prop, 0, sizeof(prop));
     property_get("persist.camera.raw.dump", prop, "0");
     mEnableRawDump = atoi(prop);
+    property_get("persist.camera.hal3.force.hdr", prop, "0");
+    mForceHdrSnapshot = atoi(prop);
+
     if (mEnableRawDump)
         LOGD("Raw dump from Camera HAL enabled");
 
@@ -457,6 +462,10 @@
     m_bTnrPreview = (uint8_t)atoi(prop);
 
     memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.swtnr.preview", prop, "1");
+    m_bSwTnrPreview = (uint8_t)atoi(prop);
+
+    memset(prop, 0, sizeof(prop));
     property_get("persist.camera.tnr.video", prop, "0");
     m_bTnrVideo = (uint8_t)atoi(prop);
 
@@ -765,7 +774,7 @@
     int rc = 0;
     char value[PROPERTY_VALUE_MAX];
 
-    KPI_ATRACE_CALL();
+    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
     if (mCameraHandle) {
         LOGE("Failure: Camera already opened");
         return ALREADY_EXISTS;
@@ -878,7 +887,7 @@
  *==========================================================================*/
 int QCamera3HardwareInterface::closeCamera()
 {
-    KPI_ATRACE_CALL();
+    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
     int rc = NO_ERROR;
     char value[PROPERTY_VALUE_MAX];
 
@@ -887,7 +896,7 @@
 
     // unmap memory for related cam sync buffer
     mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
-            CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF);
+            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
     if (NULL != m_pDualCamCmdHeap) {
         m_pDualCamCmdHeap->deallocate();
         delete m_pDualCamCmdHeap;
@@ -943,7 +952,7 @@
 int QCamera3HardwareInterface::initialize(
         const struct camera3_callback_ops *callback_ops)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
     int rc;
 
     LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
@@ -1266,6 +1275,10 @@
                     |= CAM_QCOM_FEATURE_LLVD;
             LOGH("Added LLVD SeeMore to pp feature mask");
         }
+        if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
+                CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
+            mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
+        }
         break;
     }
     default:
@@ -1360,7 +1373,7 @@
 int QCamera3HardwareInterface::configureStreams(
         camera3_stream_configuration_t *streamList)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
     int rc = 0;
 
     // Acquire perfLock before configure streams
@@ -1386,7 +1399,7 @@
 int QCamera3HardwareInterface::configureStreamsPerfLocked(
         camera3_stream_configuration_t *streamList)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
     int rc = 0;
 
     // Sanity check stream_list
@@ -1953,6 +1966,10 @@
                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
                                 ~CAM_QCOM_FEATURE_CDS;
                     }
+                    if(!m_bSwTnrPreview) {
+                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
+                                ~CAM_QTI_FEATURE_SW_TNR;
+                    }
                     padding_info.width_padding = mSurfaceStridePadding;
                     padding_info.height_padding = CAM_PAD_TO_2;
                 }
@@ -2418,7 +2435,8 @@
  *
  *==========================================================================*/
 int QCamera3HardwareInterface::validateCaptureRequest(
-                    camera3_capture_request_t *request)
+                    camera3_capture_request_t *request,
+                    List<InternalRequest> &internallyRequestedStreams)
 {
     ssize_t idx = 0;
     const camera3_stream_buffer_t *b;
@@ -2436,7 +2454,8 @@
     }
 
     uint32_t frameNumber = request->frame_number;
-    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
+    if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
+            && (internallyRequestedStreams.size() == 0)) {
         LOGE("Request %d: No output buffers provided!",
                 __FUNCTION__, frameNumber);
         return BAD_VALUE;
@@ -2467,7 +2486,7 @@
 
     // Validate all buffers
     b = request->output_buffers;
-    do {
+    while (idx < (ssize_t)request->num_output_buffers) {
         QCamera3ProcessingChannel *channel =
                 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
         if (channel == NULL) {
@@ -2497,8 +2516,7 @@
         }
         idx++;
         b = request->output_buffers + idx;
-    } while (idx < (ssize_t)request->num_output_buffers);
-
+    }
     return NO_ERROR;
 }
 
@@ -2671,7 +2689,7 @@
     for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
             j != mPendingReprocessResultList.end(); j++) {
         if (j->frame_number == frame_number) {
-            mCallbackOps->notify(mCallbackOps, &j->notify_msg);
+            orchestrateNotify(&j->notify_msg);
 
             LOGD("Delayed reprocess notify %d",
                     frame_number);
@@ -2692,7 +2710,7 @@
                     result.input_buffer = k->input_buffer;
                     result.result = k->settings;
                     result.partial_result = PARTIAL_RESULT_COUNT;
-                    mCallbackOps->process_capture_result(mCallbackOps, &result);
+                    orchestrateResult(&result);
 
                     erasePendingRequest(k);
                     break;
@@ -2720,7 +2738,7 @@
 void QCamera3HardwareInterface::handleBatchMetadata(
         mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
 
     if (NULL == metadata_buf) {
         LOGE("metadata_buf is NULL");
@@ -2891,7 +2909,7 @@
     notify_msg.message.error.error_code = errorCode;
     notify_msg.message.error.error_stream = NULL;
     notify_msg.message.error.frame_number = frameNumber;
-    mCallbackOps->notify(mCallbackOps, &notify_msg);
+    orchestrateNotify(&notify_msg);
 
     return;
 }
@@ -2913,7 +2931,7 @@
     mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
     bool firstMetadataInBatch)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
     if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
         //during flush do not send metadata from this thread
         LOGD("not sending metadata during flush or when mState is error");
@@ -3016,7 +3034,7 @@
                 result.output_buffers = NULL;
                 result.partial_result = i->partial_result_cnt;
 
-                mCallbackOps->process_capture_result(mCallbackOps, &result);
+                orchestrateResult(&result);
                 LOGD("urgent frame_number = %u, capture_time = %lld",
                       result.frame_number, capture_time);
                 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
@@ -3094,7 +3112,7 @@
                     notify_msg.message.error.frame_number = i->frame_number;
                     notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
                     notify_msg.message.error.error_stream = j->stream;
-                    mCallbackOps->notify(mCallbackOps, &notify_msg);
+                    orchestrateNotify(&notify_msg);
                     if (p_cam_frame_drop) {
                         // Treat msg as error for system buffer drops
                         LOGE("End of reporting error frame#=%u, streamID=%u",
@@ -3144,7 +3162,7 @@
             notify_msg.type = CAMERA3_MSG_SHUTTER;
             notify_msg.message.shutter.frame_number = i->frame_number;
             notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
-            mCallbackOps->notify(mCallbackOps, &notify_msg);
+            orchestrateNotify(&notify_msg);
 
             i->timestamp = capture_time;
 
@@ -3169,6 +3187,18 @@
                 }
             }
 
+            for (auto itr = i->internalRequestList.begin();
+                  itr != i->internalRequestList.end(); itr++) {
+                if (itr->need_metadata) {
+                    internalPproc = true;
+                    QCamera3ProcessingChannel *channel =
+                            (QCamera3ProcessingChannel *)itr->stream->priv;
+                    channel->queueReprocMetadata(metadata_buf);
+                    break;
+                }
+            }
+
+
             result.result = translateFromHalMetadata(metadata,
                     i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
                     i->capture_intent, internalPproc, i->fwkCacMode,
@@ -3247,7 +3277,7 @@
                 }
 
                 result.output_buffers = result_buffers;
-                mCallbackOps->process_capture_result(mCallbackOps, &result);
+                orchestrateResult(&result);
                 LOGD("meta frame_number = %u, capture_time = %lld",
                         result.frame_number, i->timestamp);
                 free_camera_metadata((camera_metadata_t *)result.result);
@@ -3256,7 +3286,7 @@
                 LOGE("Fatal error: out of memory");
             }
         } else {
-            mCallbackOps->process_capture_result(mCallbackOps, &result);
+            orchestrateResult(&result);
             LOGD("meta frame_number = %u, capture_time = %lld",
                     result.frame_number, i->timestamp);
             free_camera_metadata((camera_metadata_t *)result.result);
@@ -3328,7 +3358,7 @@
  *==========================================================================*/
 void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
     pendingRequestIterator i = mPendingRequestsList.begin();
     while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
         i++;
@@ -3354,7 +3384,7 @@
             notify_msg.type = CAMERA3_MSG_SHUTTER;
             notify_msg.message.shutter.frame_number = frame_number;
             notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
-            mCallbackOps->notify(mCallbackOps, &notify_msg);
+            orchestrateNotify(&notify_msg);
             i->shutter_notified = true;
             LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
                         i->frame_number, notify_msg.message.shutter.timestamp);
@@ -3375,7 +3405,7 @@
         result.input_buffer = i->input_buffer;
         result.partial_result = PARTIAL_RESULT_COUNT;
 
-        mCallbackOps->process_capture_result(mCallbackOps, &result);
+        orchestrateResult(&result);
         LOGD("Input request metadata and input buffer frame_number = %u",
                         i->frame_number);
         i = erasePendingRequest(i);
@@ -3398,7 +3428,7 @@
 void QCamera3HardwareInterface::handleBufferWithLock(
     camera3_stream_buffer_t *buffer, uint32_t frame_number)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
 
     if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
         mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
@@ -3454,7 +3484,7 @@
 
         mPendingBuffersMap.removeBuf(buffer->buffer);
 
-        mCallbackOps->process_capture_result(mCallbackOps, &result);
+        orchestrateResult(&result);
     } else {
         if (i->input_buffer) {
             CameraMetadata settings;
@@ -3495,8 +3525,8 @@
             result.output_buffers = buffer;
             result.partial_result = PARTIAL_RESULT_COUNT;
 
-            mCallbackOps->notify(mCallbackOps, &notify_msg);
-            mCallbackOps->process_capture_result(mCallbackOps, &result);
+            orchestrateNotify(&notify_msg);
+            orchestrateResult(&result);
             LOGD("Notify reprocess now %d!", frame_number);
             i = erasePendingRequest(i);
         } else {
@@ -3547,6 +3577,388 @@
    pthread_cond_signal(&mRequestCond);
 }
 
+/*===========================================================================
+ * FUNCTION   : isHdrSnapshotRequest
+ *
+ * DESCRIPTION: Function to determine if the request is for an HDR snapshot
+ *
+ * PARAMETERS : camera3 request structure
+ *
+ * RETURN     : boolean decision variable
+ *
+ *==========================================================================*/
+bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
+{
+    if (request == NULL) {
+        LOGE("Invalid request handle");
+        assert(0);
+        return false;
+    }
+
+    if (!mForceHdrSnapshot) {
+        CameraMetadata frame_settings;
+        frame_settings = request->settings;
+
+        if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
+            uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
+            if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
+                return false;
+            }
+        } else {
+            return false;
+        }
+
+        if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
+            uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
+            if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
+                return false;
+            }
+        } else {
+            return false;
+        }
+    }
+
+    for (uint32_t i = 0; i < request->num_output_buffers; i++) {
+        if (request->output_buffers[i].stream->format
+                == HAL_PIXEL_FORMAT_BLOB) {
+            return true;
+        }
+    }
+
+    return false;
+}
+/*===========================================================================
+ * FUNCTION   : orchestrateRequest
+ *
+ * DESCRIPTION: Orchestrates a capture request from camera service
+ *
+ * PARAMETERS :
+ *   @request : request from framework to process
+ *
+ * RETURN     : Error status codes
+ *
+ *==========================================================================*/
+int32_t QCamera3HardwareInterface::orchestrateRequest(
+        camera3_capture_request_t *request)
+{
+
+    uint32_t originalFrameNumber = request->frame_number;
+    uint32_t originalOutputCount = request->num_output_buffers;
+    const camera_metadata_t *original_settings = request->settings;
+    List<InternalRequest> internallyRequestedStreams;
+    List<InternalRequest> emptyInternalList;
+
+    if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
+        LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
+        uint32_t internalFrameNumber;
+        CameraMetadata modified_meta;
+
+
+        /* Add Blob channel to list of internally requested streams */
+        for (uint32_t i = 0; i < request->num_output_buffers; i++) {
+            if (request->output_buffers[i].stream->format
+                    == HAL_PIXEL_FORMAT_BLOB) {
+                InternalRequest streamRequested;
+                streamRequested.meteringOnly = 1;
+                streamRequested.need_metadata = 0;
+                streamRequested.stream = request->output_buffers[i].stream;
+                internallyRequestedStreams.push_back(streamRequested);
+            }
+        }
+        request->num_output_buffers = 0;
+        auto itr =  internallyRequestedStreams.begin();
+
+        /* Modify setting to set compensation */
+        modified_meta = request->settings;
+        int32_t expCompensation = GB_HDR_HALF_STEP_EV;
+        uint8_t aeLock = 1;
+        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
+        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
+        camera_metadata_t *modified_settings = modified_meta.release();
+        request->settings = modified_settings;
+
+        /* Capture Settling & -2x frame */
+        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
+        request->frame_number = internalFrameNumber;
+        processCaptureRequest(request, internallyRequestedStreams);
+
+        request->num_output_buffers = originalOutputCount;
+        _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
+        request->frame_number = internalFrameNumber;
+        processCaptureRequest(request, emptyInternalList);
+        request->num_output_buffers = 0;
+
+        modified_meta = modified_settings;
+        expCompensation = 0;
+        aeLock = 1;
+        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
+        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
+        modified_settings = modified_meta.release();
+        request->settings = modified_settings;
+
+        /* Capture Settling & 0X frame */
+
+        itr =  internallyRequestedStreams.begin();
+        if (itr == internallyRequestedStreams.end()) {
+            LOGE("Error Internally Requested Stream list is empty");
+            assert(0);
+        } else {
+            itr->need_metadata = 0;
+            itr->meteringOnly = 1;
+        }
+
+        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
+        request->frame_number = internalFrameNumber;
+        processCaptureRequest(request, internallyRequestedStreams);
+
+        itr =  internallyRequestedStreams.begin();
+        if (itr == internallyRequestedStreams.end()) {
+            ALOGE("Error Internally Requested Stream list is empty");
+            assert(0);
+        } else {
+            itr->need_metadata = 1;
+            itr->meteringOnly = 0;
+        }
+
+        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
+        request->frame_number = internalFrameNumber;
+        processCaptureRequest(request, internallyRequestedStreams);
+
+        /* Capture 2X frame*/
+        modified_meta = modified_settings;
+        expCompensation = GB_HDR_2X_STEP_EV;
+        aeLock = 1;
+        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
+        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
+        modified_settings = modified_meta.release();
+        request->settings = modified_settings;
+
+        itr =  internallyRequestedStreams.begin();
+        if (itr == internallyRequestedStreams.end()) {
+            ALOGE("Error Internally Requested Stream list is empty");
+            assert(0);
+        } else {
+            itr->need_metadata = 0;
+            itr->meteringOnly = 1;
+        }
+        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
+        request->frame_number = internalFrameNumber;
+        processCaptureRequest(request, internallyRequestedStreams);
+
+        itr =  internallyRequestedStreams.begin();
+        if (itr == internallyRequestedStreams.end()) {
+            ALOGE("Error Internally Requested Stream list is empty");
+            assert(0);
+        } else {
+            itr->need_metadata = 1;
+            itr->meteringOnly = 0;
+        }
+
+        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
+        request->frame_number = internalFrameNumber;
+        processCaptureRequest(request, internallyRequestedStreams);
+
+
+        /* Capture 2X on original streaming config*/
+        internallyRequestedStreams.clear();
+
+        /* Restore original settings pointer */
+        request->settings = original_settings;
+    } else {
+        uint32_t internalFrameNumber;
+        _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
+        request->frame_number = internalFrameNumber;
+        return processCaptureRequest(request, internallyRequestedStreams);
+    }
+
+    return NO_ERROR;
+}
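
orchestrateRequest() above expands one framework HDR snapshot into a fixed sequence of internal captures: metering-only frames to settle AE at the under-exposed step, the real 0 EV capture, then the over-exposed step, all with AE locked and each internal capture tracked through _orchestrationDb. A hedged helper showing just the settings manipulation that sequence repeats; the include paths and the clone step are assumptions for a standalone build, and the patch itself mutates the request's settings in place using the GB_HDR_* step constants:

#include <camera/CameraMetadata.h>      // android::CameraMetadata
#include <system/camera_metadata.h>     // ANDROID_CONTROL_* tags

using android::CameraMetadata;

// Produces a settings buffer biased by 'evSteps' exposure-compensation units
// with AE locked, as done before each bracketed internal capture above.
// The caller owns the returned buffer (same contract as CameraMetadata::release()).
static camera_metadata_t *makeBracketedSettings(const camera_metadata_t *base,
                                                int32_t evSteps)
{
    CameraMetadata meta(clone_camera_metadata(base));
    uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_ON;
    meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &evSteps, 1);
    meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
    return meta.release();
}
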
+
+/*===========================================================================
+ * FUNCTION   : orchestrateResult
+ *
+ * DESCRIPTION: Orchestrates a capture result to camera service
+ *
+ * PARAMETERS :
+ *   @result  : capture result to be sent back to camera service
+ *
+ * RETURN     :
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::orchestrateResult(
+                    camera3_capture_result_t *result)
+{
+    uint32_t frameworkFrameNumber;
+    int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
+            frameworkFrameNumber);
+    if (rc != NO_ERROR) {
+        LOGE("Cannot find translated frameworkFrameNumber");
+        assert(0);
+    } else {
+        if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
+            LOGD("CAM_DEBUG Internal Request drop the result");
+        } else {
+            result->frame_number = frameworkFrameNumber;
+            mCallbackOps->process_capture_result(mCallbackOps, result);
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : orchestrateNotify
+ *
+ * DESCRIPTION: Orchestrates a notify to camera service
+ *
+ * PARAMETERS :
+ *   @notify_msg : notify message to be sent back to camera service
+ *
+ * RETURN     :
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
+{
+    uint32_t frameworkFrameNumber;
+    uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
+    int32_t rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
+                                                          frameworkFrameNumber);
+    if (rc != NO_ERROR) {
+        LOGE("Cannot find translated frameworkFrameNumber");
+        assert(0);
+    } else {
+        if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
+            LOGE("CAM_DEBUG Internal Request drop the notifyCb");
+        } else {
+            notify_msg->message.shutter.frame_number = frameworkFrameNumber;
+            mCallbackOps->notify(mCallbackOps, notify_msg);
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : FrameNumberRegistry
+ *
+ * DESCRIPTION: Constructor
+ *
+ * PARAMETERS :
+ *
+ * RETURN     :
+ *
+ *==========================================================================*/
+FrameNumberRegistry::FrameNumberRegistry()
+{
+    _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
+}
+
+/*===========================================================================
+ * FUNCTION   : ~FrameNumberRegistry
+ *
+ * DESCRIPTION: Destructor
+ *
+ * PARAMETERS :
+ *
+ * RETURN     :
+ *
+ *==========================================================================*/
+FrameNumberRegistry::~FrameNumberRegistry()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : purgeOldEntriesLocked
+ *
+ * DESCRIPTION: Maintenance function that triggers the LRU cleanup mechanism
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : NONE
+ *
+ *==========================================================================*/
+void FrameNumberRegistry::purgeOldEntriesLocked()
+{
+    while (_register.begin() != _register.end()) {
+        auto itr = _register.begin();
+        if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
+            _register.erase(itr);
+        } else {
+            return;
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : allocStoreInternalFrameNumber
+ *
+ * DESCRIPTION: Method to record a framework request and associate a newly
+ *              generated internal frame number with it
+ *
+ * PARAMETERS :
+ *   @frameworkFrameNumber: Identifier given by the framework
+ *   @internalFrameNumber : Output parameter that receives the newly generated
+ *                          internal frame number
+ *
+ * RETURN     : Error code
+ *
+ *==========================================================================*/
+int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
+                                                            uint32_t &internalFrameNumber)
+{
+    Mutex::Autolock lock(mRegistryLock);
+    internalFrameNumber = _nextFreeInternalNumber++;
+    LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
+    _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
+    purgeOldEntriesLocked();
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : generateStoreInternalFrameNumber
+ *
+ * DESCRIPTION: Method to generate a new internal frame number that is not
+ *              associated with any framework request
+ *
+ * PARAMETERS :
+ *   @internalFrameNumber: Output parameter that receives the newly generated
+ *                         internal frame number
+ *
+ * RETURN     : Error code
+ *
+ *==========================================================================*/
+int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
+{
+    Mutex::Autolock lock(mRegistryLock);
+    internalFrameNumber = _nextFreeInternalNumber++;
+    LOGD("Generated internal framenumber:%d", internalFrameNumber);
+    _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
+    purgeOldEntriesLocked();
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFrameworkFrameNumber
+ *
+ * DESCRIPTION: Method to query the framework frame number given an internal one
+ *
+ * PARAMETERS :
+ *   @internalFrameNumber : Internal frame number to look up
+ *   @frameworkFrameNumber: Output parameter holding the matching framework
+ *                          frame number
+ *
+ * RETURN     : Error code
+ *
+ *==========================================================================*/
+int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
+                                                     uint32_t &frameworkFrameNumber)
+{
+    Mutex::Autolock lock(mRegistryLock);
+    auto itr = _register.find(internalFrameNumber);
+    if (itr == _register.end()) {
+        LOGE("CAM_DEBUG: Cannot find internal#: %d", internalFrameNumber);
+        return -ENOENT;
+    }
+
+    frameworkFrameNumber = itr->second;
+    purgeOldEntriesLocked();
+    return NO_ERROR;
+}
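
FrameNumberRegistry above is the piece that lets the HAL issue those extra internal captures without confusing the framework: every capture gets an internal frame number, real requests map back to the framework's number, internal-only captures map to EMPTY_FRAMEWORK_FRAME_NUMBER and are dropped in orchestrateResult()/orchestrateNotify(), and an LRU purge keeps the map bounded. A trimmed standalone equivalent using std::mutex in place of Mutex::Autolock; the starting number, window size and sentinel value are illustrative:

#include <cstdint>
#include <map>
#include <mutex>

class FrameNumberRegistrySketch {
public:
    static constexpr uint32_t kEmptyFrameworkFrameNumber = 0xFFFFFFFFU;

    // allocStoreInternalFrameNumber(): remember which framework request an
    // internal frame number belongs to.
    uint32_t store(uint32_t frameworkFrameNumber) {
        std::lock_guard<std::mutex> lock(mLock);
        uint32_t internal = mNextFreeInternal++;
        mRegister[internal] = frameworkFrameNumber;
        purgeOldLocked();
        return internal;
    }

    // generateStoreInternalFrameNumber(): internal-only capture; results and
    // notifies for it are dropped before they reach the framework.
    uint32_t generate() { return store(kEmptyFrameworkFrameNumber); }

    // getFrameworkFrameNumber(): translate back; false maps to -ENOENT above.
    bool lookup(uint32_t internal, uint32_t &framework) {
        std::lock_guard<std::mutex> lock(mLock);
        auto it = mRegister.find(internal);
        if (it == mRegister.end())
            return false;
        framework = it->second;
        return true;
    }

private:
    // purgeOldEntriesLocked(): drop entries that fell out of the LRU window.
    void purgeOldLocked() {
        while (!mRegister.empty() &&
               mRegister.begin()->first + kLruWindow < mNextFreeInternal)
            mRegister.erase(mRegister.begin());
    }

    static constexpr uint32_t kLruWindow = 256;    // illustrative window size
    std::mutex mLock;
    std::map<uint32_t, uint32_t> mRegister;        // internal -> framework
    uint32_t mNextFreeInternal = 1000;             // illustrative starting number
};
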
 
 /*===========================================================================
  * FUNCTION   : processCaptureRequest
@@ -3560,9 +3972,10 @@
  *
  *==========================================================================*/
 int QCamera3HardwareInterface::processCaptureRequest(
-                    camera3_capture_request_t *request)
+                    camera3_capture_request_t *request,
+                    List<InternalRequest> &internallyRequestedStreams)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
     int rc = NO_ERROR;
     int32_t request_id;
     CameraMetadata meta;
@@ -3589,7 +4002,7 @@
             return -ENODEV;
     }
 
-    rc = validateCaptureRequest(request);
+    rc = validateCaptureRequest(request, internallyRequestedStreams);
     if (rc != NO_ERROR) {
         LOGE("incoming request is not valid");
         pthread_mutex_unlock(&mMutex);
@@ -4100,7 +4513,7 @@
         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
 
         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
-            //Call function to store local copy of jpeg data for encode params.
+            //FIXME??:Call function to store local copy of jpeg data for encode params.
             blob_request = 1;
             snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
         }
@@ -4123,8 +4536,20 @@
         }
     }
 
+    //FIXME: Add checks in validateCaptureRequest to ensure there are no dups
+    for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
+          itr++) {
+        QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
+        streamsArray.stream_request[streamsArray.num_streams++].streamID =
+            channel->getStreamID(channel->getStreamTypeMask());
+
+        if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
+            isVidBufRequested = true;
+        }
+    }
+
     if (blob_request) {
-        KPI_ATRACE_INT("SNAPSHOT", 1);
+        KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
         mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
     }
     if (blob_request && mRawDumpChannel) {
@@ -4309,7 +4734,7 @@
                       output.buffer, request->input_buffer, frameNumber);
             if(request->input_buffer != NULL){
                 rc = channel->request(output.buffer, frameNumber,
-                        pInputBuffer, &mReprocMeta, indexUsed);
+                        pInputBuffer, &mReprocMeta, indexUsed, false, false);
                 if (rc < 0) {
                     LOGE("Fail to request on picture channel");
                     pthread_mutex_unlock(&mMutex);
@@ -4354,8 +4779,8 @@
             bool needMetadata = false;
             QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
             rc = yuvChannel->request(output.buffer, frameNumber,
-                    pInputBuffer,
-                    (pInputBuffer ? &mReprocMeta : mParameters), needMetadata, indexUsed);
+                    pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
+                    needMetadata, indexUsed, false, false);
             if (rc < 0) {
                 LOGE("Fail to request on YUV channel");
                 pthread_mutex_unlock(&mMutex);
@@ -4421,6 +4846,76 @@
         pendingBufferIter++;
     }
 
+    for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
+          itr++) {
+        QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
+
+        if (channel == NULL) {
+            LOGE("invalid channel pointer for stream");
+            assert(0);
+            return BAD_VALUE;
+        }
+
+        InternalRequest requestedStream;
+        requestedStream = (*itr);
+
+
+        if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
+            LOGD("snapshot request internally input buffer %p, frame_number %d",
+                      request->input_buffer, frameNumber);
+            if(request->input_buffer != NULL){
+                rc = channel->request(NULL, frameNumber,
+                        pInputBuffer, &mReprocMeta, indexUsed, true, requestedStream.meteringOnly);
+                if (rc < 0) {
+                    LOGE("Fail to request on picture channel");
+                    pthread_mutex_unlock(&mMutex);
+                    return rc;
+                }
+            } else {
+                LOGD("snapshot request with frame_number %d", frameNumber);
+                if (!request->settings) {
+                    rc = channel->request(NULL, frameNumber,
+                            NULL, mPrevParameters, indexUsed, true, requestedStream.meteringOnly);
+                } else {
+                    rc = channel->request(NULL, frameNumber,
+                            NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
+                }
+                if (rc < 0) {
+                    LOGE("Fail to request on picture channel");
+                    pthread_mutex_unlock(&mMutex);
+                    return rc;
+                }
+
+                if ((*itr).meteringOnly != 1) {
+                    requestedStream.need_metadata = 1;
+                    streams_need_metadata++;
+                }
+            }
+
+            uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
+            uint32_t j = 0;
+            for (j = 0; j < streamsArray.num_streams; j++) {
+                if (streamsArray.stream_request[j].streamID == streamId) {
+                    if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
+                        streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
+                    else
+                        streamsArray.stream_request[j].buf_index = indexUsed;
+                    break;
+                }
+            }
+            if (j == streamsArray.num_streams) {
+                LOGE("Did not find matching stream to update index");
+                assert(0);
+            }
+
+        } else {
+            LOGE("Internal requests not supported on this stream type");
+            assert(0);
+            return INVALID_OPERATION;
+        }
+        latestRequest->internalRequestList.push_back(requestedStream);
+    }
+
     //If 2 streams have need_metadata set to true, fail the request, unless
     //we copy/reference count the metadata buffer
     if (streams_need_metadata > 1) {
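
The hunk above submits every internally requested stream collected by validateCaptureRequest to the backend alongside the framework streams, then issues the per-stream requests for them (BLOB streams only; metering-only passes skip metadata and are never returned to the framework). A minimal sketch of how such an entry could be populated, using the InternalRequest type declared later in QCamera3HWI.h; snapshotStream is a hypothetical camera3_stream_t* with HAL_PIXEL_FORMAT_BLOB format:

    // Illustrative sketch only, not part of the patch.
    QCamera3HardwareInterface::InternalRequest ir;
    ir.stream        = snapshotStream;  // hypothetical BLOB stream
    ir.need_metadata = false;           // set by processCaptureRequest when reprocessing needs it
    ir.meteringOnly  = true;            // consumed internally, no framework buffer is produced
    internallyRequestedStreams.push_back(ir);
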
@@ -4617,7 +5112,7 @@
  *==========================================================================*/
 int QCamera3HardwareInterface::flush(bool restartChannels)
 {
-    KPI_ATRACE_CALL();
+    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_FLUSH);
     int32_t rc = NO_ERROR;
 
     LOGD("Unblocking Process Capture Request");
@@ -4727,7 +5222,7 @@
  *==========================================================================*/
 int QCamera3HardwareInterface::flushPerf()
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_FLUSH_PREF);
     int32_t rc = 0;
     struct timespec timeout;
     bool timed_wait = false;
@@ -4841,22 +5336,25 @@
 {
     int32_t rc = NO_ERROR;
 
-    pthread_mutex_lock(&mMutex);
-    if (mState != ERROR) {
-        //if mState != ERROR, nothing to be done
+    {
+        Mutex::Autolock lock(mFlushLock);
+        pthread_mutex_lock(&mMutex);
+        if (mState != ERROR) {
+            //if mState != ERROR, nothing to be done
+            pthread_mutex_unlock(&mMutex);
+            return NO_ERROR;
+        }
         pthread_mutex_unlock(&mMutex);
-        return NO_ERROR;
-    }
-    pthread_mutex_unlock(&mMutex);
 
-    rc = flush(false /* restart channels */);
-    if (NO_ERROR != rc) {
-        LOGE("internal flush to handle mState = ERROR failed");
-    }
+        rc = flush(false /* restart channels */);
+        if (NO_ERROR != rc) {
+            LOGE("internal flush to handle mState = ERROR failed");
+        }
 
-    pthread_mutex_lock(&mMutex);
-    mState = DEINIT;
-    pthread_mutex_unlock(&mMutex);
+        pthread_mutex_lock(&mMutex);
+        mState = DEINIT;
+        pthread_mutex_unlock(&mMutex);
+    }
 
     camera3_notify_msg_t notify_msg;
     memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
@@ -4864,7 +5362,7 @@
     notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
     notify_msg.message.error.error_stream = NULL;
     notify_msg.message.error.frame_number = 0;
-    mCallbackOps->notify(mCallbackOps, &notify_msg);
+    orchestrateNotify(&notify_msg);
 
     return rc;
 }
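
handleCameraDeviceError now serializes error handling behind mFlushLock so only one error-triggered flush can run at a time, and the final CAMERA3_MSG_ERROR_DEVICE notification is routed through orchestrateNotify instead of calling mCallbackOps->notify directly. The implementation of orchestrateNotify is not part of this excerpt; a rough sketch of the shape it suggests, assuming it remaps internal frame numbers through the FrameNumberRegistry declared later in this patch:

    // Assumed sketch, not the verbatim implementation.
    void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
    {
        uint32_t fwkFrameNumber = 0;
        if ((notify_msg->type == CAMERA3_MSG_ERROR) &&
                (_orchestrationDb.getFrameworkFrameNumber(
                        notify_msg->message.error.frame_number, fwkFrameNumber) == NO_ERROR)) {
            // Translate an internally generated frame number back to the
            // framework's numbering before forwarding the message.
            notify_msg->message.error.frame_number = fwkFrameNumber;
        }
        mCallbackOps->notify(mCallbackOps, notify_msg);
    }
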
@@ -8445,7 +8943,7 @@
 int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
         struct camera_info *info)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
     int rc = 0;
 
     pthread_mutex_lock(&gCamLock);
@@ -8794,11 +9292,16 @@
     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
 
     /*target fps range: use maximum range for picture, and maximum fixed range for video*/
+    /* Restrict template max_fps to 30 */
     float max_range = 0.0;
     float max_fixed_fps = 0.0;
     int32_t fps_range[2] = {0, 0};
     for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
             i++) {
+        if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
+                TEMPLATE_MAX_PREVIEW_FPS) {
+            continue;
+        }
         float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
             gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
         if (type == CAMERA3_TEMPLATE_PREVIEW ||
@@ -9559,6 +10062,7 @@
             expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
         if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
             expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
+        ALOGE("CAM_DEBUG: Setting compensation:%d", expCompensation);
         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
                 expCompensation)) {
             rc = BAD_VALUE;
@@ -10487,6 +10991,7 @@
                     camera3_capture_request_t *request)
 {
     LOGD("E");
+    CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
     QCamera3HardwareInterface *hw =
         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
     if (!hw) {
@@ -10494,7 +10999,7 @@
         return -EINVAL;
     }
 
-    int rc = hw->processCaptureRequest(request);
+    int rc = hw->orchestrateRequest(request);
     LOGD("X");
     return rc;
 }
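
The camera3 entry point now routes through orchestrateRequest() rather than calling processCaptureRequest() directly. Together with the header additions further down (FrameNumberRegistry, InternalRequest, the EV-step macros), this implies a flow roughly like the sketch below; it is an assumed outline, not the patch's actual implementation:

    // Assumed outline of the orchestration layer.
    int QCamera3HardwareInterface::orchestrateRequest(camera3_capture_request_t *request)
    {
        // Map the framework frame number to an internal one; internal numbering
        // starts at INTERNAL_FRAME_STARTING_NUMBER.
        uint32_t internalFrameNumber = 0;
        _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number,
                internalFrameNumber);
        request->frame_number = internalFrameNumber;

        List<InternalRequest> internalReqs;
        if (isHdrSnapshotRequest(request)) {
            // Expand into bracketed passes here (EV steps such as
            // GB_HDR_HALF_STEP_EV / GB_HDR_2X_STEP_EV), each carrying a frame
            // number obtained from generateStoreInternalFrameNumber().
        }
        return processCaptureRequest(request, internalReqs);
    }
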
@@ -10602,6 +11107,7 @@
     LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
     delete hw;
     LOGI("[KPI Perf]: X");
+    CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
     return ret;
 }
 
@@ -10702,6 +11208,17 @@
                 rc = BAD_VALUE;
             }
         }
+
+        if (fwk_sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
+            cam_hdr_param_t hdr_params;
+            hdr_params.hdr_enable = 1;
+            hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
+            hdr_params.hdr_need_1x = false;
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
+                    CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
+                rc = BAD_VALUE;
+            }
+        }
     } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
             (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
         uint8_t sceneMode = CAM_SCENE_MODE_OFF;
@@ -10711,6 +11228,18 @@
             rc = BAD_VALUE;
         }
     }
+
+    if (mForceHdrSnapshot) {
+        cam_hdr_param_t hdr_params;
+        hdr_params.hdr_enable = 1;
+        hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
+        hdr_params.hdr_need_1x = false;
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
+                CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
+            rc = BAD_VALUE;
+        }
+    }
+
     return rc;
 }
 
@@ -10834,7 +11363,7 @@
  *==========================================================================*/
 bool QCamera3HardwareInterface::needJpegExifRotation()
 {
-   /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
+    /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
     if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
        LOGD("Need use Jpeg EXIF Rotation");
        return true;
@@ -10891,6 +11420,18 @@
         pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
     }
 
+    if (config.hdr_param.hdr_enable) {
+        pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
+        pp_config.hdr_param = config.hdr_param;
+    }
+
+    if (mForceHdrSnapshot) {
+        pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
+        pp_config.hdr_param.hdr_enable = 1;
+        pp_config.hdr_param.hdr_need_1x = 0;
+        pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
+    }
+
     rc = pChannel->addReprocStreamsFromSource(pp_config,
             config,
             IS_TYPE_NONE,
@@ -11119,7 +11660,7 @@
  *==========================================================================*/
 int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
     int rc = NO_ERROR;
 
     LOGD("E");
@@ -11336,7 +11877,7 @@
                 pStream_Buf[index].buffer = info->buffer;
                 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                 pStream_Buf[index].stream = info->stream;
-                mCallbackOps->notify(mCallbackOps, &notify_msg);
+                orchestrateNotify(&notify_msg);
                 index++;
                 // Remove buffer from list
                 info = req->mPendingBufferList.erase(info);
@@ -11347,7 +11888,7 @@
                 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
             req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
 
-            mCallbackOps->process_capture_result(mCallbackOps, &result);
+            orchestrateResult(&result);
 
             delete [] pStream_Buf;
         } else {
@@ -11364,7 +11905,7 @@
             notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
             notify_msg.message.error.error_stream = NULL;
             notify_msg.message.error.frame_number = req->frame_number;
-            mCallbackOps->notify(mCallbackOps, &notify_msg);
+            orchestrateNotify(&notify_msg);
 
             pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
             if (NULL == pStream_Buf) {
@@ -11397,7 +11938,7 @@
                 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
             req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
 
-            mCallbackOps->process_capture_result(mCallbackOps, &result);
+            orchestrateResult(&result);
             delete [] pStream_Buf;
             i = erasePendingRequest(i);
         }
@@ -11420,8 +11961,10 @@
         const cam_dimension_t max_viewfinder_size,
         uint32_t width, uint32_t height)
 {
-    return (width > (uint32_t)max_viewfinder_size.width ||
-            height > (uint32_t)max_viewfinder_size.height);
+    return ((width > (uint32_t)max_viewfinder_size.width) ||
+            (height > (uint32_t)max_viewfinder_size.height) ||
+            (width > (uint32_t)VIDEO_4K_WIDTH) ||
+            (height > (uint32_t)VIDEO_4K_HEIGHT));
 }
 
 /*===========================================================================
@@ -11623,8 +12166,8 @@
     case CAM_FILTER_ARRANGEMENT_GRBG:
     case CAM_FILTER_ARRANGEMENT_GBRG:
     case CAM_FILTER_ARRANGEMENT_BGGR:
-        if ((stream_type == CAM_STREAM_TYPE_CALLBACK) ||
-                (stream_type == CAM_STREAM_TYPE_PREVIEW) ||
+        if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
+                (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
                 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
             feature_mask |= CAM_QCOM_FEATURE_PAAF;
         }
diff --git a/msmcobalt/QCamera2/HAL3/QCamera3HWI.h b/msmcobalt/QCamera2/HAL3/QCamera3HWI.h
index 1ce3952..86e1480 100644
--- a/msmcobalt/QCamera2/HAL3/QCamera3HWI.h
+++ b/msmcobalt/QCamera2/HAL3/QCamera3HWI.h
@@ -35,7 +35,7 @@
 #include <pthread.h>
 #include <utils/KeyedVector.h>
 #include <utils/List.h>
-
+#include <map>
 // Camera dependencies
 #include "hardware/camera3.h"
 #include "QCamera3Channel.h"
@@ -69,6 +69,15 @@
 #define NSEC_PER_USEC 1000LLU
 #define NSEC_PER_33MSEC 33000000LLU
 
+/* Orchestration macros */
+#define EV_COMP_SETTLE_DELAY   2
+#define GB_HDR_HALF_STEP_EV -6
+#define GB_HDR_2X_STEP_EV 6
+
+#define FRAME_REGISTER_LRU_SIZE 256
+#define INTERNAL_FRAME_STARTING_NUMBER 800
+#define EMPTY_FRAMEWORK_FRAME_NUMBER 0xFFFFFFFF
+
 typedef enum {
     SET_ENABLE,
     SET_CONTROLENABLE,
@@ -121,6 +130,23 @@
     int32_t getBufErrStatus(buffer_handle_t *buffer);
 };
 
+class FrameNumberRegistry {
+public:
+
+    FrameNumberRegistry();
+    ~FrameNumberRegistry();
+    int32_t allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
+            uint32_t &internalFrameNumber);
+    int32_t generateStoreInternalFrameNumber(uint32_t &internalFrameNumber);
+    int32_t freeInternalFrameNumber(uint32_t internalFrameNumber);
+    int32_t getFrameworkFrameNumber(uint32_t internalFrameNumber, uint32_t &frameworkFrameNumber);
+    void purgeOldEntriesLocked();
+
+private:
+    std::map<uint32_t, uint32_t> _register;
+    uint32_t _nextFreeInternalNumber;
+    Mutex mRegistryLock;
+};
 
 class QCamera3HardwareInterface {
 public:
@@ -150,6 +176,12 @@
     int openCamera(struct hw_device_t **hw_device);
     camera_metadata_t* translateCapabilityToMetadata(int type);
 
+    typedef struct {
+        camera3_stream_t *stream;
+        bool need_metadata;
+        bool meteringOnly;
+    } InternalRequest;
+
     static int getCamInfo(uint32_t cameraId, struct camera_info *info);
     static cam_capability_t *getCapabilities(mm_camera_ops_t *ops,
             uint32_t cam_handle);
@@ -181,7 +213,12 @@
     int initialize(const camera3_callback_ops_t *callback_ops);
     int configureStreams(camera3_stream_configuration_t *stream_list);
     int configureStreamsPerfLocked(camera3_stream_configuration_t *stream_list);
-    int processCaptureRequest(camera3_capture_request_t *request);
+    int processCaptureRequest(camera3_capture_request_t *request,
+                              List<InternalRequest> &internalReqs);
+    int orchestrateRequest(camera3_capture_request_t *request);
+    void orchestrateResult(camera3_capture_result_t *result);
+    void orchestrateNotify(camera3_notify_msg_t *notify_msg);
+
     void dump(int fd);
     int flushPerf();
 
@@ -290,7 +327,8 @@
             int32_t scalar_format, const cam_dimension_t &dim,
             int32_t config_type);
 
-    int validateCaptureRequest(camera3_capture_request_t *request);
+    int validateCaptureRequest(camera3_capture_request_t *request,
+                               List<InternalRequest> &internallyRequestedStreams);
     int validateStreamDimensions(camera3_stream_configuration_t *streamList);
     int validateStreamRotations(camera3_stream_configuration_t *streamList);
     void deriveMinFrameDuration();
@@ -316,6 +354,7 @@
 
     bool isSupportChannelNeeded(camera3_stream_configuration_t *streamList,
             cam_stream_size_info_t stream_config_info);
+    bool isHdrSnapshotRequest(camera3_capture_request *request);
     int32_t setMobicat();
 
     int32_t getSensorOutputSize(cam_dimension_t &sensor_dim);
@@ -385,6 +424,7 @@
     bool mFlush;
     bool mFlushPerf;
     bool mEnableRawDump;
+    bool mForceHdrSnapshot;
     QCamera3HeapMemory *mParamHeap;
     metadata_buffer_t* mParameters;
     metadata_buffer_t* mPrevParameters;
@@ -405,6 +445,7 @@
     uint8_t m_bTnrEnabled;
     int8_t  mSupportedFaceDetectMode;
     uint8_t m_bTnrPreview;
+    uint8_t m_bSwTnrPreview;
     uint8_t m_bTnrVideo;
     uint8_t m_debug_avtimer;
 
@@ -416,11 +457,13 @@
         // in order to generate the buffer.
         bool need_metadata;
     } RequestedBufferInfo;
+
     typedef struct {
         uint32_t frame_number;
         uint32_t num_buffers;
         int32_t request_id;
         List<RequestedBufferInfo> buffers;
+        List<InternalRequest> internalRequestList;
         int blob_request;
         uint8_t bUrgentReceived;
         nsecs_t timestamp;
@@ -444,6 +487,7 @@
         uint32_t frame_number;
     } PendingReprocessResult;
 
+    class FrameNumberRegistry _orchestrationDb;
     typedef KeyedVector<uint32_t, Vector<PendingBufferInfo> > FlushMap;
     typedef List<QCamera3HardwareInterface::PendingRequestInfo>::iterator
             pendingRequestIterator;
@@ -568,7 +612,7 @@
     QCamera3HeapMemory *m_pDualCamCmdHeap;
     cam_dual_camera_cmd_info_t *m_pDualCamCmdPtr;
     cam_sync_related_sensors_event_info_t m_relCamSyncInfo;
-
+    Mutex mFlushLock;
 };
 
 }; // namespace qcamera
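
FrameNumberRegistry is the bookkeeping piece behind the new orchestration layer: framework frame numbers map to HAL-internal ones starting at INTERNAL_FRAME_STARTING_NUMBER, purely internal frames are presumably keyed against EMPTY_FRAMEWORK_FRAME_NUMBER, and old entries are presumably purged once the map grows past FRAME_REGISTER_LRU_SIZE. A minimal sketch of the semantics the declaration implies (assumed, with placeholder error codes, not the actual implementation):

    int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(
            uint32_t frameworkFrameNumber, uint32_t &internalFrameNumber)
    {
        Mutex::Autolock lock(mRegistryLock);
        internalFrameNumber = _nextFreeInternalNumber++;
        _register[internalFrameNumber] = frameworkFrameNumber;
        purgeOldEntriesLocked();
        return NO_ERROR;
    }

    int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
            uint32_t &frameworkFrameNumber)
    {
        Mutex::Autolock lock(mRegistryLock);
        auto it = _register.find(internalFrameNumber);
        if (it == _register.end()) {
            return NAME_NOT_FOUND;  // placeholder error code
        }
        frameworkFrameNumber = it->second;
        return NO_ERROR;
    }
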
diff --git a/msmcobalt/QCamera2/HAL3/QCamera3PostProc.cpp b/msmcobalt/QCamera2/HAL3/QCamera3PostProc.cpp
index 37c5701..4fc6c7d 100644
--- a/msmcobalt/QCamera2/HAL3/QCamera3PostProc.cpp
+++ b/msmcobalt/QCamera2/HAL3/QCamera3PostProc.cpp
@@ -119,7 +119,7 @@
  *==========================================================================*/
 int32_t QCamera3PostProcessor::init(QCamera3StreamMem *memory)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PPROC_INIT);
     mOutputMem = memory;
     m_dataProcTh.launch(dataProcessRoutine, this);
 
@@ -178,7 +178,7 @@
         cam_dimension_t* max_pic_dim,
         void *user_data)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PPROC_INIT_JPEG);
     mJpegCB = jpeg_cb;
     mJpegUserData = user_data;
     mm_dimension max_size;
@@ -1002,7 +1002,7 @@
  *==========================================================================*/
 void QCamera3PostProcessor::releaseJpegJobData(qcamera_hal3_jpeg_data_t *job)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PPROC_REL_JPEG_JOB_DATA);
     int32_t rc = NO_ERROR;
     LOGD("E");
     if (NULL != job) {
@@ -1066,7 +1066,7 @@
  *==========================================================================*/
 void QCamera3PostProcessor::releasePPJobData(qcamera_hal3_pp_data_t *pp_job)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PPROC_REL_PP_JOB_DATA);
     LOGD("E");
     if (NULL != pp_job) {
         if (NULL != pp_job->src_frame) {
@@ -1556,7 +1556,7 @@
 int32_t QCamera3PostProcessor::encodeData(qcamera_hal3_jpeg_data_t *jpeg_job_data,
                           uint8_t &needNewSess)
 {
-    ATRACE_CALL();
+    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PPROC_ENCODEDATA);
     LOGD("E");
     int32_t ret = NO_ERROR;
     mm_jpeg_job_t jpg_job;
diff --git a/msmcobalt/QCamera2/QCamera2Factory.cpp b/msmcobalt/QCamera2/QCamera2Factory.cpp
index cd1edd9..7a04411 100644
--- a/msmcobalt/QCamera2/QCamera2Factory.cpp
+++ b/msmcobalt/QCamera2/QCamera2Factory.cpp
@@ -45,6 +45,7 @@
 #include "HAL3/QCamera3HWI.h"
 #include "util/QCameraFlash.h"
 #include "QCamera2Factory.h"
+#include "QCameraTrace.h"
 extern "C" {
 #include "mm_camera_dbg.h"
 }
@@ -405,6 +406,7 @@
             camera_id, mHalDescriptors[camera_id].device_version);
 
     if ( mHalDescriptors[camera_id].device_version == CAMERA_DEVICE_API_VERSION_3_0 ) {
+        CAMSCOPE_INIT(CAMSCOPE_SECTION_HAL);
         QCamera3HardwareInterface *hw = new QCamera3HardwareInterface(mHalDescriptors[camera_id].cameraId,
                 mCallbacks);
         if (!hw) {
@@ -507,6 +509,7 @@
 #ifdef QCAMERA_HAL1_SUPPORT
         case CAMERA_DEVICE_API_VERSION_1_0:
         {
+            CAMSCOPE_INIT(CAMSCOPE_SECTION_HAL);
             QCamera2HardwareInterface *hw =
                 new QCamera2HardwareInterface((uint32_t)cameraId);
             if (!hw) {
diff --git a/msmcobalt/QCamera2/stack/common/cam_intf.h b/msmcobalt/QCamera2/stack/common/cam_intf.h
index 0ac5f50..e978bfd 100644
--- a/msmcobalt/QCamera2/stack/common/cam_intf.h
+++ b/msmcobalt/QCamera2/stack/common/cam_intf.h
@@ -90,6 +90,14 @@
     CAM_ROLE_TELE
 } cam_dual_camera_role_t;
 
+/* Enum to define different low performance modes in dual camera*/
+typedef enum {
+    CAM_PERF_SENSOR_SUSPEND,
+    CAM_PERF_ISPIF_FRAME_DROP,
+    CAM_PERF_ISPIF_FRAME_SKIP,
+    CAM_PERF_STATS_FPS_CONTROL
+} cam_dual_camera_perf_mode_t;
+
 /* Payload for sending bundling info to backend */
 typedef struct {
     cam_sync_related_sensors_control_t sync_control;
@@ -102,6 +110,8 @@
        backend */
     uint32_t related_sensor_session_id;
     uint8_t is_frame_sync_enabled;
+    /*Low power mode type. Static info per device*/
+    cam_dual_camera_perf_mode_t perf_mode;
 } cam_dual_camera_bundle_info_t;
 typedef cam_dual_camera_bundle_info_t cam_sync_related_sensors_event_info_t;
 
@@ -112,8 +122,9 @@
 
 /* Structrue to control performance info in dual camera case*/
 typedef struct {
-    uint8_t low_fps; /*Control perf using FPS if set*/
+    cam_dual_camera_perf_mode_t perf_mode; /*Requested low power mode*/
     uint8_t enable;  /*Enable or diable Low power mode*/
+    uint8_t priority; /*Can be used to force LPM*/
 } cam_dual_camera_perf_control_t;
 
 /* dual camera event payload */
@@ -587,6 +598,16 @@
     /* Dual cam calibration data */
     cam_related_system_calibration_data_t related_cam_calibration;
 
+    /* Maximum degree of rotation along X axis for tele sensor with respect to the wide sensor*/
+    float      max_roll_degrees;
+    /* Maximum degree of rotation along Y axis for tele sensor with respect to the wide sensor*/
+    float      max_pitch_degrees;
+    /* Maximum degree of rotation along Z axis for tele sensor with respect to the wide sensor*/
+    float      max_yaw_degrees;
+
+    /* Pixel pitch in micrometer*/
+    float      pixel_pitch_um;
+
     /* Meta_RAW capability */
     uint8_t meta_raw_channel_count;
     uint8_t vc[MAX_SIZES_CNT];
@@ -605,6 +626,9 @@
     struct cam_capability *main_cam_cap;
     struct cam_capability *aux_cam_cap;
     cam_sync_type_t cam_sensor_mode;
+
+    /*Available Spatial Alignment solutions*/
+    uint32_t avail_spatial_align_solns;
 } cam_capability_t;
 
 typedef enum {
@@ -1079,6 +1103,9 @@
     INCLUDE(CAM_INTF_PARM_JPEG_ENCODE_CROP,             cam_stream_crop_info_t,      1);
     INCLUDE(CAM_INTF_PARM_JPEG_SCALE_DIMENSION,         cam_dimension_t,             1);
     INCLUDE(CAM_INTF_META_FOCUS_DEPTH_INFO,             uint8_t,                     1);
+    INCLUDE(CAM_INTF_PARM_HAL_BRACKETING_HDR,           cam_hdr_param_t,             1);
+    INCLUDE(CAM_INTF_META_DC_LOW_POWER_ENABLE,          uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_DC_SAC_OUTPUT_INFO,           cam_sac_output_info_t,       1);
 } metadata_data_t;
 
 /* Update clear_metadata_buffer() function when a new is_xxx_valid is added to
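
cam_dual_camera_perf_control_t now carries an explicit perf_mode instead of the old low_fps flag, and the bundle info advertises a static per-device perf_mode, letting the HAL choose between sensor suspend, ISPIF frame drop/skip and stats-based FPS control. An illustrative request for ISPIF frame drop on the low-power camera, using only the fields defined above:

    /* Illustrative only: populate the extended perf-control payload. */
    cam_dual_camera_perf_control_t perf;
    memset(&perf, 0, sizeof(perf));
    perf.enable    = 1;                          /* enter low power mode     */
    perf.perf_mode = CAM_PERF_ISPIF_FRAME_DROP;  /* drop frames at the ISPIF */
    perf.priority  = 0;                          /* do not force LPM         */
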
diff --git a/msmcobalt/QCamera2/stack/common/cam_types.h b/msmcobalt/QCamera2/stack/common/cam_types.h
index 833b567..05117b0 100644
--- a/msmcobalt/QCamera2/stack/common/cam_types.h
+++ b/msmcobalt/QCamera2/stack/common/cam_types.h
@@ -38,7 +38,7 @@
 #define MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES 8096
 #define AWB_DEBUG_DATA_SIZE               (45000)
 #define AEC_DEBUG_DATA_SIZE               (5000)
-#define AF_DEBUG_DATA_SIZE                (50000)
+#define AF_DEBUG_DATA_SIZE                (60000)
 #define ASD_DEBUG_DATA_SIZE               (100)
 #define STATS_BUFFER_DEBUG_DATA_SIZE      (75000)
 #define BESTATS_BUFFER_DEBUG_DATA_SIZE    (150000)
@@ -107,7 +107,7 @@
 #define EXIF_IMAGE_DESCRIPTION_SIZE 100
 
 #define MAX_INFLIGHT_REQUESTS  6
-#define MAX_INFLIGHT_BLOB      3
+#define MAX_INFLIGHT_BLOB      10
 #define MIN_INFLIGHT_REQUESTS  3
 #define MIN_INFLIGHT_60FPS_REQUESTS (6)
 #define MAX_INFLIGHT_REPROCESS_REQUESTS 1
@@ -180,6 +180,14 @@
     CAM_POSITION_FRONT_AUX
 } cam_position_t;
 
+// Counter clock wise
+typedef enum {
+    ROTATE_0 = 1<<0,
+    ROTATE_90 = 1<<1,
+    ROTATE_180 = 1<<2,
+    ROTATE_270 = 1<<3,
+} cam_rotation_t;
+
 typedef enum {
     CAM_LENS_NORMAL,
     CAM_LENS_WIDE,
@@ -1701,6 +1709,11 @@
 } cam_3a_sync_mode_t;
 
 typedef struct {
+    float widthMargins;  /*Width margin in %*/
+    float heightMargins; /*Height margin in %*/
+} cam_frame_margins_t;
+
+typedef struct {
     cam_dimension_t stream_sizes[MAX_NUM_STREAMS];
     uint32_t num_streams;
     cam_stream_type_t type[MAX_NUM_STREAMS];
@@ -1709,6 +1722,7 @@
     cam_is_type_t is_type[MAX_NUM_STREAMS];
     cam_hfr_mode_t hfr_mode;
     cam_format_t format[MAX_NUM_STREAMS];
+    cam_rotation_t rotation[MAX_NUM_STREAMS];
     uint32_t buf_alignment;
     uint32_t min_stride;
     uint32_t min_scanline;
@@ -1717,6 +1731,8 @@
     uint32_t dt[MAX_NUM_STREAMS];
     uint32_t vc[MAX_NUM_STREAMS];
     cam_sub_format_type_t sub_format_type[MAX_NUM_STREAMS];
+    cam_frame_margins_t margins[MAX_NUM_STREAMS];
+    cam_dimension_t stream_sz_plus_margin[MAX_NUM_STREAMS]; /*stream sizes + margin*/
 } cam_stream_size_info_t;
 
 typedef enum {
@@ -1781,6 +1797,32 @@
     uint32_t stream_id[MAX_NUM_STREAMS];
 } cam_buf_divert_info_t;
 
+typedef enum {
+    CAM_SPATIAL_ALIGN_QCOM = 1 << 0,
+    CAM_SPATIAL_ALIGN_OEM  = 1 << 1
+} cam_spatial_align_type_t;
+
+typedef struct {
+    uint32_t shift_horz;
+    uint32_t shift_vert;
+} cam_sac_output_shift_t;
+
+typedef struct {
+    uint8_t                is_master_preview_valid;
+    uint8_t                master_preview;
+    uint8_t                is_master_3A_valid;
+    uint8_t                master_3A;
+    uint8_t                is_ready_status_valid;
+    uint8_t                ready_status;
+    uint8_t                is_output_shift_valid;
+    cam_sac_output_shift_t output_shift;
+    uint8_t                is_wide_focus_roi_shift_valid;
+    cam_sac_output_shift_t wide_focus_roi_shift;
+    uint8_t                is_tele_focus_roi_shift_valid;
+    cam_sac_output_shift_t tele_focus_roi_shift;
+} cam_sac_output_info_t;
+
+
 typedef  struct {
     uint8_t is_stats_valid;               /* if histgram data is valid */
     cam_hist_stats_t stats_data;          /* histogram data */
@@ -2284,6 +2326,12 @@
     CAM_INTF_META_FOCUS_VALUE,
     /*Spot light detection result output from af core*/
     CAM_INTF_META_SPOT_LIGHT_DETECT,
+    /* HAL based HDR*/
+    CAM_INTF_PARM_HAL_BRACKETING_HDR,
+    /* Dual camera - Spatial Alignment Compute/Correction output info*/
+    CAM_INTF_META_DC_SAC_OUTPUT_INFO,
+    /* Dual camera - enable low power mode for the slave camera */
+    CAM_INTF_META_DC_LOW_POWER_ENABLE,
     CAM_INTF_PARM_MAX
 } cam_intf_parm_type_t;
 
@@ -2510,6 +2558,7 @@
 #define CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR (((cam_feature_mask_t)1UL)<<35)
 #define CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR (((cam_feature_mask_t)1UL)<<36)
 #define CAM_QCOM_FEATURE_METADATA_BYPASS (((cam_feature_mask_t)1UL)<<37)
+#define CAM_QTI_FEATURE_SAT             (((cam_feature_mask_t)1UL)<<38)
 #define CAM_QCOM_FEATURE_PP_SUPERSET    (CAM_QCOM_FEATURE_DENOISE2D|CAM_QCOM_FEATURE_CROP|\
                                          CAM_QCOM_FEATURE_ROTATION|CAM_QCOM_FEATURE_SHARPNESS|\
                                          CAM_QCOM_FEATURE_SCALE|CAM_QCOM_FEATURE_CAC|\
@@ -2519,14 +2568,6 @@
 #define CAM_QCOM_FEATURE_PP_PASS_1      CAM_QCOM_FEATURE_PP_SUPERSET
 #define CAM_QCOM_FEATURE_PP_PASS_2      CAM_QCOM_FEATURE_SCALE | CAM_QCOM_FEATURE_CROP;
 
-// Counter clock wise
-typedef enum {
-    ROTATE_0 = 1<<0,
-    ROTATE_90 = 1<<1,
-    ROTATE_180 = 1<<2,
-    ROTATE_270 = 1<<3,
-} cam_rotation_t;
-
 typedef struct {
    cam_rotation_t rotation;         /* jpeg rotation */
    cam_rotation_t device_rotation;  /* device rotation */
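
CAM_INTF_META_DC_SAC_OUTPUT_INFO delivers the new cam_sac_output_info_t, in which every output field travels with its own is_*_valid flag so the spatial alignment block can publish partial updates. A hedged consumer sketch, assuming the existing IF_META_AVAILABLE helper from cam_intf.h and a metadata_buffer_t pointer named metadata:

    /* Illustrative consumer: read only the fields whose validity flag is set. */
    IF_META_AVAILABLE(cam_sac_output_info_t, sac,
            CAM_INTF_META_DC_SAC_OUTPUT_INFO, metadata) {
        if (sac->is_output_shift_valid) {
            LOGD("SAC shift: horz %u vert %u",
                    sac->output_shift.shift_horz, sac->output_shift.shift_vert);
        }
        if (sac->is_master_preview_valid) {
            LOGD("Preview master camera: %u", sac->master_preview);
        }
    }
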
diff --git a/msmcobalt/QCamera2/stack/common/camscope_packet_type.h b/msmcobalt/QCamera2/stack/common/camscope_packet_type.h
new file mode 100644
index 0000000..7e35b81
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/common/camscope_packet_type.h
@@ -0,0 +1,270 @@
+/* Copyright (c) 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __CAMSCOPE_PACKET_TYPE_H__
+#define __CAMSCOPE_PACKET_TYPE_H__
+
+#include <stdint.h>
+#include <time.h>
+
+#define CAMSCOPE_OFF_FLAG 0x00000000
+#define CAMSCOPE_ON_FLAG 0xFFFFFFFF
+#define CAMSCOPE_OFF_MASK 0x00000000
+#define CAMSCOPE_KPI_MASK 0x00000001
+#define CAMSCOPE_KPI_DBG_MASK 0x00000002
+#define CAMSCOPE_ALWAYS_ON_MASK 0xFFFFFFFF
+
+extern volatile uint32_t kpi_camscope_flags;
+extern volatile uint32_t kpi_camscope_frame_count;
+
+typedef enum {
+    CAMSCOPE_SECTION_MMCAMERA,
+    CAMSCOPE_SECTION_HAL,
+    CAMSCOPE_SECTION_JPEG,
+    CAMSCOPE_SECTION_SIZE,
+} camscope_section_type;
+
+typedef enum {
+    CAMSCOPE_BASE,
+    CAMSCOPE_SOFTWARE_BASE,
+    CAMSCOPE_SYNC_BEGIN,
+    CAMSCOPE_SYNC_END,
+    CAMSCOPE_ASYNC_BEGIN,
+    CAMSCOPE_ASYNC_END,
+    CAMSCOPE_SYNC_EVENT,
+    CAMSCOPE_ASYNC_EVENT,
+    CAMSCOPE_RESERVED = 0xFFFFFFFF
+} camscope_packet_type;
+
+typedef enum {
+    CAMSCOPE_MCT_SOF,
+    CAMSCOPE_MCT_SUP_PARAMS,
+    CAMSCOPE_MCT_SPEC_EVT,
+    CAMSCOPE_MCT_BUS_PROC,
+    CAMSCOPE_AFD,
+    CAMSCOPE_ASD,
+    CAMSCOPE_AEC,
+    CAMSCOPE_AWB,
+    CAMSCOPE_AF,
+    CAMSCOPE_CPP,
+    CAMSCOPE_CPP_CAPTURE,
+    CAMSCOPE_CPP_CLK_REQ,
+    CAMSCOPE_CPP_HOLDING,
+    CAMSCOPE_CPP_HW_ON,
+    CAMSCOPE_SNAPSHOT,
+    CAMSCOPE_ISP_HW_UPDATE,
+    CAMSCOPE_JPEG,
+    CAMSCOPE_FACEPROC,
+    CAMSCOPE_SENSOR_PROCESS,
+    CAMSCOPE_FD_NUM_DETECTED,
+    CAMSCOPE_CAM_ALLOC,
+    CAMSCOPE_IFACE_STREAMON_FWD,
+    CAMSCOPE_IFACE_STREAMON_THREAD,
+    CAMSCOPE_IFACE_STREAMOFF_FWD,
+    CAMSCOPE_IFACE_STREAMOFF_THREAD,
+    CAMSCOPE_IFACE_CFG_ISP,
+    CAMSCOPE_IFACE_HW_CFG,
+    CAMSCOPE_IFACE_CREATE_AXI_HW,
+    CAMSCOPE_IFACE_CFG_AXI_HW,
+    CAMSCOPE_IFACE_STREAMON,
+    CAMSCOPE_IFACE_STREAMOFF,
+    CAMSCOPE_AF_START,
+    CAMSCOPE_AF_SET,
+    CAMSCOPE_IS,
+    CAMSCOPE_ISP_STREAMON,
+    CAMSCOPE_ISP_STREAMOFF,
+    CAMSCOPE_ISP_SET_STRM_CFG,
+    CAMSCOPE_VFE_HW_UPDATE,
+    CAMSCOPE_ISP_STREAMON_FWD,
+    CAMSCOPE_SENSOR_SD_OPEN,
+    CAMSCOPE_SENSOR_START_SESSION,
+    CAMSCOPE_SENSOR_SET_RESOLUTION,
+    CAMSCOPE_SENSOR_SET_STRM_CFG,
+    CAMSCOPE_SENSOR_CFG_PDAF,
+    CAMSCOPE_SENSOR_LOAD_CHROMATIX,
+    CAMSCOPE_SENSOR_START_STREAM,
+    CAMSCOPE_SENSOR_SET_FPS,
+    CAMSCOPE_SENSOR_STREAMOFF,
+    CAMSCOPE_WNR,
+    CAMSCOPE_WNR_MEMCPY,
+    CAMSCOPE_PPROC_STREAMOFF,
+    CAMSCOPE_CPP_STREAMON,
+    CAMSCOPE_CAC,
+    CAMSCOPE_CPP_CREATE_HW_FRAME,
+    CAMSCOPE_CPP_SET_STRM_CFG,
+    CAMSCOPE_MCT_START_SESSION,
+    CAMSCOPE_MCT_STOP_SESSION,
+    CAMSCOPE_IMGLIB_STREAMON,
+    CAMSCOPE_MCT_CREATE_BUF,
+    CAMSCOPE_HAL1_START_PREVIEW,
+    CAMSCOPE_HAL1_STOP_PREVIEW,
+    CAMSCOPE_HAL1_TAKE_PICTURE,
+    CAMSCOPE_HAL1_CLOSECAMERA,
+    CAMSCOPE_HAL1_OPENCAMERA,
+    CAMSCOPE_HAL1_STARTPREVIEW,
+    CAMSCOPE_HAL1_STOPPREVIEW,
+    CAMSCOPE_HAL1_CAPTURE_CH_CB,
+    CAMSCOPE_HAL1_PREVIEW_STRM_CB,
+    CAMSCOPE_HAL3_SNAPSHOT,
+    CAMSCOPE_HAL3_GETSTREAMBUFS,
+    CAMSCOPE_HAL3_OPENCAMERA,
+    CAMSCOPE_HAL3_CLOSECAMERA,
+    CAMSCOPE_HAL3_FLUSH,
+    CAMSCOPE_HAL1_ZSL_CH_CB,
+    CAMSCOPE_HAL1_PP_CH_CB,
+    CAMSCOPE_HAL1_SYNC_STRM_CB,
+    CAMSCOPE_HAL1_NODIS_PREVIEW_STRMCB,
+    CAMSCOPE_HAL1_RDI_MODE_STRM_CB,
+    CAMSCOPE_HAL1_POSTVIEW_STRM_CB,
+    CAMSCOPE_HAL1_VIDEO_STRM_CB,
+    CAMSCOPE_HAL1_SNAPSHOT_CH_CB,
+    CAMSCOPE_HAL1_RAW_STRM_CB,
+    CAMSCOPE_HAL1_RAW_CH_CB,
+    CAMSCOPE_HAL1_PREVIEW_RAW_STRM_CB,
+    CAMSCOPE_HAL1_SNAPSHOT_RAW_STRM_CB,
+    CAMSCOPE_HAL1_METADATA_STRM_CB,
+    CAMSCOPE_HAL1_REPROC_STRM_CB,
+    CAMSCOPE_HAL1_CB_STRM_CB,
+    CAMSCOPE_HAL1_SET_PREVIEW_WINDOW,
+    CAMSCOPE_HAL1_SET_CALLBACKS,
+    CAMSCOPE_HAL1_ENABLE_MSG_TYPE,
+    CAMSCOPE_HAL1_DISABLE_MSG_TYPE,
+    CAMSCOPE_HAL1_MSG_TYPE_ENABLED,
+    CAMSCOPE_HAL1_PREPARE_PREVIEW,
+    CAMSCOPE_HAL1_PREVIEW_ENABLED,
+    CAMSCOPE_HAL1_RESTART_START_PREVIEW,
+    CAMSCOPE_HAL1_RESTART_STOP_PREVIEW,
+    CAMSCOPE_HAL1_PRE_START_RECORDING,
+    CAMSCOPE_HAL1_START_RECORDING,
+    CAMSCOPE_HAL1_STOP_RECORDING,
+    CAMSCOPE_HAL1_RECORDING_ENABLED,
+    CAMSCOPE_HAL1_REL_REC_FRAME,
+    CAMSCOPE_HAL1_CANCEL_AF,
+    CAMSCOPE_HAL1_PRE_TAKE_PICTURE,
+    CAMSCOPE_HAL1_CANCEL_PICTURE,
+    CAMSCOPE_HAL1_SET_PARAMETERS,
+    CAMSCOPE_HAL1_STOP_AFTER_SET_PARAMS,
+    CAMSCOPE_HAL1_COMMIT_PARAMS,
+    CAMSCOPE_HAL1_RESTART_AFTER_SET_PARAMS,
+    CAMSCOPE_HAL1_GET_PARAMETERS,
+    CAMSCOPE_HAL1_PUT_PARAMETERS,
+    CAMSCOPE_HAL1_SEND_COMMAND,
+    CAMSCOPE_HAL1_SEND_COMMAND_RESTART,
+    CAMSCOPE_HAL1_RELEASE,
+    CAMSCOPE_HAL1_REGISTER_FACE_IMAGE,
+    CAMSCOPE_HAL1_PREPARE_SNAPSHOT,
+    CAMSCOPE_HAL1_QCAMERA2HWI,
+    CAMSCOPE_HAL1_INIT_CAP,
+    CAMSCOPE_HAL1_GET_CAP,
+    CAMSCOPE_HAL1_PREPAREPREVIEW,
+    CAMSCOPE_HAL1_PREPARE_HW_FOR_SNAPSHOT,
+    CAMSCOPE_HAL3_INIT,
+    CAMSCOPE_HAL3_CFG_STRMS,
+    CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD,
+    CAMSCOPE_HAL3_HANDLE_BATCH_METADATA,
+    CAMSCOPE_HAL3_HANDLE_METADATA_LKD,
+    CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD,
+    CAMSCOPE_HAL3_HANDLE_BUF_LKD,
+    CAMSCOPE_HAL3_PROC_CAP_REQ,
+    CAMSCOPE_HAL3_FLUSH_PREF,
+    CAMSCOPE_HAL3_GET_CAM_INFO,
+    CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO,
+    CAMSCOPE_HAL3_CH_START,
+    CAMSCOPE_HAL3_CH_STOP,
+    CAMSCOPE_HAL3_CH_FLUSH,
+    CAMSCOPE_HAL3_PROC_CH_STRM_CB,
+    CAMSCOPE_HAL3_PROC_CH_REG_BUF,
+    CAMSCOPE_HAL3_PROC_CH_REPROC_CB,
+    CAMSCOPE_HAL3_REG_CH_INIT,
+    CAMSCOPE_HAL3_REG_CH_REQ,
+    CAMSCOPE_HAL3_METADATA_CH_INIT,
+    CAMSCOPE_HAL3_RAW_CH_STRM_CB,
+    CAMSCOPE_HAL3_YUV_CH_INIT,
+    CAMSCOPE_HAL3_YUV_CH_STRM_CB,
+    CAMSCOPE_HAL3_PIC_CH_JPEG_EVT_HANDLE,
+    CAMSCOPE_HAL3_PIC_CH_REQ,
+    CAMSCOPE_HAL3_PIC_CH_DATA_NOTIFY_CB,
+    CAMSCOPE_HAL3_PIC_CH_STRM_CB,
+    CAMSCOPE_HAL3_REPROC_CH_REG_BUF,
+    CAMSCOPE_HAL3_REPROC_CH_START,
+    CAMSCOPE_HAL3_REPROC_CH_STOP,
+    CAMSCOPE_HAL3_PPROC_INIT,
+    CAMSCOPE_HAL3_PPROC_INIT_JPEG,
+    CAMSCOPE_HAL3_PPROC_REL_JPEG_JOB_DATA,
+    CAMSCOPE_HAL3_PPROC_REL_PP_JOB_DATA,
+    CAMSCOPE_HAL3_PPROC_ENCODEDATA,
+    CAMSCOPE_EVENT_NAME_SIZE
+} camscope_event_name;
+
+extern const char * camscope_atrace_names[CAMSCOPE_EVENT_NAME_SIZE];
+
+typedef struct {
+    uint32_t packet_type;
+    uint32_t size;
+} camscope_base;
+
+typedef struct {
+    camscope_base base;
+    struct timeval timestamp;
+    int32_t thread_id;
+    uint32_t event_name;
+} camscope_sw_base;
+
+typedef struct {
+    camscope_sw_base sw_base;
+    uint32_t frame_id;
+} camscope_timing;
+
+typedef struct {
+    camscope_sw_base sw_base;
+    struct timeval in_timestamp;
+    struct timeval out_timestamp;
+    uint32_t frame_id;
+} camscope_in_out_timing;
+
+void camscope_base_log(uint32_t camscope_section,
+                       uint32_t camscope_enable_mask,
+                       uint32_t packet_type);
+
+void camscope_sw_base_log(uint32_t camscope_section,
+                          uint32_t camscope_enable_mask, uint32_t packet_type,
+                          uint32_t event_name);
+
+void camscope_timing_log(uint32_t camscope_section,
+                         uint32_t camscope_enable_mask, uint32_t packet_type,
+                         uint32_t event_name, uint32_t frame_id);
+
+void camscope_in_out_timing_log(uint32_t camscope_section,
+                                uint32_t camscope_enable_mask,
+                                uint32_t packet_type, uint32_t event_name,
+                                struct timeval in_timestamp,
+                                struct timeval out_timestamp,
+                                uint32_t frame_id);
+
+#endif /* __CAMSCOPE_PACKET_TYPE_H__ */
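
The new camscope header defines the section/event enums plus four raw logging entry points; the KPI_ATRACE_CAMSCOPE_* and ATRACE_CAMSCOPE_* macros used throughout this patch presumably wrap them. A hedged usage example of the raw API, marking a synchronous snapshot span in the HAL section:

    /* Illustrative only: direct use of the logging entry points. */
    camscope_sw_base_log(CAMSCOPE_SECTION_HAL, CAMSCOPE_KPI_MASK,
            CAMSCOPE_SYNC_BEGIN, CAMSCOPE_HAL3_SNAPSHOT);
    /* ... capture work ... */
    camscope_sw_base_log(CAMSCOPE_SECTION_HAL, CAMSCOPE_KPI_MASK,
            CAMSCOPE_SYNC_END, CAMSCOPE_HAL3_SNAPSHOT);
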
diff --git a/msmcobalt/QCamera2/stack/common/mm_camera_interface.h b/msmcobalt/QCamera2/stack/common/mm_camera_interface.h
index 4734315..eb0316b 100644
--- a/msmcobalt/QCamera2/stack/common/mm_camera_interface.h
+++ b/msmcobalt/QCamera2/stack/common/mm_camera_interface.h
@@ -194,7 +194,7 @@
     mm_camera_req_buf_type_t type;
     uint32_t num_buf_requested;
     uint32_t num_retro_buf_requested;
-    uint8_t primary_only;
+    uint8_t cam_num;    //Camera from which the frame is requested
     uint32_t frame_idx; //Client can request frameId to pick from ZSL queue
 } mm_camera_req_buf_t;
 
@@ -402,7 +402,15 @@
     uint8_t user_expected_frame_id;
 } mm_camera_channel_attr_t;
 
-/** mm_camera_intf_frame_sync_t: structure to register frame sync
+/** mm_camera_cb_req_type: Callback request type**/
+typedef enum {
+    MM_CAMERA_CB_REQ_TYPE_DEFAULT,
+    MM_CAMERA_CB_REQ_TYPE_SWITCH,
+    MM_CAMERA_CB_REQ_TYPE_FRAME_SYNC,
+    MM_CAMERA_CB_REQ_TYPE_ALL_CB,
+} mm_camera_cb_req_type;
+
+/** mm_camera_intf_cb_req_type: structure to request different mode of stream callback
 *    @camera_handle  : camera handle to be syced
 *    @ch_id          : channel id to be synced
 *    @stream_id      : stream id to be synced
@@ -414,7 +422,7 @@
     uint32_t camera_handle;
     uint32_t ch_id;
     uint32_t stream_id;
-    uint8_t max_unmatched_frames;
+    mm_camera_channel_attr_t attr;
     mm_camera_buf_notify_t buf_cb;
     void *userdata;
 } mm_camera_intf_frame_sync_t;
@@ -916,35 +924,17 @@
             uint32_t ch_id, uint32_t stream_id,
             mm_camera_intf_frame_sync_t *sync_attr);
 
-   /** start_stream_frame_sync:  function definition to start frame buffer sync
+   /** handle_frame_sync_cb: function to handle frame sync
      *    @camera_handle : camer handler
      *    @ch_id : channel handler
      *    @stream_id : stream handler
+     *    @req_type : Frame sync request type
      *  Return value: 0 -- success
      *                -1 -- failure
      **/
-    int32_t (*start_stream_frame_sync) (uint32_t camera_handle,
-            uint32_t ch_id, uint32_t stream_id);
-
-   /** stop_stream_frame_sync:  function definition to stop frame buffer sync
-     *    @camera_handle : camer handler
-     *    @ch_id : channel handler
-     *    @stream_id : stream handler
-     *  Return value: 0 -- success
-     *                -1 -- failure
-     **/
-    int32_t (*stop_stream_frame_sync) (uint32_t camera_handle,
-            uint32_t ch_id, uint32_t stream_id);
-
-   /** switch_stream: function definition to switch stream frame
-     *    @camera_handle : camer handler
-     *    @ch_id : channel handler
-     *    @stream_id : stream handler
-     *  Return value: 0 -- success
-     *                -1 -- failure
-     **/
-    int32_t (*switch_stream_callback) (uint32_t camera_handle,
-            uint32_t ch_id, uint32_t stream_id);
+    int32_t (*handle_frame_sync_cb) (uint32_t camera_handle,
+            uint32_t ch_id, uint32_t stream_id,
+            mm_camera_cb_req_type req_type);
 } mm_camera_ops_t;
 
 /** mm_camera_vtbl_t: virtual table for camera operations
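
The three special-purpose frame sync ops (start_stream_frame_sync, stop_stream_frame_sync, switch_stream_callback) collapse into a single handle_frame_sync_cb op parameterized by mm_camera_cb_req_type. A hedged call-site sketch, assuming an mm_camera_ops_t pointer named camera_ops:

    /* Illustrative only: what used to be switch_stream_callback() is now a
     * request type on the consolidated op. */
    int32_t rc = camera_ops->handle_frame_sync_cb(camera_handle, ch_id,
            stream_id, MM_CAMERA_CB_REQ_TYPE_SWITCH);
    if (rc < 0) {
        LOGE("handle_frame_sync_cb failed %d", rc);
    }
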
diff --git a/msmcobalt/QCamera2/stack/mm-camera-interface/Android.mk b/msmcobalt/QCamera2/stack/mm-camera-interface/Android.mk
index 1c55393..69b485d 100644
--- a/msmcobalt/QCamera2/stack/mm-camera-interface/Android.mk
+++ b/msmcobalt/QCamera2/stack/mm-camera-interface/Android.mk
@@ -25,11 +25,11 @@
     LOCAL_CFLAGS += -DUSE_ION
 endif
 
-ifneq (,$(filter msm8974 msm8916 msm8226 msm8610 msm8916 apq8084 msm8084 msm8994 msm8992 msm8952 msm8937 msm8953 msm8996 msmcobalt msmfalcon, $(TARGET_BOARD_PLATFORM)))
+ifneq (,$(filter msm8974 msm8916 msm8226 msm8610 msm8916 apq8084 msm8084 msm8994 msm8992 msm8952 msm8937 msm8953 msm8996 msmcobalt msmfalcon msm8998, $(TARGET_BOARD_PLATFORM)))
     LOCAL_CFLAGS += -DVENUS_PRESENT
 endif
 
-ifneq (,$(filter msm8996 msmcobalt msmfalcon,$(TARGET_BOARD_PLATFORM)))
+ifneq (,$(filter msm8996 msmcobalt msmfalcon msm8998,$(TARGET_BOARD_PLATFORM)))
     LOCAL_CFLAGS += -DUBWC_PRESENT
 endif
 
diff --git a/msmcobalt/QCamera2/stack/mm-camera-interface/inc/mm_camera.h b/msmcobalt/QCamera2/stack/mm-camera-interface/inc/mm_camera.h
index 2e0f15d..9ebf523 100644
--- a/msmcobalt/QCamera2/stack/mm-camera-interface/inc/mm_camera.h
+++ b/msmcobalt/QCamera2/stack/mm-camera-interface/inc/mm_camera.h
@@ -216,7 +216,6 @@
     MM_STREAM_EVT_GET_QUEUED_BUF_COUNT,
     MM_STREAM_EVT_REG_FRAME_SYNC,
     MM_STREAM_EVT_TRIGGER_FRAME_SYNC,
-    MM_STREAM_EVT_SWITCH_STREAM_CB,
     MM_STREAM_EVT_CANCEL_BUF,
     MM_STREAM_EVT_MAX
 } mm_stream_evt_type_t;
@@ -256,14 +255,13 @@
     cam_queue_t que;
 
     /*queue attributed*/
-    /*Number of frames to wait before process unmatched cb*/
-    uint8_t max_unmatched_frames;
+    mm_camera_channel_attr_t attr;
 
     /*Expected frame for this queue*/
     uint32_t expected_frame_id;
 
-    /*Parameter to consider during frame sync*/
-    mm_camera_super_buf_priority_t priority;
+    /*Total match count*/
+    uint32_t match_cnt;
 } mm_frame_sync_queue_t;
 
 /*Structure definition to carry frame sync details*/
@@ -275,7 +273,7 @@
     pthread_mutex_t sync_lock;
 
     /*Limited number of synced frame request*/
-    uint32_t num_buf_requested;
+    mm_camera_req_buf_t req_buf;
 
     /*Queue to hold super buffers*/
     mm_frame_sync_queue_t superbuf_queue;
@@ -395,7 +393,6 @@
     MM_CHANNEL_EVT_REG_STREAM_BUF_CB,
     MM_CHANNEL_EVT_REG_FRAME_SYNC,
     MM_CHANNEL_EVT_TRIGGER_FRAME_SYNC,
-    MM_CHANNEL_EVT_SWITCH_STREAM_CB,
 } mm_channel_evt_type_t;
 
 typedef struct {
@@ -577,7 +574,7 @@
     uint32_t cam_hdl;
     uint32_t ch_hdl;
     uint32_t stream_hdl;
-    mm_channel_queue_node_t* super_buf;
+    mm_channel_queue_node_t *super_buf;
 } mm_channel_pp_info_t;
 
 /* mm_camera */
@@ -643,10 +640,10 @@
     mm_camera_obj_t *a_cam_obj;
     uint32_t a_ch_id;
     uint32_t a_stream_id;
-    uint8_t max_unmatched_frames;
+    uint8_t is_active;
+    mm_camera_channel_attr_t attr;
     mm_camera_buf_notify_t buf_cb;
     uint8_t is_res_shared;
-    mm_camera_super_buf_priority_t priority;
     void *userdata;
 } mm_camera_frame_sync_t;
 
@@ -658,12 +655,13 @@
     mm_camera_frame_sync_t *sync_attr;
 } mm_evt_paylod_reg_frame_sync;
 
-/*Payload for strart/stop frame sync event in MCI*/
+/*Payload to handle frame sync */
 typedef struct {
     uint32_t stream_id;
-    uint8_t enable_frame_sync;
+    mm_camera_cb_req_type type;
 } mm_evt_paylod_trigger_frame_sync;
 
+
 /**********************************************************************************
 * external function declare
 ***********************************************************************************/
@@ -811,13 +809,8 @@
 extern int32_t mm_camera_reg_frame_sync(mm_camera_obj_t *my_obj,
         uint32_t ch_id, uint32_t stream_id,
         mm_camera_frame_sync_t *sync_attr);
-extern int32_t mm_camera_start_frame_sync(mm_camera_obj_t *my_obj,
-        uint32_t ch_id, uint32_t stream_id);
-extern int32_t mm_camera_stop_frame_sync(mm_camera_obj_t *my_obj,
-        uint32_t ch_id, uint32_t stream_id);
-extern int32_t mm_camera_switch_stream_cb(mm_camera_obj_t *my_obj,
-        uint32_t ch_id, uint32_t stream_id);
-
+extern int32_t mm_camera_handle_frame_sync_cb(mm_camera_obj_t *my_obj,
+        uint32_t ch_id, uint32_t stream_id, mm_camera_cb_req_type req_type);
 
 /* mm_channel */
 extern int32_t mm_channel_fsm_fn(mm_channel_t *my_obj,
diff --git a/msmcobalt/QCamera2/stack/mm-camera-interface/inc/mm_camera_muxer.h b/msmcobalt/QCamera2/stack/mm-camera-interface/inc/mm_camera_muxer.h
index 6fb4d2a..40c7286 100644
--- a/msmcobalt/QCamera2/stack/mm-camera-interface/inc/mm_camera_muxer.h
+++ b/msmcobalt/QCamera2/stack/mm-camera-interface/inc/mm_camera_muxer.h
@@ -155,13 +155,6 @@
 int32_t mm_camera_muxer_reg_frame_sync(mm_camera_obj_t *cam_obj,
         uint32_t ch_id, uint32_t stream_id,
         mm_camera_intf_frame_sync_t *sync_attr);
-int32_t mm_camera_muxer_start_frame_sync(mm_camera_obj_t *cam_obj,
-        uint32_t ch_id, uint32_t stream_id);
-int32_t mm_camera_muxer_stop_frame_sync(mm_camera_obj_t *cam_obj,
-        uint32_t ch_id, uint32_t stream_id);
-int32_t mm_camera_muxer_switch_stream(uint32_t camera_handle,
-        uint32_t ch_id, uint32_t stream_id,
-        mm_camera_obj_t *cam_obj);
 int32_t mm_camera_muxer_set_dual_cam_cmd(uint32_t camera_handle,
         mm_camera_obj_t *cam_obj);
 
@@ -180,6 +173,10 @@
 int32_t mm_camera_muxer_put_stream_bufs(mm_stream_t *my_obj);
 int32_t mm_camera_muxer_stream_frame_sync_flush(mm_stream_t *str_obj);
 int32_t mm_camera_muxer_channel_frame_sync_flush(mm_channel_t *my_obj);
+mm_frame_sync_queue_node_t *mm_camera_muxer_frame_sync_dequeue(
+        mm_frame_sync_queue_t *queue, uint8_t matched_only);
+int32_t mm_camera_muxer_channel_req_data_cb(mm_camera_req_buf_t *req_buf,
+        mm_channel_t *ch_obj);
 int32_t mm_camera_map_stream_buf_ops(uint32_t buf_idx,
         int32_t plane_idx, int fd, size_t size,
         void *buffer, cam_mapping_buf_type type,
diff --git a/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera.c b/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera.c
index 05677ca..c434e26 100644
--- a/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera.c
+++ b/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera.c
@@ -1074,7 +1074,6 @@
     } else {
         pthread_mutex_unlock(&my_obj->cam_lock);
     }
-
     return s_hdl;
 }
 
@@ -2487,83 +2486,9 @@
 }
 
 /*===========================================================================
- * FUNCTION   : mm_camera_start_frame_sync
+ * FUNCTION   : mm_camera_handle_frame_sync_cb
  *
- * DESCRIPTION: start frame sync under this camera
- *
- * PARAMETERS :
- *   @my_obj    : camera object
- *   @ch_id     : channel handle
- *   @stream_id : stream that will be linked
- *
- * RETURN    : int32_t type of status
- *             0  -- success
- *             1 --  failure
- *==========================================================================*/
-int32_t mm_camera_start_frame_sync(mm_camera_obj_t *my_obj,
-        uint32_t ch_id, uint32_t stream_id)
-{
-    int32_t rc = -1;
-
-    mm_channel_t *ch_obj =
-            mm_camera_util_get_channel_by_handler(my_obj, ch_id);
-    if (NULL != ch_obj) {
-        pthread_mutex_lock(&ch_obj->ch_lock);
-        pthread_mutex_unlock(&my_obj->cam_lock);
-        mm_evt_paylod_trigger_frame_sync payload;
-        payload.enable_frame_sync = 1;
-        payload.stream_id = stream_id;
-        rc = mm_channel_fsm_fn(ch_obj,
-                MM_CHANNEL_EVT_TRIGGER_FRAME_SYNC,
-                (void*)&payload, NULL);
-    } else {
-        pthread_mutex_unlock(&my_obj->cam_lock);
-    }
-    return rc;
-}
-
-/*===========================================================================
- * FUNCTION   : mm_camera_stop_frame_sync
- *
- * DESCRIPTION: stop frame sync under this camera
- *
- * PARAMETERS :
- *   @my_obj    : camera object
- *   @ch_id     : channel handle
- *   @stream_id : stream that will be linked
- *
- * RETURN    : int32_t type of status
- *             0  -- success
- *             1 --  failure
- *==========================================================================*/
-int32_t mm_camera_stop_frame_sync(mm_camera_obj_t *my_obj,
-        uint32_t ch_id, uint32_t stream_id)
-{
-    int32_t rc = -1;
-
-    mm_channel_t *ch_obj =
-            mm_camera_util_get_channel_by_handler(my_obj, ch_id);
-
-    if (NULL != ch_obj) {
-        pthread_mutex_lock(&ch_obj->ch_lock);
-        pthread_mutex_unlock(&my_obj->cam_lock);
-        mm_evt_paylod_trigger_frame_sync payload;
-        payload.enable_frame_sync = 0;
-        payload.stream_id = stream_id;
-        rc = mm_channel_fsm_fn(ch_obj,
-                MM_CHANNEL_EVT_TRIGGER_FRAME_SYNC,
-                (void*)&payload, NULL);
-
-    } else {
-        pthread_mutex_unlock(&my_obj->cam_lock);
-    }
-    return rc;
-}
-
-/*===========================================================================
- * FUNCTION   : mm_camera_switch_stream_cb
- *
- * DESCRIPTION: switch stream callbacks in case of multiple instance of streams
+ * DESCRIPTION: enable or disable callbacks in case of frame sync
  *
  * PARAMETERS :
  *   @my_obj    : camera object
@@ -2574,8 +2499,8 @@
  *             0  -- success
  *             1 --  failure
  *==========================================================================*/
-int32_t mm_camera_switch_stream_cb(mm_camera_obj_t *my_obj,
-        uint32_t ch_id, uint32_t stream_id)
+int32_t mm_camera_handle_frame_sync_cb(mm_camera_obj_t *my_obj,
+        uint32_t ch_id, uint32_t stream_id, mm_camera_cb_req_type req_type)
 {
     int rc = -1;
     mm_channel_t *ch_obj = NULL;
@@ -2584,14 +2509,15 @@
     if (NULL != ch_obj) {
         pthread_mutex_lock(&ch_obj->ch_lock);
         pthread_mutex_unlock(&my_obj->cam_lock);
+        mm_evt_paylod_trigger_frame_sync payload;
+        payload.type = req_type;
+        payload.stream_id = stream_id;
         rc = mm_channel_fsm_fn(ch_obj,
-                MM_CHANNEL_EVT_SWITCH_STREAM_CB,
-                (void *)&stream_id,
-                NULL);
+                MM_CHANNEL_EVT_TRIGGER_FRAME_SYNC,
+                (void*)&payload, NULL);
     } else {
         pthread_mutex_unlock(&my_obj->cam_lock);
     }
-
     return rc;
 }
 
diff --git a/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c b/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
index 88d2c4f..2e7e751 100644
--- a/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
+++ b/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
@@ -66,8 +66,6 @@
         mm_evt_paylod_reg_frame_sync *sync);
 int32_t mm_channel_trigger_frame_sync(mm_channel_t *my_obj,
         mm_evt_paylod_trigger_frame_sync *payload);
-int32_t mm_channel_switch_stream_callback(mm_channel_t *my_obj,
-        uint32_t stream_id);
 int32_t mm_channel_config_stream(mm_channel_t *my_obj,
                                  uint32_t stream_id,
                                  mm_camera_stream_config_t *config);
@@ -206,13 +204,6 @@
         return;
     }
 
-    if (MM_CAMERA_CMD_TYPE_SUPER_BUF_DATA_CB != cmd_cb->cmd_type) {
-        LOGE("Wrong cmd_type (%d) for super buf dataCB",
-                    cmd_cb->cmd_type);
-        return;
-    }
-
-
     if (m_obj->master_ch_obj != NULL) {
         //get master object
         m_obj = m_obj->master_ch_obj;
@@ -221,7 +212,25 @@
     pthread_mutex_lock(&m_obj->frame_sync.sync_lock);
     if(m_obj->frame_sync.is_active) {
         //Frame sync enabled on master
-        mm_camera_muxer_channel_frame_sync(&cmd_cb->u.superbuf, m_obj);
+        switch(cmd_cb->cmd_type) {
+            case MM_CAMERA_CMD_TYPE_SUPER_BUF_DATA_CB: {
+                mm_camera_muxer_channel_frame_sync(&cmd_cb->u.superbuf, my_obj);
+            }
+            break;
+            case MM_CAMERA_CMD_TYPE_REQ_DATA_CB : {
+                mm_camera_muxer_channel_req_data_cb(&cmd_cb->u.req_buf, my_obj);
+            }
+            break;
+            case MM_CAMERA_CMD_TYPE_FLUSH_QUEUE : {
+                mm_camera_muxer_channel_frame_sync_flush(my_obj);
+                cam_sem_post(&my_obj->cb_thread.sync_sem);
+            }
+            break;
+            default : {
+                LOGW("Invalid cmd type %d", cmd_cb->cmd_type);
+            }
+            break;
+        }
     } else if (my_obj->bundle.super_buf_notify_cb && my_obj->bundle.is_cb_active) {
         //Super buffer channel callback is active
         my_obj->bundle.super_buf_notify_cb(&cmd_cb->u.superbuf, my_obj->bundle.user_data);
@@ -238,6 +247,62 @@
 }
 
 /*===========================================================================
+ * FUNCTION   : mm_channel_send_frame_sync_req_buf
+ *
+ * DESCRIPTION: Request buffer from super buffer sync queue
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_send_frame_sync_req_buf(mm_channel_t* my_obj)
+{
+    int32_t rc = 0;
+    mm_channel_t *m_obj = my_obj;
+    mm_camera_req_buf_type_t type = my_obj->req_type;
+
+    if (m_obj->master_ch_obj != NULL) {
+        m_obj = m_obj->master_ch_obj;
+    }
+
+    if (!m_obj->frame_sync.is_active) {
+        return rc;
+    }
+
+    if (MM_CAMERA_REQ_FRAME_SYNC_BUF != type) {
+        m_obj = my_obj;
+    } else if (m_obj != my_obj) {
+        /*Issue sync request only using master channel*/
+        return rc;
+    }
+
+    mm_camera_cmdcb_t* cb_node = NULL;
+
+    /* enqueue a request-buffer command and wake up the cb thread */
+    cb_node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+    if (NULL != cb_node) {
+        memset(cb_node, 0, sizeof(mm_camera_cmdcb_t));
+        cb_node->cmd_type = MM_CAMERA_CMD_TYPE_REQ_DATA_CB;
+
+        cb_node->u.req_buf.type = type;
+        cb_node->u.req_buf.num_buf_requested = 1;
+        cb_node->u.req_buf.cam_num = m_obj->cam_obj->my_num;
+
+        /* enqueue to cb thread */
+        cam_queue_enq(&(m_obj->cb_thread.cmd_queue), cb_node);
+        /* wake up cb thread */
+        cam_sem_post(&(m_obj->cb_thread.cmd_sem));
+    } else {
+        LOGE("No memory for mm_camera_node_t");
+        rc = -1;
+    }
+    return rc;
+}
+
+/*===========================================================================
  * FUNCTION   : mm_channel_process_stream_buf
  *
  * DESCRIPTION: handle incoming buffer from stream in a bundle. In this function,
@@ -258,6 +323,7 @@
     mm_camera_super_buf_notify_mode_t notify_mode;
     mm_channel_queue_node_t *node = NULL;
     mm_channel_t *ch_obj = (mm_channel_t *)user_data;
+    mm_channel_t *m_obj = ch_obj;
     uint32_t i = 0;
     /* Set expected frame id to a future frame idx, large enough to wait
     * for good_frame_idx_range, and small enough to still capture an image */
@@ -266,6 +332,12 @@
     if (NULL == ch_obj) {
         return;
     }
+
+    //get master object in case of multiple channels
+    if (m_obj->master_ch_obj != NULL) {
+        m_obj = m_obj->master_ch_obj;
+    }
+
     if (MM_CAMERA_CMD_TYPE_DATA_CB  == cmd_cb->cmd_type) {
         /* comp_and_enqueue */
         mm_channel_superbuf_comp_and_enqueue(
@@ -274,7 +346,7 @@
                         &cmd_cb->u.buf);
     } else if (MM_CAMERA_CMD_TYPE_REQ_DATA_CB  == cmd_cb->cmd_type) {
         /* skip frames if needed */
-        ch_obj->pending_cnt += cmd_cb->u.req_buf.num_buf_requested;
+        ch_obj->pending_cnt = cmd_cb->u.req_buf.num_buf_requested;
         ch_obj->pending_retro_cnt = cmd_cb->u.req_buf.num_retro_buf_requested;
         ch_obj->req_type = cmd_cb->u.req_buf.type;
         ch_obj->bWaitForPrepSnapshotDone = 0;
@@ -309,6 +381,9 @@
         ch_obj->bundle.superbuf_queue.expected_frame_id = cmd_cb->u.flush_cmd.frame_idx;
         mm_channel_superbuf_flush(ch_obj,
                 &ch_obj->bundle.superbuf_queue, cmd_cb->u.flush_cmd.stream_type);
+        if (m_obj->frame_sync.is_active) {
+            cam_sem_wait(&(m_obj->cb_thread.sync_sem));
+        }
         cam_sem_post(&(ch_obj->cmd_thread.sync_sem));
         return;
     } else if (MM_CAMERA_CMD_TYPE_GENERAL == cmd_cb->cmd_type) {
@@ -468,15 +543,18 @@
             ch_obj->bundle.superbuf_queue.expected_frame_id);
 
     /* dispatch frame if pending_cnt>0 or is in continuous streaming mode */
-    while (((ch_obj->pending_cnt > 0) ||
+    while ((((ch_obj->pending_cnt > 0) ||
              (MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS == notify_mode)) &&
-             (!ch_obj->bWaitForPrepSnapshotDone)) {
+             (!ch_obj->bWaitForPrepSnapshotDone))
+             || (m_obj->frame_sync.is_active)) {
+        uint8_t trigger_cb = 0;
 
         /* dequeue */
         mm_channel_node_info_t info;
         memset(&info, 0x0, sizeof(info));
 
-        if (ch_obj->req_type == MM_CAMERA_REQ_FRAME_SYNC_BUF) {
+        if (ch_obj->req_type == MM_CAMERA_REQ_FRAME_SYNC_BUF
+                && !m_obj->frame_sync.is_active) {
             // Lock the Queues
             mm_frame_sync_lock_queues();
             uint32_t match_frame = mm_frame_sync_find_matched(FALSE);
@@ -548,8 +626,13 @@
             }
         }
         if (info.num_nodes > 0) {
+            if ((m_obj->frame_sync.is_active)
+                    && (ch_obj->pending_cnt > 0)) {
+                trigger_cb = 1;
+            }
             /* decrease pending_cnt */
-            if (MM_CAMERA_SUPER_BUF_NOTIFY_BURST == notify_mode) {
+            if (MM_CAMERA_SUPER_BUF_NOTIFY_BURST == notify_mode
+                    && ch_obj->pending_cnt > 0) {
                 ch_obj->pending_cnt--;
                 if (ch_obj->pending_retro_cnt > 0) {
                   if (ch_obj->pending_retro_cnt == 1) {
@@ -604,6 +687,11 @@
             }
             /* dispatch superbuf */
             mm_channel_send_super_buf(&info);
+
+            if (trigger_cb) {
+                trigger_cb = 0;
+                mm_channel_send_frame_sync_req_buf(ch_obj);
+            }
         } else {
             /* no superbuf avail, break the loop */
             break;
@@ -1170,12 +1258,6 @@
                     (mm_evt_paylod_trigger_frame_sync *)in_val);
         }
         break;
-    case MM_CHANNEL_EVT_SWITCH_STREAM_CB:
-        {
-            uint32_t stream_id = *((uint32_t *)in_val);
-            rc = mm_channel_switch_stream_callback(my_obj, stream_id);
-        }
-        break;
      default:
         LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
                     my_obj->state, evt, in_val, out_val);
@@ -1372,6 +1454,10 @@
         //Frame sync reg for stream
         stream_obj = mm_channel_util_get_stream_by_handler(
                 my_obj, sync->stream_id);
+        if (stream_obj == NULL) {
+            LOGE("Invalid Stream ID %d", sync->stream_id);
+            return -1;
+        }
         sync->a_str_obj = mm_channel_util_get_stream_by_handler(
                 sync->a_ch_obj, sync->sync_attr->a_stream_id);
         return mm_stream_fsm_fn(stream_obj,
@@ -1396,10 +1482,9 @@
     } else {
         frame_sync->user_data = sync->sync_attr->userdata;
     }
+    frame_sync->is_active = sync->sync_attr->is_active;
     queue = &frame_sync->superbuf_queue;
-    queue->max_unmatched_frames =
-            sync->sync_attr->max_unmatched_frames;
-    queue->priority = sync->sync_attr->priority;
+    queue->attr = sync->sync_attr->attr;
     queue->num_objs = 0;
     memset(&queue->bundled_objs, 0, sizeof(queue->bundled_objs));
     queue->bundled_objs[queue->num_objs] = my_obj->my_hdl;
@@ -1442,45 +1527,24 @@
     if (payload->stream_id != 0) {
         stream_obj = mm_channel_util_get_stream_by_handler(
                 my_obj, payload->stream_id);
+        if (stream_obj == NULL) {
+            LOGE("Invalid Stream ID %d", payload->stream_id);
+            return -1;
+        }
         return mm_stream_fsm_fn(stream_obj,
                 MM_STREAM_EVT_TRIGGER_FRAME_SYNC,
-                &payload->enable_frame_sync,
+                &payload->type,
                 NULL);
     }
 
-    uint8_t start_sync = payload->enable_frame_sync;
-    mm_frame_sync_t *frame_sync = &m_obj->frame_sync;
-    pthread_mutex_lock(&frame_sync->sync_lock);
-    frame_sync->is_active = start_sync;
-    pthread_mutex_unlock(&frame_sync->sync_lock);
-    return rc;
-}
-
-/*===========================================================================
- * FUNCTION   : mm_channel_switch_stream_callback
- *
- * DESCRIPTION: switch stream callbacks in case of multiple instance of streams
- *
- * PARAMETERS :
- *   @my_obj       : camera object
- *   @ch_id        : channel handle
- *   @stream_id : stream id
- *
- * RETURN    : int32_t type of status
- *             0  -- success
- *             1 --  failure
- *==========================================================================*/
-int32_t mm_channel_switch_stream_callback(mm_channel_t *my_obj,
-        uint32_t stream_id)
-{
-    int32_t rc = -1;
-    mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj, stream_id);
-
-    if (NULL != s_obj) {
-        rc = mm_stream_fsm_fn(s_obj,
-                MM_STREAM_EVT_SWITCH_STREAM_CB,
-                NULL,
-                NULL);
+    if (payload->type == MM_CAMERA_CB_REQ_TYPE_FRAME_SYNC) {
+        mm_frame_sync_t *frame_sync = &m_obj->frame_sync;
+        pthread_mutex_lock(&frame_sync->sync_lock);
+        frame_sync->is_active = 1;
+        pthread_mutex_unlock(&frame_sync->sync_lock);
+    } else {
+        LOGE("Not supported for Channel");
+        rc = -1;
     }
     return rc;
 }
@@ -2003,7 +2067,6 @@
             continue;
         }
     }
-
     return rc;
 }
 
@@ -2038,10 +2101,6 @@
     /* send cam_sem_post to wake up cmd thread to dispatch super buffer */
     node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
     if (NULL != node) {
-        if(my_obj->frame_sync.is_active) {
-            my_obj->frame_sync.num_buf_requested =
-                    buf->num_buf_requested;
-        }
         memset(node, 0, sizeof(mm_camera_cmdcb_t));
         node->cmd_type = MM_CAMERA_CMD_TYPE_REQ_DATA_CB;
         node->u.req_buf = *buf;
@@ -3452,6 +3511,47 @@
 }
 
 /*===========================================================================
+ * FUNCTION   : mm_channel_send_frame_sync_flush
+ *
+ * DESCRIPTION: flush the superbuf frame sync queue.
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_send_frame_sync_flush(mm_channel_t* my_obj)
+{
+    int32_t rc = 0;
+    mm_channel_t *m_obj = my_obj;
+
+    if (m_obj->master_ch_obj != NULL) {
+        m_obj = m_obj->master_ch_obj;
+    }
+
+    if (m_obj->frame_sync.is_active) {
+        mm_camera_cmdcb_t* cb_node = NULL;
+
+        /* send cam_sem_post to wake up cb thread to flush sync queue */
+        cb_node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+        if (NULL != cb_node) {
+            memset(cb_node, 0, sizeof(mm_camera_cmdcb_t));
+            cb_node->cmd_type = MM_CAMERA_CMD_TYPE_FLUSH_QUEUE;
+            /* enqueue to cb thread */
+            cam_queue_enq(&(m_obj->cb_thread.cmd_queue), cb_node);
+            /* wake up cb thread */
+            cam_sem_post(&(m_obj->cb_thread.cmd_sem));
+        } else {
+            LOGE("No memory for mm_camera_node_t");
+            rc = -1;
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
  * FUNCTION   : mm_channel_superbuf_flush
  *
  * DESCRIPTION: flush the superbuf queue.
@@ -3490,6 +3590,9 @@
     }
     pthread_mutex_unlock(&queue->que.lock);
 
+    /* Flush super buffer frame sync queue */
+    mm_channel_send_frame_sync_flush(my_obj);
+
     return rc;
 }
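
The two channel helpers added above (mm_channel_send_frame_sync_req_buf, whose tail opens this section, and mm_channel_send_frame_sync_flush) follow the same enqueue-and-wake pattern: allocate a command node, push it onto the master channel's callback-thread queue, and post that thread's semaphore. The standalone C sketch below illustrates only that pattern; it is not HAL code, and every name in it (cmd_node_t, cmd_queue_t, send_cmd, worker) is invented for the example.

/* Minimal sketch of the enqueue-and-wake command pattern (assumed names). */
#include <pthread.h>
#include <semaphore.h>
#include <stdio.h>
#include <stdlib.h>

typedef struct cmd_node {
    int cmd_type;               /* e.g. a REQ_DATA_CB or FLUSH_QUEUE request */
    struct cmd_node *next;
} cmd_node_t;

typedef struct {
    pthread_mutex_t lock;
    cmd_node_t *head;
    sem_t cmd_sem;              /* wakes the callback thread */
} cmd_queue_t;

static int send_cmd(cmd_queue_t *q, int cmd_type)
{
    cmd_node_t *node = calloc(1, sizeof(*node));
    if (node == NULL) {
        fprintf(stderr, "No memory for cmd node\n");
        return -1;              /* same failure path as the HAL: log and bail */
    }
    node->cmd_type = cmd_type;

    pthread_mutex_lock(&q->lock);   /* enqueue to cb thread */
    node->next = q->head;
    q->head = node;
    pthread_mutex_unlock(&q->lock);

    sem_post(&q->cmd_sem);          /* wake up cb thread */
    return 0;
}

static void *worker(void *arg)
{
    cmd_queue_t *q = arg;
    sem_wait(&q->cmd_sem);          /* block until a command arrives */
    pthread_mutex_lock(&q->lock);
    cmd_node_t *node = q->head;
    q->head = node ? node->next : NULL;
    pthread_mutex_unlock(&q->lock);
    if (node) {
        printf("processed cmd %d\n", node->cmd_type);
        free(node);
    }
    return NULL;
}

int main(void)
{
    cmd_queue_t q = { .head = NULL };
    pthread_mutex_init(&q.lock, NULL);
    sem_init(&q.cmd_sem, 0, 0);
    pthread_t t;
    pthread_create(&t, NULL, worker, &q);
    send_cmd(&q, 1);                /* analogous to a one-frame request */
    pthread_join(t, NULL);
    return 0;
}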
 
diff --git a/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c b/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
index 5a8a631..3dbd060 100644
--- a/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
+++ b/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
@@ -56,19 +56,8 @@
 
 // 16th (starting from 0) bit tells its a BACK or FRONT camera
 #define CAM_SENSOR_FACING_MASK       (1U<<16)
-
-#ifdef DUAL_CAM_TEST //Temporary macro. Will be removed once we finalize sensor change.
-// 24th (starting from 0) bit tells its a MAIN or AUX camera
-#define CAM_SENSOR_MODE_MASK_MAIN    (1U<<24)
-#define CAM_SENSOR_MODE_MASK_AUX     (1U<<25)
-#define CAM_SENSOR_MODE_MASK_SECURE  (1U<<26)
-
-// 28th (starting from 0) bit tells its YUV sensor or not
-#define CAM_SENSOR_FORMAT_MASK       (1U<<28)
-#else
 #define CAM_SENSOR_TYPE_MASK         (1U<<24)
 #define CAM_SENSOR_FORMAT_MASK       (1U<<25)
-#endif
 
 /*===========================================================================
  * FUNCTION   : mm_camera_util_generate_handler
@@ -280,7 +269,7 @@
         }
     }
 
-    LOGH("camera_handle = %d rc = %d X", camera_handle, rc);
+    LOGH("camera_handle = %u rc = %u X", camera_handle, rc);
     return rc;
 }
 
@@ -437,7 +426,7 @@
             pthread_mutex_unlock(&g_intf_lock);
         }
     }
-    LOGH("rc = %d camera_handle = %d X", rc, camera_handle);
+    LOGH("rc = %d camera_handle = %u X", rc, camera_handle);
     return rc;
 }
 
@@ -483,7 +472,7 @@
             pthread_mutex_unlock(&g_intf_lock);
         }
     }
-    LOGH("rc = %d camera_handle = %d X", rc, camera_handle);
+    LOGH("rc = %d camera_handle = %u X", rc, camera_handle);
     return rc;
 }
 
@@ -535,7 +524,7 @@
         }
         return rc;
     }
-    LOGH("rc = %d camera_handle = %d X", rc, camera_handle);
+    LOGH("rc = %d camera_handle = %u X", rc, camera_handle);
     return rc;
 }
 
@@ -659,7 +648,7 @@
         pthread_mutex_unlock(&g_intf_lock);
     }
 
-    LOGH("camera_handler = %d rc = %d", camera_handle, rc);
+    LOGH("camera_handler = %u rc = %d", camera_handle, rc);
 #ifdef QCAMERA_REDEFINE_LOG
     mm_camera_debug_close();
 #endif
@@ -722,12 +711,12 @@
                 mm_camera_del_channel(my_obj, ch_id);
             } else {
                 ch_id |= aux_ch_id;
-            }
+            }
         } else {
             pthread_mutex_unlock(&g_intf_lock);
         }
     }
-    LOGH("camera_handle = %d ch_id = %d X", ch_id);
+    LOGH("camera_handle = %u ch_id = %u X", ch_id);
     return ch_id;
 }
 
@@ -782,7 +771,7 @@
             pthread_mutex_unlock(&g_intf_lock);
         }
     }
-    LOGH("rc = %d ch_id = %d X", rc, ch_id);
+    LOGH("rc = %d ch_id = %u X", rc, ch_id);
     return rc;
 }
 
@@ -1103,7 +1092,7 @@
         }
     }
 
-    LOGH("X ch_id = %d stream_id = %d linked_ch_id = %d id = %d",
+    LOGH("X ch_id = %u stream_id = %u linked_ch_id = %u id = %u",
             ch_id, stream_id, linked_ch_id, id);
     return (int32_t)id;
 }
@@ -1164,7 +1153,7 @@
             pthread_mutex_unlock(&g_intf_lock);
         }
     }
-    LOGH("X ch_id = %d stream_id = %d", ch_id, stream_id);
+    LOGH("X ch_id = %u stream_id = %u", ch_id, stream_id);
     return stream_id;
 }
 
@@ -1224,7 +1213,7 @@
             pthread_mutex_unlock(&g_intf_lock);
         }
     }
-    LOGH("X stream_id = %d rc = %d", stream_id, rc);
+    LOGH("X stream_id = %u rc = %d", stream_id, rc);
     return rc;
 }
 
@@ -1286,7 +1275,7 @@
             pthread_mutex_unlock(&g_intf_lock);
         }
     }
-    LOGH("X stream_id = %d rc = %d", stream_id, rc);
+    LOGH("X stream_id = %u rc = %d", stream_id, rc);
     return rc;
 }
 
@@ -1338,7 +1327,7 @@
             pthread_mutex_unlock(&g_intf_lock);
         }
     }
-    LOGH("X ch_id = %d rc = %d", ch_id, rc);
+    LOGH("X ch_id = %u rc = %d", ch_id, rc);
     return rc;
 }
 
@@ -1376,7 +1365,6 @@
             pthread_mutex_unlock(&g_intf_lock);
         }
     }
-
     if (chid) {
         uint32_t handle = get_main_camera_handle(camera_handle);
         pthread_mutex_lock(&g_intf_lock);
@@ -1390,7 +1378,7 @@
             pthread_mutex_unlock(&g_intf_lock);
         }
     }
-    LOGH("X ch_id = %d rc = %d", ch_id, rc);
+    LOGH("X ch_id = %u rc = %d", ch_id, rc);
     return rc;
 
 }
@@ -1456,7 +1444,7 @@
         }
     }
 
-    LOGH("X ch_id = %d rc = %d", ch_id, rc);
+    LOGH("X ch_id = %u rc = %d", ch_id, rc);
     return rc;
 }
 
@@ -1519,7 +1507,7 @@
         }
     }
 
-    LOGH("X ch_id = %d rc = %d", ch_id, rc);
+    LOGH("X ch_id = %u rc = %d", ch_id, rc);
     return rc;
 }
 
@@ -1574,7 +1562,7 @@
         }
     }
 
-    LOGH("X ch_id = %d rc = %d", ch_id, rc);
+    LOGH("X ch_id = %u rc = %d", ch_id, rc);
     return rc;
 }
 
@@ -1717,7 +1705,7 @@
         uint32_t aux_handle = get_aux_camera_handle(camera_handle);
         my_obj = mm_camera_util_get_camera_head(aux_handle);
         if(my_obj) {
-            pthread_mutex_lock(&my_obj->cam_lock);
+            pthread_mutex_lock(&my_obj->muxer_lock);
             pthread_mutex_unlock(&g_intf_lock);
             rc = mm_camera_muxer_configure_notify_mode(aux_handle, aux_ch_id,
                     notify_mode, my_obj);
@@ -1731,7 +1719,7 @@
         uint32_t handle = get_main_camera_handle(camera_handle);
         my_obj = mm_camera_util_get_camera_by_handler(handle);
         if(my_obj) {
-            pthread_mutex_lock(&my_obj->muxer_lock);
+            pthread_mutex_lock(&my_obj->cam_lock);
             pthread_mutex_unlock(&g_intf_lock);
             rc = mm_camera_config_channel_notify(my_obj, chid,
                     notify_mode);
@@ -2826,7 +2814,7 @@
             pthread_mutex_unlock(&g_intf_lock);
         }
     }
-    LOGH("X rc = %d ch_id = %d", rc, ch_id);
+    LOGH("X rc = %d ch_id = %u", rc, ch_id);
     return rc;
 }
 
@@ -2933,101 +2921,22 @@
 }
 
 /*===========================================================================
- * FUNCTION   : mm_camera_intf_start_stream_frame_sync
+ * FUNCTION   : mm_camera_intf_handle_frame_sync_cb
  *
- * DESCRIPTION: start frame buffer sync for the stream
+ * DESCRIPTION: Handle callback request type in case of frame sync mode
  *
  * PARAMETERS :
  *   @camera_handle: camera handle
  *   @ch_id        : channel handle
  *   @stream_id    : stream handle
+ *   @req_type     : callback request type
  *
  * RETURN     : int32_t type of status
  *              0  -- success
  *              1 -- failure
  *==========================================================================*/
-static int32_t mm_camera_intf_start_stream_frame_sync(uint32_t camera_handle,
-        uint32_t ch_id, uint32_t stream_id)
-{
-    int32_t rc = 0;
-    mm_camera_obj_t * my_obj = NULL;
-
-    LOGD("E handle = %u ch_id = %u stream_id = %u",
-            camera_handle, ch_id, stream_id);
-
-    pthread_mutex_lock(&g_intf_lock);
-    uint32_t handle = get_main_camera_handle(camera_handle);
-    uint32_t m_chid = get_main_camera_handle(ch_id);
-    uint32_t m_stream = get_main_camera_handle(stream_id);
-    my_obj = mm_camera_util_get_camera_by_handler(handle);
-    if(my_obj) {
-        pthread_mutex_lock(&my_obj->muxer_lock);
-        pthread_mutex_unlock(&g_intf_lock);
-        rc = mm_camera_muxer_start_frame_sync(my_obj,
-                 m_chid, m_stream);
-    } else {
-        pthread_mutex_unlock(&g_intf_lock);
-    }
-    LOGH("stream_id = %d rc = %d", stream_id, rc);
-    return (int32_t)rc;
-}
-
-/*===========================================================================
- * FUNCTION   : mm_camera_intf_stop_stream_frame_sync
- *
- * DESCRIPTION: stop frame buffer sync for the stream
- *
- * PARAMETERS :
- *   @camera_handle: camera handle
- *   @ch_id        : channel handle
- *   @stream_id    : stream handle
- *
- * RETURN     : int32_t type of status
- *              0  -- success
- *              1 -- failure
- *==========================================================================*/
-static int32_t mm_camera_intf_stop_stream_frame_sync(uint32_t camera_handle,
-        uint32_t ch_id, uint32_t stream_id)
-{
-    int32_t rc = 0;
-    mm_camera_obj_t * my_obj = NULL;
-
-    LOGD("E handle = %u ch_id = %u stream_id = %u",
-            camera_handle, ch_id, stream_id);
-
-    pthread_mutex_lock(&g_intf_lock);
-    uint32_t handle = get_main_camera_handle(camera_handle);
-    uint32_t m_chid = get_main_camera_handle(ch_id);
-    uint32_t m_stream = get_main_camera_handle(stream_id);
-    my_obj = mm_camera_util_get_camera_by_handler(handle);
-    if(my_obj) {
-        pthread_mutex_lock(&my_obj->muxer_lock);
-        pthread_mutex_unlock(&g_intf_lock);
-        rc = mm_camera_muxer_stop_frame_sync(my_obj,
-                 m_chid, m_stream);
-    } else {
-        pthread_mutex_unlock(&g_intf_lock);
-    }
-    LOGH("stream_id = %d rc = %d", stream_id, rc);
-    return (int32_t)rc;
-}
-
-/*===========================================================================
- * FUNCTION   : mm_camera_intf_switch_stream
- *
- * DESCRIPTION: switch between stream in case of multi streams
- *
- * PARAMETERS :
- *   @camera_handle: camera handle
- *   @ch_id        : channel handle
- *   @stream_id    : stream handle
- *
- * RETURN     : int32_t type of status
- *              0  -- success
- *              1 -- failure
- *==========================================================================*/
-static int32_t mm_camera_intf_switch_stream_cb(uint32_t camera_handle,
-        uint32_t ch_id, uint32_t stream_id)
+static int32_t mm_camera_intf_handle_frame_sync_cb(uint32_t camera_handle,
+        uint32_t ch_id, uint32_t stream_id, mm_camera_cb_req_type req_type)
 {
     int32_t rc = 0;
     mm_camera_obj_t * my_obj = NULL;
@@ -3043,11 +2952,11 @@
     if(my_obj) {
         pthread_mutex_lock(&my_obj->cam_lock);
         pthread_mutex_unlock(&g_intf_lock);
-        rc = mm_camera_switch_stream_cb(my_obj, m_chid, m_strid);
+        rc = mm_camera_handle_frame_sync_cb(my_obj, m_chid, m_strid, req_type);
     } else {
         pthread_mutex_unlock(&g_intf_lock);
     }
-    LOGH("stream_id = %d rc = %d", stream_id, rc);
+    LOGH("stream_id = %u rc = %d", stream_id, rc);
     return (int32_t)rc;
 }
 
@@ -3086,6 +2995,9 @@
 
 uint8_t validate_handle(uint32_t src_handle, uint32_t handle)
 {
+    if ((src_handle == 0) || (handle == 0)) {
+        return 0;
+    }
     return ((src_handle == handle)
             || (get_main_camera_handle(src_handle) == handle)
             || (get_aux_camera_handle(src_handle) == handle)
@@ -3135,9 +3047,7 @@
     .flush = mm_camera_intf_flush,
     .register_stream_buf_cb = mm_camera_intf_register_stream_buf_cb,
     .register_frame_sync = mm_camera_intf_reg_frame_sync,
-    .start_stream_frame_sync = mm_camera_intf_start_stream_frame_sync,
-    .stop_stream_frame_sync = mm_camera_intf_stop_stream_frame_sync,
-    .switch_stream_callback = mm_camera_intf_switch_stream_cb
+    .handle_frame_sync_cb = mm_camera_intf_handle_frame_sync_cb
 };
 
 /*===========================================================================
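
The interface change in this file collapses three ops-table entries (start_stream_frame_sync, stop_stream_frame_sync, switch_stream_callback) into the single handle_frame_sync_cb entry keyed by a request type. The sketch below is a standalone mock of that dispatch shape, not the real interface: the enum value names follow the ones used later in this patch (MM_CAMERA_CB_REQ_TYPE_SWITCH, _FRAME_SYNC, _ALL_CB), while the _DEFAULT value, the stub function, and the handle constants are illustrative assumptions.

/* Standalone mock of a request-typed callback-control entry point. */
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

typedef enum {
    MM_CAMERA_CB_REQ_TYPE_DEFAULT,     /* placeholder value, assumed */
    MM_CAMERA_CB_REQ_TYPE_SWITCH,      /* toggle master/aux stream callbacks */
    MM_CAMERA_CB_REQ_TYPE_FRAME_SYNC,  /* deliver frame-synced super buffers */
    MM_CAMERA_CB_REQ_TYPE_ALL_CB       /* enable callbacks on both streams */
} cb_req_type_t;

/* Stand-in for the single interface entry that now covers what the three
 * removed entries used to do. */
static int32_t handle_frame_sync_cb_stub(uint32_t camera_handle,
        uint32_t ch_id, uint32_t stream_id, cb_req_type_t req_type)
{
    printf("cam 0x%" PRIx32 " ch 0x%" PRIx32 " stream 0x%" PRIx32 " req %d\n",
            camera_handle, ch_id, stream_id, (int)req_type);
    return 0;
}

int main(void)
{
    /* One call with a request type replaces three separate ops calls. */
    handle_frame_sync_cb_stub(0x1, 0x10, 0x100, MM_CAMERA_CB_REQ_TYPE_FRAME_SYNC);
    handle_frame_sync_cb_stub(0x1, 0x10, 0x100, MM_CAMERA_CB_REQ_TYPE_SWITCH);
    handle_frame_sync_cb_stub(0x1, 0x10, 0x100, MM_CAMERA_CB_REQ_TYPE_ALL_CB);
    return 0;
}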
diff --git a/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_muxer.c b/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_muxer.c
index dc74b11..3315229 100644
--- a/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_muxer.c
+++ b/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_muxer.c
@@ -757,9 +757,11 @@
             frame_sync.a_ch_id = ch_id;
             frame_sync.userdata = userdata;
             frame_sync.a_stream_id = 0;
-            frame_sync.max_unmatched_frames = 0;
-            frame_sync.priority = MM_CAMERA_SUPER_BUF_PRIORITY_LOW;
-            frame_sync.buf_cb = channel_cb;
+            frame_sync.is_res_shared = 1;
+            if (attr != NULL) {
+                frame_sync.attr = *attr;
+                frame_sync.is_active = 1;
+            }
             pthread_mutex_lock(&cam_obj->cam_lock);
             mm_camera_reg_frame_sync(cam_obj, m_ch_id,
                     0, &frame_sync);
@@ -851,7 +853,7 @@
 uint32_t mm_camera_muxer_add_stream(uint32_t camera_handle,
         uint32_t ch_id, uint32_t src__ch_id, uint32_t src_stream_id, mm_camera_obj_t *cam_obj)
 {
-    int32_t stream_id = 0;
+    uint32_t stream_id = 0;
     int32_t rc = 0;
     mm_camera_obj_t *my_obj = NULL;
 
@@ -867,9 +869,9 @@
             frame_sync.a_ch_id = ch_id;
             frame_sync.userdata = NULL;
             frame_sync.a_stream_id = stream_id;
-            frame_sync.max_unmatched_frames = 0;
             frame_sync.buf_cb = NULL;
             frame_sync.is_res_shared = 1;
+            frame_sync.is_active = 0;
             pthread_mutex_lock(&cam_obj->cam_lock);
             rc = mm_camera_reg_frame_sync(cam_obj, src__ch_id,
                     src_stream_id, &frame_sync);
@@ -1334,10 +1336,7 @@
     if(my_obj && buf) {
         pthread_mutex_lock(&my_obj->cam_lock);
         pthread_mutex_unlock(&cam_obj->muxer_lock);
-        rc = mm_camera_start_frame_sync(my_obj,
-                chID, 0);
-        LOGH("Start Frame Sync chid = %d rc = %d", chID, rc);
-        pthread_mutex_lock(&my_obj->cam_lock);
+        buf->type = MM_CAMERA_REQ_FRAME_SYNC_BUF;
         rc = mm_camera_request_super_buf (my_obj, chID, buf);
     } else {
         pthread_mutex_unlock(&cam_obj->muxer_lock);
@@ -1374,10 +1373,6 @@
         pthread_mutex_lock(&my_obj->cam_lock);
         pthread_mutex_unlock(&cam_obj->muxer_lock);
         rc = mm_camera_cancel_super_buf_request(my_obj, ch_id);
-        pthread_mutex_lock(&my_obj->cam_lock);
-        mm_camera_stop_frame_sync(my_obj,
-                ch_id, 0);
-        
     } else {
         pthread_mutex_unlock(&cam_obj->muxer_lock);
     }
@@ -1645,8 +1640,7 @@
         frame_sync.a_ch_id = aux_chid;
         frame_sync.userdata = sync_attr->userdata;
         frame_sync.buf_cb = sync_attr->buf_cb;
-        frame_sync.max_unmatched_frames = sync_attr->max_unmatched_frames;
-        frame_sync.is_res_shared = 1;
+        frame_sync.attr = sync_attr->attr;
         pthread_mutex_lock(&cam_obj->cam_lock);
         pthread_mutex_unlock(&cam_obj->muxer_lock);
         rc = mm_camera_reg_frame_sync(cam_obj, chid, strid, &frame_sync);
@@ -1657,103 +1651,6 @@
 }
 
 /*===========================================================================
- * FUNCTION   : mm_camera_muxer_start_frame_sync
- *
- * DESCRIPTION: start frame buffer sync for the stream
- *
- * PARAMETERS :
- *   @camera_handle: camera handle
- *   @ch_id        : channel handle
- *   @stream_id    : stream handle
- *
- * RETURN     : int32_t type of status
- *              0  -- success
- *              1 -- failure
- *==========================================================================*/
-int32_t mm_camera_muxer_start_frame_sync(mm_camera_obj_t *my_obj,
-        uint32_t ch_id, uint32_t stream_id)
-{
-    int32_t rc = 0;
-
-    if(my_obj) {
-        pthread_mutex_lock(&my_obj->cam_lock);
-        pthread_mutex_unlock(&my_obj->muxer_lock);
-        LOGD("ch_id = %d stream_id = %d", ch_id, stream_id);
-        rc = mm_camera_start_frame_sync(my_obj,
-                ch_id, stream_id);
-    } else {
-        pthread_mutex_unlock(&my_obj->muxer_lock);
-    }
-    return rc;
-}
-
-/*===========================================================================
- * FUNCTION   : mm_camera_muxer_stop_frame_sync
- *
- * DESCRIPTION: stop frame buffer sync for the stream
- *
- * PARAMETERS :
- *   @camera_handle: camera handle
- *   @ch_id        : channel handle
- *   @stream_id    : stream handle
- *
- * RETURN     : int32_t type of status
- *              0  -- success
- *              1 -- failure
- *==========================================================================*/
-int32_t mm_camera_muxer_stop_frame_sync(mm_camera_obj_t *my_obj,
-        uint32_t ch_id, uint32_t stream_id)
-{
-    int32_t rc = 0;
-
-    if(my_obj) {
-        pthread_mutex_lock(&my_obj->cam_lock);
-        pthread_mutex_unlock(&my_obj->muxer_lock);
-        LOGD("ch_id = %d stream_id = %d", ch_id, stream_id);
-        rc = mm_camera_stop_frame_sync(my_obj,
-                ch_id, stream_id);
-    } else {
-        pthread_mutex_unlock(&my_obj->muxer_lock);
-    }
-    return rc;
-}
-
-/*===========================================================================
- * FUNCTION   : mm_camera_muxer_switch_stream
- *
- * DESCRIPTION: switch between stream in case of multi streams
- *
- * PARAMETERS :
- *   @camera_handle: camera handle
- *   @ch_id        : channel handle
- *   @stream_id    : stream handle
- *
- * RETURN     : int32_t type of status
- *              0  -- success
- *              1 -- failure
- *==========================================================================*/
-int32_t mm_camera_muxer_switch_stream(uint32_t camera_handle,
-        uint32_t ch_id, uint32_t stream_id,
-        mm_camera_obj_t *cam_obj)
-{
-    int32_t rc = 0;
-    mm_camera_obj_t * my_obj = NULL;
-    my_obj = mm_muxer_util_get_camera_by_obj(camera_handle, cam_obj);
-
-    if(my_obj) {
-        pthread_mutex_lock(&my_obj->cam_lock);
-        pthread_mutex_unlock(&cam_obj->muxer_lock);
-        LOGD("ch_id = %d stream_id = %d", ch_id, stream_id);
-        //TODO
-        //rc = mm_camera_reg_stream_buf_cb(my_obj, ch_id, stream_id,
-        //        buf_cb, cb_type, userdata);
-    } else {
-        pthread_mutex_unlock(&cam_obj->muxer_lock);
-    }
-    return rc;
-}
-
-/*===========================================================================
  * FUNCTION   : mm_camera_muxer_set_dual_cam_cmd
  *
  * DESCRIPTION: send event to trigger read on dual camera cmd buffer
@@ -1801,6 +1698,10 @@
     mm_stream_t *my_obj = (mm_stream_t *)user_data;
     mm_frame_sync_queue_node_t dispatch_buf;
 
+    if ((super_buf == NULL) || (super_buf->num_bufs == 0)) {
+        return;
+    }
+
     if (my_obj->master_str_obj != NULL) {
         my_obj = my_obj->master_str_obj;
     }
@@ -1820,19 +1721,14 @@
             if (dispatch_buf.super_buf[i].num_bufs == 1) {
                 super_buf.bufs[super_buf.num_bufs++] =
                         dispatch_buf.super_buf[i].bufs[0];
-                super_buf.camera_handle |= dispatch_buf.super_buf[i].camera_handle;
-                super_buf.ch_id |= dispatch_buf.super_buf[i].ch_id;
+                super_buf.camera_handle = my_obj->ch_obj->cam_obj->my_hdl;
+                super_buf.ch_id = my_obj->ch_obj->my_hdl;
             }
         }
+        pthread_mutex_lock(&my_obj->cb_lock);
         my_obj->frame_sync.super_buf_notify_cb(&super_buf,
                 my_obj->frame_sync.user_data);
-
-        if (my_obj->frame_sync.num_buf_requested != 0) {
-            my_obj->frame_sync.num_buf_requested--;
-            if (my_obj->frame_sync.num_buf_requested == 0) {
-                my_obj->frame_sync.is_active = 0;
-            }
-        }
+        pthread_mutex_unlock(&my_obj->cb_lock);
     }
 }
 
@@ -1850,13 +1746,11 @@
 void mm_camera_muxer_channel_frame_sync(mm_camera_super_buf_t *super_buf,
         void *user_data)
 {
-    int32_t rc = 0, i = 0;
-    mm_camera_super_buf_notify_mode_t notify_mode;
-    mm_channel_t *m_obj = (mm_channel_t *)user_data;
-    mm_channel_t *s_obj = m_obj;
-    mm_frame_sync_queue_node_t dispatch_buf;
+    int32_t rc = 0;
+    mm_channel_t *ch_obj = (mm_channel_t *)user_data;
+    mm_channel_t *m_obj = ch_obj;
 
-    if ((super_buf == NULL) && (super_buf->num_bufs == 0)) {
+    if ((super_buf == NULL) || (super_buf->num_bufs == 0)) {
         return;
     }
 
@@ -1864,40 +1758,85 @@
         m_obj = m_obj->master_ch_obj;
     }
 
-    notify_mode = m_obj->bundle.superbuf_queue.attr.notify_mode;
-    if (notify_mode == MM_CAMERA_SUPER_BUF_NOTIFY_BURST
-            && (super_buf->ch_id == m_obj->my_hdl)) {
-        mm_camera_req_buf_t req_buf;
-        memset(&req_buf, 0, sizeof(req_buf));
-        req_buf.num_buf_requested = 1;
-        req_buf.frame_idx = super_buf->bufs[0]->frame_idx;
-        s_obj = m_obj->aux_ch_obj[0];
-        pthread_mutex_lock(&s_obj->cam_obj->cam_lock);
-        mm_camera_request_super_buf(s_obj->cam_obj,
-                s_obj->my_hdl, &req_buf);
-    }
-
-    memset(&dispatch_buf, 0, sizeof(dispatch_buf));
     rc = mm_camera_muxer_do_frame_sync(&m_obj->frame_sync.superbuf_queue,
-            super_buf, &dispatch_buf);
+            super_buf, NULL);
+    mm_camera_muxer_channel_req_data_cb(NULL,
+                ch_obj);
+}
 
-    if (m_obj->frame_sync.super_buf_notify_cb && rc == 0 && dispatch_buf.num_objs > 1) {
-        for (i = 0; i < MAX_OBJS_FOR_FRAME_SYNC; i++) {
-            if (dispatch_buf.super_buf[i].num_bufs != 0) {
-                LOGH("Super buffer frameID : %d",
-                        dispatch_buf.super_buf[i].bufs[0]->frame_idx);
-                m_obj->frame_sync.super_buf_notify_cb(&dispatch_buf.super_buf[i],
-                        m_obj->frame_sync.user_data);
-            }
-        }
-        if (m_obj->frame_sync.num_buf_requested > 0) {
-            m_obj->frame_sync.num_buf_requested--;
-            if (m_obj->frame_sync.num_buf_requested == 0) {
-                LOGH("Stop Frame Sync chid = %d", m_obj->my_hdl);
-                m_obj->frame_sync.is_active = 0;
-            }
-        }
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_muxer_channel_req_data_cb
+ *
+ * DESCRIPTION: Issue super buffer callback based on request setting
+ *
+ * PARAMETERS :
+ *   @req_buf: buffer request setting
+ *   @ch_obj        : channel object
+ *
+ * RETURN     : none
+ *==========================================================================*/
+int32_t mm_camera_muxer_channel_req_data_cb(mm_camera_req_buf_t *req_buf,
+        mm_channel_t *ch_obj)
+{
+    int32_t rc = 0, i;
+    mm_channel_t *m_obj = (mm_channel_t *)ch_obj;
+    mm_frame_sync_queue_node_t* super_obj = NULL;
+    mm_frame_sync_t *frame_sync = NULL;
+    uint8_t trigger_cb = 0;
+
+    if (m_obj->master_ch_obj != NULL) {
+        m_obj = m_obj->master_ch_obj;
     }
+
+    frame_sync = &m_obj->frame_sync;
+    if (req_buf != NULL) {
+        frame_sync->req_buf.num_buf_requested +=
+                req_buf->num_buf_requested;
+        frame_sync->req_buf.type = req_buf->type;
+    }
+
+    while ((frame_sync->req_buf.num_buf_requested > 0)
+            || (frame_sync->superbuf_queue.attr.notify_mode ==
+            MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS)) {
+        super_obj = mm_camera_muxer_frame_sync_dequeue(
+                &frame_sync->superbuf_queue, frame_sync->req_buf.type);
+        if (super_obj == NULL) {
+            break;
+        }
+        if (frame_sync->super_buf_notify_cb && super_obj->num_objs != 0) {
+            if (frame_sync->req_buf.type == MM_CAMERA_REQ_FRAME_SYNC_BUF) {
+                for (i = 0; i < MAX_OBJS_FOR_FRAME_SYNC; i++) {
+                    if (super_obj->super_buf[i].num_bufs != 0) {
+                        frame_sync->super_buf_notify_cb(
+                                &super_obj->super_buf[i],
+                                frame_sync->user_data);
+                    }
+                }
+                trigger_cb = 1;
+            } else {
+                for (i = 0; i < MAX_OBJS_FOR_FRAME_SYNC; i++) {
+                    if (super_obj->super_buf[i].num_bufs != 0) {
+                        if (super_obj->super_buf[i].ch_id ==
+                                ch_obj->my_hdl) {
+                            frame_sync->super_buf_notify_cb(
+                                    &super_obj->super_buf[i],
+                                    frame_sync->user_data);
+                            trigger_cb = 1;
+                        } else {
+                            mm_camera_muxer_buf_done(&super_obj->super_buf[i]);
+                        }
+                    }
+                }
+            }
+            if ((m_obj->frame_sync.req_buf.num_buf_requested > 0)
+                    && trigger_cb) {
+                m_obj->frame_sync.req_buf.num_buf_requested--;
+            }
+        }
+        free(super_obj);
+    }
+    return rc;
 }
 
 /*===========================================================================
@@ -1907,16 +1846,12 @@
  *
  * PARAMETERS :
  *   @queue: ptr to queue to dequeue object
- *   @dispatch_buf        : Ptr to carry dequeued node
+ *   @matched_only : when set, dequeue only a matched super-buffer node
  *
- * RETURN     : int32_t type of status
- *              0  -- success
- *              1 -- failure
+ * RETURN     : ptr to a node from superbuf queue
  *==========================================================================*/
-int32_t mm_camera_muxer_frame_sync_dequeue(
-        mm_frame_sync_queue_t *queue, mm_frame_sync_queue_node_t *dispatch_buf)
+mm_frame_sync_queue_node_t *mm_camera_muxer_frame_sync_dequeue(
+        mm_frame_sync_queue_t *queue, uint8_t matched_only)
 {
-    int32_t rc = 0;
     cam_node_t* node = NULL;
     struct cam_list *head = NULL;
     struct cam_list *pos = NULL;
@@ -1929,16 +1864,23 @@
         /* get the first node */
         node = member_of(pos, cam_node_t, list);
         super_buf = (mm_frame_sync_queue_node_t*)node->data;
+        if ( (NULL != super_buf) &&
+             (matched_only == TRUE) &&
+             (super_buf->matched == FALSE) ) {
+            super_buf = NULL;
+        }
+
         if (NULL != super_buf) {
-            *dispatch_buf = *super_buf;
             queue->que.size--;
             cam_list_del_node(&node->list);
             free(node);
-            free(super_buf);
+            if (super_buf->matched) {
+                queue->match_cnt--;
+            }
         }
     }
     pthread_mutex_unlock(&queue->que.lock);
-    return rc;
+    return super_buf;
 }
 
 /*===========================================================================
@@ -1960,10 +1902,10 @@
         mm_frame_sync_queue_node_t *dispatch_buf)
 {
     cam_node_t* node = NULL;
-    uint8_t buf_s_idx, found_super_buf, unmatched_bundles;
+    uint8_t buf_s_idx, i, found_super_buf, unmatched_bundles;
     struct cam_list *head = NULL;
     struct cam_list *pos = NULL;
-    mm_frame_sync_queue_node_t* super_buf = NULL;
+    mm_frame_sync_queue_node_t* super_obj = NULL;
     struct cam_list *last_buf = NULL, *insert_before_buf = NULL;
 
     if (buffer == NULL || buffer->num_bufs == 0) {
@@ -1999,25 +1941,29 @@
 
     while (pos != head) {
         node = member_of(pos, cam_node_t, list);
-        super_buf = (mm_frame_sync_queue_node_t *)node->data;
-
-        if (NULL != super_buf) {
-            if (buffer->bufs[0]->frame_idx == super_buf->frame_idx) {
+        super_obj = (mm_frame_sync_queue_node_t *)node->data;
+        if (NULL != super_obj) {
+            if (super_obj->matched == 1) {
+                /* find a matched super buf, move to next one */
+                pos = pos->next;
+                continue;
+            } else if (buffer->bufs[0]->frame_idx == super_obj->frame_idx) {
                 found_super_buf = 1;
                 break;
-            } else if ((buffer->bufs[0]->frame_idx >= super_buf->frame_idx)
-                    && (queue->priority == MM_CAMERA_SUPER_BUF_PRIORITY_LOW)) {
+            } else if ((buffer->bufs[0]->frame_idx >= super_obj->frame_idx)
+                    && (queue->attr.priority ==
+                    MM_CAMERA_SUPER_BUF_PRIORITY_LOW)) {
                 found_super_buf = 1;
                 break;
             } else {
                 unmatched_bundles++;
                 if ( NULL == last_buf ) {
-                    if ( super_buf->frame_idx < buffer->bufs[0]->frame_idx) {
+                    if ( super_obj->frame_idx < buffer->bufs[0]->frame_idx) {
                         last_buf = pos;
                     }
                 }
                 if ( NULL == insert_before_buf ) {
-                    if ( super_buf->frame_idx > buffer->bufs[0]->frame_idx) {
+                    if ( super_obj->frame_idx > buffer->bufs[0]->frame_idx) {
                         insert_before_buf = pos;
                     }
                 }
@@ -2028,61 +1974,110 @@
 
     LOGD("found_super_buf = %d id = %d unmatched = %d max = %d", found_super_buf,
             buffer->bufs[0]->frame_idx, unmatched_bundles,
-            queue->max_unmatched_frames);
-    if ( found_super_buf ) {
-        super_buf->super_buf[buf_s_idx] = *buffer;
-        super_buf->num_objs++;
-        if (super_buf->num_objs == queue->num_objs) {
-            super_buf->matched = 1;
-            *dispatch_buf = *super_buf;
-            queue->que.size--;
-            cam_list_del_node(&node->list);
-            free(node);
-            free(super_buf);
+            queue->attr.max_unmatched_frames);
+    if (found_super_buf) {
+        super_obj->super_buf[buf_s_idx] = *buffer;
+        super_obj->num_objs++;
+        if (super_obj->num_objs == queue->num_objs) {
+            super_obj->matched = 1;
+            queue->expected_frame_id = super_obj->frame_idx;
+            if (dispatch_buf != NULL) {
+                *dispatch_buf = *super_obj;
+                queue->que.size--;
+                cam_list_del_node(&node->list);
+                free(node);
+                free(super_obj);
+            } else {
+                queue->match_cnt++;
+            }
+        }
+        /* Any older unmatched buffers need to be released */
+        if ( last_buf ) {
+            while (last_buf != pos ) {
+                node = member_of(last_buf, cam_node_t, list);
+                super_obj = (mm_frame_sync_queue_node_t*)node->data;
+                if (NULL != super_obj) {
+                    for (i = 0; i < MAX_OBJS_FOR_FRAME_SYNC; i++) {
+                        if (super_obj->super_buf[i].num_bufs != 0) {
+                            mm_camera_muxer_buf_done(&super_obj->super_buf[i]);
+                        }
+                    }
+                    queue->que.size--;
+                    last_buf = last_buf->next;
+                    cam_list_del_node(&node->list);
+                    free(node);
+                    free(super_obj);
+                }
+            }
         }
     } else {
-        if ((queue->max_unmatched_frames < unmatched_bundles)
+        if ((queue->attr.max_unmatched_frames < unmatched_bundles)
                 && (NULL == last_buf)) {
             //incoming frame is older than the last bundled one
             mm_camera_muxer_buf_done(buffer);
-        } else if (queue->max_unmatched_frames < unmatched_bundles) {
+            pthread_mutex_unlock(&queue->que.lock);
+            return 0;
+        } else if (queue->attr.max_unmatched_frames < unmatched_bundles) {
             //dispatch old buffer. Cannot sync for configured unmatch value
             node = member_of(last_buf, cam_node_t, list);
-            super_buf = (mm_frame_sync_queue_node_t*)node->data;
-            *dispatch_buf = *super_buf;
+            super_obj = (mm_frame_sync_queue_node_t*)node->data;
+            queue->expected_frame_id = super_obj->frame_idx;
+            if (dispatch_buf != NULL && super_obj != NULL) {
+                //Dispatch unmatched buffer
+                *dispatch_buf = *super_obj;
+            } else if (super_obj != NULL) {
+                //release unmatched buffers
+                for (i = 0; i < MAX_OBJS_FOR_FRAME_SYNC; i++) {
+                    if (super_obj->super_buf[i].num_bufs != 0) {
+                        mm_camera_muxer_buf_done(&super_obj->super_buf[i]);
+                    }
+                }
+            }
             queue->que.size--;
             cam_list_del_node(&node->list);
             free(node);
-            free(super_buf);
+            free(super_obj);
         }
 
         //insert the new frame at the appropriate position.
         mm_frame_sync_queue_node_t *new_buf = NULL;
         cam_node_t* new_node = NULL;
-
-        new_buf = (mm_frame_sync_queue_node_t *)malloc(sizeof(mm_frame_sync_queue_node_t));
+        new_buf = (mm_frame_sync_queue_node_t *)
+                malloc(sizeof(mm_frame_sync_queue_node_t));
         if (NULL != new_buf) {
-            memset(new_buf, 0, sizeof(mm_channel_queue_node_t));
+            memset(new_buf, 0, sizeof(mm_frame_sync_queue_node_t));
             new_buf->super_buf[buf_s_idx] = *buffer;
             new_buf->num_objs++;
             new_buf->frame_idx = buffer->bufs[0]->frame_idx;
+            new_buf->matched = 0;
             if (new_buf->num_objs == queue->num_objs) {
                 new_buf->matched = 1;
-                *dispatch_buf = *new_buf;
-                queue->que.size--;
-                free(new_buf);
-                free(new_node);
+                queue->expected_frame_id = new_buf->frame_idx;
+                if (dispatch_buf != NULL) {
+                    *dispatch_buf = *new_buf;
+                    queue->que.size--;
+                    free(new_buf);
+                    free(new_node);
+                } else {
+                    queue->match_cnt++;
+                }
             } else {
                 /* enqueue */
                 new_node = (cam_node_t *)malloc(sizeof(cam_node_t));
-                memset(new_node, 0, sizeof(cam_node_t));
-                new_node->data = (void *)new_buf;
-                if ( insert_before_buf ) {
-                    cam_list_insert_before_node(&new_node->list, insert_before_buf);
+                if (new_node != NULL) {
+                    memset(new_node, 0, sizeof(cam_node_t));
+                    new_node->data = (void *)new_buf;
+                    if ( insert_before_buf ) {
+                        cam_list_insert_before_node(&new_node->list, insert_before_buf);
+                    } else {
+                        cam_list_add_tail_node(&new_node->list, &queue->que.head.list);
+                    }
+                    queue->que.size++;
                 } else {
-                    cam_list_add_tail_node(&new_node->list, &queue->que.head.list);
+                    LOGE("Out of memory");
+                    free(new_buf);
+                    mm_camera_muxer_buf_done(buffer);
                 }
-                queue->que.size++;
             }
         } else {
             if (NULL != new_buf) {
@@ -2091,11 +2086,20 @@
             mm_camera_muxer_buf_done(buffer);
         }
     }
-
-    if (dispatch_buf != NULL && dispatch_buf->num_objs != 0) {
-       queue->expected_frame_id = queue->expected_frame_id;
-    }
     pthread_mutex_unlock(&queue->que.lock);
+
+    /* Release (buf done) matched super bufs that exceed the water mark */
+    while (queue->match_cnt > queue->attr.water_mark) {
+        super_obj = mm_camera_muxer_frame_sync_dequeue(queue, FALSE);
+        if (NULL != super_obj) {
+            for (i = 0; i < MAX_OBJS_FOR_FRAME_SYNC; i++) {
+                if (super_obj->super_buf[i].num_bufs != 0) {
+                    mm_camera_muxer_buf_done(&super_obj->super_buf[i]);
+                }
+            }
+            free(super_obj);
+        }
+    }
     return 0;
 }
 
@@ -2122,9 +2126,12 @@
     }
 
     my_obj = mm_camera_util_get_camera_by_handler(buffer->camera_handle);
-    for (i=0; i < buffer->num_bufs; i++) {
-        if (buffer->bufs[i] != NULL) {
-            mm_camera_qbuf(my_obj, buffer->ch_id, buffer->bufs[i]);
+    if (my_obj != NULL) {
+        for (i=0; i < buffer->num_bufs; i++) {
+            if (buffer->bufs[i] != NULL) {
+                pthread_mutex_lock(&my_obj->cam_lock);
+                mm_camera_qbuf(my_obj, buffer->ch_id, buffer->bufs[i]);
+            }
         }
     }
 }
@@ -2145,6 +2152,7 @@
 {
     int32_t rc = 0;
     queue->expected_frame_id = 0;
+    queue->match_cnt = 0;
     queue->num_objs = 0;
     memset(&queue->bundled_objs, 0, sizeof(queue->bundled_objs));
     rc = cam_queue_init(&queue->que);
@@ -2171,6 +2179,37 @@
 }
 
 /*===========================================================================
+ * FUNCTION   : mm_camera_muxer_frame_sync_flush
+ *
+ * DESCRIPTION: function to flush frame sync queue
+ *
+ * PARAMETERS :
+ *   @queue: ptr to frame sync queue
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_muxer_frame_sync_flush(mm_frame_sync_queue_t *queue)
+{
+    int32_t rc = 0, i = 0;
+    mm_frame_sync_queue_node_t *super_obj = NULL;
+
+    super_obj = mm_camera_muxer_frame_sync_dequeue(queue, FALSE);
+    while (super_obj != NULL) {
+        for (i = 0; i < MAX_OBJS_FOR_FRAME_SYNC; i++) {
+            if (super_obj->super_buf[i].num_bufs != 0) {
+                mm_camera_muxer_buf_done(&super_obj->super_buf[i]);
+            }
+        }
+        free(super_obj);
+        super_obj = NULL;
+        super_obj = mm_camera_muxer_frame_sync_dequeue(queue, FALSE);
+    }
+    return rc;
+}
+
+/*===========================================================================
  * FUNCTION   : mm_camera_muxer_stream_frame_sync_flush
  *
  * DESCRIPTION: function to flush frame sync queue
@@ -2184,31 +2223,14 @@
  *==========================================================================*/
 int32_t mm_camera_muxer_stream_frame_sync_flush(mm_stream_t *str_obj)
 {
-    int32_t rc = 0, i = 0;
+    int32_t rc = 0;
     mm_stream_t *my_obj = str_obj;
-    mm_frame_sync_queue_node_t super_buf;
-    memset(&super_buf, 0, sizeof(super_buf));
 
     if (my_obj->master_str_obj) {
         my_obj = my_obj->master_str_obj;
     }
 
-    rc = mm_camera_muxer_frame_sync_dequeue(&my_obj->frame_sync.superbuf_queue, &super_buf);
-    while (super_buf.num_objs != 0 && rc != 0) {
-        for (i = 0; i < MAX_OBJS_FOR_FRAME_SYNC; i++) {
-            mm_camera_super_buf_t dispatch_buf;
-            memset(&dispatch_buf, 0, sizeof(dispatch_buf));
-            if (super_buf.super_buf[i].num_bufs == 1) {
-                dispatch_buf.bufs[dispatch_buf.num_bufs++] =
-                        super_buf.super_buf[i].bufs[0];
-                dispatch_buf.camera_handle |= super_buf.super_buf[i].camera_handle;
-                dispatch_buf.ch_id |= super_buf.super_buf[i].ch_id;
-                my_obj->frame_sync.super_buf_notify_cb(&dispatch_buf,
-                        my_obj->frame_sync.user_data);
-            }
-        }
-        rc = mm_camera_muxer_frame_sync_dequeue(&my_obj->frame_sync.superbuf_queue, &super_buf);
-    }
+    rc = mm_camera_muxer_frame_sync_flush(&my_obj->frame_sync.superbuf_queue);
     return rc;
 }
 
@@ -2224,26 +2246,16 @@
  *              0  -- success
  *              1 -- failure
  *==========================================================================*/
-int32_t mm_camera_muxer_channel_frame_sync_flush(mm_channel_t *my_obj)
+int32_t mm_camera_muxer_channel_frame_sync_flush(mm_channel_t *ch_obj)
 {
-    int32_t rc = 0, i = 0;
-    mm_frame_sync_queue_node_t super_buf;
-    memset(&super_buf, 0, sizeof(super_buf));
+    int32_t rc = 0;
+    mm_channel_t *my_obj = ch_obj;
 
-    if (my_obj->master_ch_obj) {
-        my_obj = my_obj->master_ch_obj;
+    if (ch_obj->master_ch_obj != NULL) {
+        my_obj = ch_obj->master_ch_obj;
     }
 
-    rc = mm_camera_muxer_frame_sync_dequeue(&my_obj->frame_sync.superbuf_queue, &super_buf);
-    while (super_buf.num_objs != 0 && rc != 0) {
-        for (i = 0; i < MAX_OBJS_FOR_FRAME_SYNC; i++) {
-            if (super_buf.super_buf[i].num_bufs == 1) {
-                mm_camera_muxer_buf_done(&super_buf.super_buf[i]);
-            }
-        }
-        rc = mm_camera_muxer_frame_sync_dequeue(&my_obj->frame_sync.superbuf_queue, &super_buf);
-    }
-
+    rc = mm_camera_muxer_frame_sync_flush(&my_obj->frame_sync.superbuf_queue);
     return rc;
 }
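
The reworked mm_camera_muxer_do_frame_sync pairs buffers from the two cameras by frame_idx, drops the oldest unmatched single once more than attr.max_unmatched_frames of them accumulate, and (not shown below) releases matched nodes past attr.water_mark in the same buf-done fashion. The sketch is a standalone simplification under those assumptions; its structures and helper names are invented and do not match the HAL's queue types.

/* Minimal sketch of frame matching by frame_idx (assumed, simplified types). */
#include <stdio.h>
#include <string.h>

#define MAX_PENDING 16

typedef struct {
    int frame_idx[MAX_PENDING];
    int have_main[MAX_PENDING];
    int have_aux[MAX_PENDING];
    int used[MAX_PENDING];
    int max_unmatched_frames;
} sync_q_t;

/* Returns the matched frame_idx, or -1 if the buffer is still waiting. */
static int do_frame_sync(sync_q_t *q, int frame_idx, int is_aux)
{
    int i, unmatched = 0, oldest = -1;

    for (i = 0; i < MAX_PENDING; i++) {
        if (!q->used[i]) continue;
        if (q->frame_idx[i] == frame_idx) {
            if (is_aux) q->have_aux[i] = 1; else q->have_main[i] = 1;
            if (q->have_main[i] && q->have_aux[i]) {
                q->used[i] = 0;          /* matched: remove from queue */
                return frame_idx;
            }
            return -1;
        }
        unmatched++;
        if (oldest < 0 || q->frame_idx[i] < q->frame_idx[oldest]) oldest = i;
    }

    if (unmatched > q->max_unmatched_frames && oldest >= 0) {
        q->used[oldest] = 0;             /* give up on the oldest single */
    }
    for (i = 0; i < MAX_PENDING; i++) {  /* insert the new single */
        if (!q->used[i]) {
            q->used[i] = 1;
            q->frame_idx[i] = frame_idx;
            q->have_main[i] = !is_aux;
            q->have_aux[i] = is_aux;
            break;
        }
    }
    return -1;
}

int main(void)
{
    sync_q_t q;
    memset(&q, 0, sizeof(q));
    q.max_unmatched_frames = 2;
    do_frame_sync(&q, 100, 0);                        /* main frame arrives */
    printf("match: %d\n", do_frame_sync(&q, 100, 1)); /* aux 100 pairs up   */
    return 0;
}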
 
diff --git a/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c b/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
index 26a2fbb..200946a 100644
--- a/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
+++ b/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
@@ -142,11 +142,10 @@
 uint32_t mm_stream_get_v4l2_fmt(cam_format_t fmt);
 int32_t mm_stream_reg_frame_sync(mm_stream_t *my_obj,
         mm_evt_paylod_reg_frame_sync *sync);
-int32_t mm_stream_trigger_frame_sync(mm_stream_t *my_obj, uint8_t start_sync);
-int32_t mm_stream_switch_stream_callback(mm_stream_t *my_obj);
 int32_t mm_stream_handle_cache_ops(mm_stream_t* my_obj,
         mm_camera_buf_def_t* buf, bool deque);
-
+int32_t mm_stream_trigger_frame_sync(mm_stream_t *my_obj,
+        mm_camera_cb_req_type type);
 
 /*===========================================================================
  * FUNCTION   : mm_stream_notify_channel
@@ -270,19 +269,25 @@
          mm_stream_data_cb_t *buf_cb, mm_camera_buf_info_t *buf_info)
 {
     mm_camera_super_buf_t super_buf;
+    mm_stream_t *m_obj = my_obj;
 
     if (NULL == my_obj || buf_info == NULL ||
             buf_cb == NULL) {
         return;
     }
 
+    if (m_obj->master_str_obj != NULL) {
+        m_obj = m_obj->master_str_obj;
+    }
+
     memset(&super_buf, 0, sizeof(mm_camera_super_buf_t));
     super_buf.num_bufs = 1;
     super_buf.bufs[0] = buf_info->buf;
     super_buf.camera_handle = my_obj->ch_obj->cam_obj->my_hdl;
     super_buf.ch_id = my_obj->ch_obj->my_hdl;
     if ((buf_cb != NULL) && (buf_cb->cb_type == MM_CAMERA_STREAM_CB_TYPE_SYNC)
-            && (buf_cb->cb_count != 0)) {
+            && (buf_cb->cb_count != 0)
+            && my_obj->is_cb_active) {
         /* callback */
         buf_cb->cb(&super_buf, buf_cb->user_data);
 
@@ -665,8 +670,9 @@
         break;
     case MM_STREAM_EVT_TRIGGER_FRAME_SYNC:
         {
-            uint8_t trigger = *((uint8_t *)in_val);
-            rc = mm_stream_trigger_frame_sync(my_obj, trigger);
+            mm_camera_cb_req_type type =
+                    *((mm_camera_cb_req_type *)in_val);
+            rc = mm_stream_trigger_frame_sync(my_obj, type);
         }
         break;
     default:
@@ -741,8 +747,9 @@
         break;
     case MM_STREAM_EVT_TRIGGER_FRAME_SYNC:
         {
-            uint8_t trigger = *((uint8_t *)in_val);
-            rc = mm_stream_trigger_frame_sync(my_obj, trigger);
+            mm_camera_cb_req_type type =
+                    *((mm_camera_cb_req_type *)in_val);
+            rc = mm_stream_trigger_frame_sync(my_obj, type);
         }
         break;
     default:
@@ -981,13 +988,9 @@
         break;
     case MM_STREAM_EVT_TRIGGER_FRAME_SYNC:
         {
-            uint8_t trigger = *((uint8_t *)in_val);
-            rc = mm_stream_trigger_frame_sync(my_obj, trigger);
-        }
-        break;
-    case MM_STREAM_EVT_SWITCH_STREAM_CB:
-        {
-            rc = mm_stream_switch_stream_callback(my_obj);
+            mm_camera_cb_req_type type =
+                    *((mm_camera_cb_req_type *)in_val);
+            rc = mm_stream_trigger_frame_sync(my_obj, type);
         }
         break;
     default:
@@ -1147,8 +1150,7 @@
     queue->bundled_objs[queue->num_objs] = sync->a_str_obj->my_hdl;
     queue->num_objs++;
     queue->expected_frame_id = 0;
-    queue->max_unmatched_frames = sync->sync_attr->max_unmatched_frames;
-    queue->priority = sync->sync_attr->priority;
+    queue->attr = sync->sync_attr->attr;
 
     sync->a_str_obj->is_res_shared = sync->sync_attr->is_res_shared;
     my_obj->aux_str_obj[my_obj->num_s_cnt++] = sync->a_str_obj;
@@ -1164,72 +1166,65 @@
  *
  * PARAMETERS :
  *   @my_obj  : stream object
- *   @start_sync  : flag to start/stop frame sync.
+ *   @type  : callback request type controlling frame sync and callback switching
  *
  * RETURN     : uint32_t type of stream handle
  *              0  -- invalid stream handle, meaning the op failed
  *              >0 -- successfully added a stream with a valid handle
  *==========================================================================*/
-int32_t mm_stream_trigger_frame_sync(mm_stream_t *my_obj, uint8_t start_sync)
+int32_t mm_stream_trigger_frame_sync(mm_stream_t *my_obj,
+        mm_camera_cb_req_type type)
 {
     int32_t rc = 0;
     mm_stream_t *m_obj = my_obj;
+    mm_stream_t *s_obj = NULL;
     mm_frame_sync_t *frame_sync = NULL;
 
     if (m_obj->master_str_obj != NULL) {
         m_obj = m_obj->master_str_obj;
     }
-    frame_sync = &m_obj->frame_sync;
-    pthread_mutex_lock(&frame_sync->sync_lock);
-    if (start_sync == 0 && frame_sync->is_active) {
-        mm_camera_muxer_stream_frame_sync_flush(m_obj);
-    }
-    frame_sync->is_active = start_sync;
-    pthread_mutex_unlock(&frame_sync->sync_lock);
-    return rc;
-}
-
-/*===========================================================================
- * FUNCTION   : mm_stream_switch_stream_callback
- *
- * DESCRIPTION: switch stream callbacks
- *
- * PARAMETERS :
- *   @my_obj  : stream object
- *
- * RETURN     : uint32_t type of stream handle
- *              0  -- invalid stream handle, meaning the op failed
- *              >0 -- successfully added a stream with a valid handle
- *==========================================================================*/
-int32_t mm_stream_switch_stream_callback(mm_stream_t *my_obj)
-{
-    int32_t rc = 0;
-    mm_stream_t *m_obj = my_obj;
-    mm_stream_t *s_obj = NULL;
-
-    if (my_obj->master_str_obj != NULL) {
-        m_obj = my_obj->master_str_obj;
-    }
-    if (m_obj->num_s_cnt == 0) {
-        LOGE("No slave stream to switch");
-        return -1;
-    }
     s_obj = m_obj->aux_str_obj[0];
 
-    pthread_mutex_lock(&m_obj->frame_sync.sync_lock);
-    if (m_obj->frame_sync.is_active) {
-        mm_camera_muxer_stream_frame_sync_flush(m_obj);
+    frame_sync = &m_obj->frame_sync;
+    pthread_mutex_lock(&frame_sync->sync_lock);
+    switch (type) {
+        case MM_CAMERA_CB_REQ_TYPE_SWITCH:
+            if (m_obj->frame_sync.is_active) {
+                mm_camera_muxer_stream_frame_sync_flush(m_obj);
+            }
+            m_obj->frame_sync.is_active = 0;
+
+            pthread_mutex_lock(&s_obj->cb_lock);
+            s_obj->is_cb_active = !s_obj->is_cb_active;
+            pthread_mutex_unlock(&s_obj->cb_lock);
+
+            pthread_mutex_lock(&m_obj->cb_lock);
+            m_obj->is_cb_active = !m_obj->is_cb_active;
+            if (s_obj->is_cb_active == 0
+                    && m_obj->is_cb_active == 0) {
+                m_obj->is_cb_active = 1;
+            }
+            pthread_mutex_unlock(&m_obj->cb_lock);
+        break;
+
+        case MM_CAMERA_CB_REQ_TYPE_FRAME_SYNC:
+            m_obj->frame_sync.is_active = 1;
+        break;
+
+        case MM_CAMERA_CB_REQ_TYPE_ALL_CB:
+            pthread_mutex_lock(&m_obj->cb_lock);
+            m_obj->is_cb_active = 1;
+            pthread_mutex_unlock(&m_obj->cb_lock);
+
+            pthread_mutex_lock(&s_obj->cb_lock);
+            s_obj->is_cb_active = 1;
+            pthread_mutex_unlock(&s_obj->cb_lock);
+        break;
+        default:
+            //no-op
+            break;
     }
-    m_obj->frame_sync.is_active = 0;
-    pthread_mutex_lock(&m_obj->cb_lock);
-    m_obj->is_cb_active = !m_obj->is_cb_active;
-    pthread_mutex_unlock(&m_obj->cb_lock);
-
-    pthread_mutex_lock(&s_obj->cb_lock);
-    s_obj->is_cb_active = !s_obj->is_cb_active;
-    pthread_mutex_unlock(&s_obj->cb_lock);
-
-    pthread_mutex_unlock(&m_obj->frame_sync.sync_lock);
+    pthread_mutex_unlock(&frame_sync->sync_lock);
     return rc;
 }
 
diff --git a/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_main_menu.c b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_main_menu.c
index a310b40..d5286bb 100644
--- a/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_main_menu.c
+++ b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_main_menu.c
@@ -1040,7 +1040,7 @@
     saturation = CAMERA_MAX_SATURATION;
     printf("Reached max saturation. \n");
   }
-  printf("Increase saturation to %d\n", contrast);
+  printf("Increase saturation to %d\n", saturation);
   return mm_camera_lib_send_command(lib_handle,
                                        MM_CAMERA_LIB_SATURATION,
                                        &saturation,
@@ -1067,7 +1067,7 @@
     saturation = CAMERA_MIN_SATURATION;
     printf("Reached min saturation. \n");
   }
-  printf("decrease saturation to %d\n", contrast);
+  printf("decrease saturation to %d\n", saturation);
   return mm_camera_lib_send_command(lib_handle,
                                        MM_CAMERA_LIB_SATURATION,
                                        &saturation,
@@ -1635,7 +1635,7 @@
     uint8_t previewing = 0;
     int isZSL = 0;
     int isezTune = 0;
-    int isirmode = 0;
+    int curr_irmode = 0;
     int isshdrmode = 0;
     uint8_t wnr_enabled = 0;
     mm_camera_lib_handle lib_handle;
@@ -1824,16 +1824,13 @@
             case ACTION_TOGGLE_IR_MODE:
                 LOGE("Select for IR Mode");
                 printf("IR Mode Toggle\n");
-                isirmode = !isirmode;
-                if (isirmode) {
-                    printf("IR Mode On !!!");
-                } else {
-                    printf("IR Mode Off !!!");
-                }
+                curr_irmode++;
+                curr_irmode %= CAM_IR_MODE_MAX;
+                printf("IR Mode %s !!!",
+                        curr_irmode == 0 ? "Off" : (curr_irmode == 1 ? "On" : "Auto"));
 
                 rc = mm_camera_lib_send_command(&lib_handle,
                                       MM_CAMERA_LIB_IRMODE,
-                                      &isirmode,
+                                      &curr_irmode,
                                       NULL);
                 if (rc != MM_CAMERA_OK) {
                     LOGE("mm_camera_lib_send_command() err=%d\n",  rc);
diff --git a/msmcobalt/QCamera2/stack/mm-jpeg-interface/Android.mk b/msmcobalt/QCamera2/stack/mm-jpeg-interface/Android.mk
index 175796b..5916b26 100644
--- a/msmcobalt/QCamera2/stack/mm-jpeg-interface/Android.mk
+++ b/msmcobalt/QCamera2/stack/mm-jpeg-interface/Android.mk
@@ -49,6 +49,7 @@
 JPEG_PIPELINE_TARGET_LIST := msm8994
 JPEG_PIPELINE_TARGET_LIST += msm8992
 JPEG_PIPELINE_TARGET_LIST += msm8996
+JPEG_PIPELINE_TARGET_LIST += msm8998
 JPEG_PIPELINE_TARGET_LIST += msmcobalt
 
 ifneq (,$(filter  $(JPEG_PIPELINE_TARGET_LIST),$(TARGET_BOARD_PLATFORM)))
diff --git a/msmcobalt/QCamera2/util/QCameraFOVControl.cpp b/msmcobalt/QCamera2/util/QCameraFOVControl.cpp
index a7c739d..19e3500 100644
--- a/msmcobalt/QCamera2/util/QCameraFOVControl.cpp
+++ b/msmcobalt/QCamera2/util/QCameraFOVControl.cpp
@@ -55,14 +55,21 @@
     memset(&mFovControlData,   0, sizeof(fov_control_data_t));
     memset(&mFovControlResult, 0, sizeof(fov_control_result_t));
 
-    mFovControlData.spatialAlign.status       = NOT_READY;
-    mFovControlData.status3A.camMain.statusAF = AF_INVALID;
-    mFovControlData.status3A.camAux.statusAF  = AF_INVALID;
+    mFovControlData.camcorderMode             = false;
+    mFovControlData.status3A.main.af.status   = AF_INVALID;
+    mFovControlData.status3A.aux.af.status    = AF_INVALID;
 
     mFovControlResult.camMasterPreview  = CAM_TYPE_MAIN;
     mFovControlResult.camMaster3A       = CAM_TYPE_MAIN;
-    mFovControlResult.camState          = (uint32_t)CAM_TYPE_MAIN;
-    mFovControlResult.snapshotFusion    = false;
+    mFovControlResult.activeCamState    = (uint32_t)CAM_TYPE_MAIN;
+    mFovControlResult.snapshotPostProcess = false;
+
+    mFovControlData.spatialAlignResult.status           = 0;
+    mFovControlData.spatialAlignResult.camMasterPreview = CAM_ROLE_WIDE;
+    mFovControlData.spatialAlignResult.camMaster3A      = CAM_ROLE_WIDE;
+    mFovControlData.spatialAlignResult.activeCamState   = (uint32_t)CAM_TYPE_MAIN;
+    mFovControlData.spatialAlignResult.shiftHorz        = 0;
+    mFovControlData.spatialAlignResult.shiftVert        = 0;
 }
 
 
@@ -96,8 +103,9 @@
  *              NULL if fails
  *
  *==========================================================================*/
-QCameraFOVControl* QCameraFOVControl::create(cam_capability_t *capsMainCam,
-                                             cam_capability_t* capsAuxCam)
+QCameraFOVControl* QCameraFOVControl::create(
+        cam_capability_t *capsMainCam,
+        cam_capability_t *capsAuxCam)
 {
     QCameraFOVControl *pFovControl  = NULL;
 
@@ -106,46 +114,8 @@
         pFovControl = new QCameraFOVControl();
 
         if (pFovControl) {
-            float fovAdjustBasic;
-            float fovAdjustFromDisparity;
-            float fovAdjustFromRollPitchYaw;
-            float zoomTranslationFactor;
-
-            // TODO : Replace the hardcoded values for mFovControlConfig and mDualCamParams below
-            // with the ones extracted from capabilities when available in eeprom.
-            pFovControl->mFovControlConfig.percentMarginHysterisis  = 5;
-            pFovControl->mFovControlConfig.percentMarginMain        = 10;
-            pFovControl->mFovControlConfig.percentMarginAux         = 15;
-            pFovControl->mFovControlConfig.waitTimeForHandoffMs     = 1000;
-
-            pFovControl->mDualCamParams.paramsMain.sensorStreamWidth  = 4208;
-            pFovControl->mDualCamParams.paramsMain.sensorStreamHeight = 3120;
-            pFovControl->mDualCamParams.paramsMain.pixelPitchUm       = 1.12;
-            pFovControl->mDualCamParams.paramsMain.focalLengthMm      = 3.5;
-            pFovControl->mDualCamParams.paramsAux.sensorStreamWidth   = 4208;
-            pFovControl->mDualCamParams.paramsAux.sensorStreamHeight  = 3120;
-            pFovControl->mDualCamParams.paramsAux.pixelPitchUm        = 1.12;
-            pFovControl->mDualCamParams.paramsAux.focalLengthMm       = 7;
-            pFovControl->mDualCamParams.baselineMm                    = 9.5;
-            pFovControl->mDualCamParams.minFocusDistanceCm            = 30;
-            pFovControl->mDualCamParams.rollDegrees                   = 1.0;
-            pFovControl->mDualCamParams.pitchDegrees                  = 1.0;
-            pFovControl->mDualCamParams.yawDegrees                    = 1.0;
-            pFovControl->mDualCamParams.positionAux                   = CAM_POSITION_LEFT;
-
-            fovAdjustBasic            = pFovControl->calculateBasicFovRatio();
-            fovAdjustFromDisparity    = pFovControl->calculateFovAdjustmentWorstCaseDisparity();
-            fovAdjustFromRollPitchYaw = pFovControl->calculateFovAdjustmentRollPitchYaw();
-
-            if (fovAdjustBasic            >= 1.0 &&
-                fovAdjustFromDisparity    >= 1.0 &&
-                fovAdjustFromRollPitchYaw >= 1.0) {
-                zoomTranslationFactor = pFovControl->combineFovAdjustment(fovAdjustBasic,
-                                            fovAdjustFromDisparity, fovAdjustFromRollPitchYaw);
-
-                pFovControl->calculateDualCamTransitionParams(fovAdjustBasic,
-                                                                zoomTranslationFactor);
-
+            bool  success = false;
+            if (pFovControl->validateAndExtractParameters(capsMainCam, capsAuxCam)) {
                 if (pFovControl->mDualCamParams.paramsMain.focalLengthMm <
                     pFovControl->mDualCamParams.paramsAux.focalLengthMm) {
                     pFovControl->mFovControlData.camWide  = CAM_TYPE_MAIN;
@@ -156,10 +126,10 @@
                     pFovControl->mFovControlData.camTele  = CAM_TYPE_MAIN;
                     pFovControl->mFovControlData.camState = STATE_TELE;
                 }
+                success = true;
+            }
 
-                pFovControl->mFovControlData.status3A.camAux.statusAF  = AF_VALID;
-                pFovControl->mFovControlData.status3A.camMain.statusAF = AF_VALID;
-            } else {
+            if (!success) {
                 LOGE("FOV-control: Failed to create an object");
                 delete pFovControl;
                 pFovControl = NULL;
@@ -177,7 +147,7 @@
  * FUNCTION    : consolidateCapabilities
  *
  * DESCRIPTION : Combine the capabilities from main and aux cameras to return
- *              the consolidated capabilities.
+ *               the consolidated capabilities.
  *
  * PARAMETERS  :
  * @capsMainCam: Capabilities for the main camera
@@ -187,8 +157,8 @@
  *
  *==========================================================================*/
 cam_capability_t QCameraFOVControl::consolidateCapabilities(
-                                        cam_capability_t *capsMainCam,
-                                        cam_capability_t *capsAuxCam)
+        cam_capability_t *capsMainCam,
+        cam_capability_t *capsAuxCam)
 {
     cam_capability_t capsConsolidated;
     memcpy(&capsConsolidated, capsMainCam, sizeof(cam_capability_t));
@@ -346,6 +316,89 @@
 
 
 /*===========================================================================
+ * FUNCTION    : updateConfigSettings
+ *
+ * DESCRIPTION : Update the config settings such as margins and preview size
+ *               and recalculate the transition parameters.
+ *
+ * PARAMETERS  :
+ * @paramsMainCam : Parameters for the main camera (including stream info)
+ * @paramsAuxCam  : Parameters for the aux camera (including stream info)
+ *
+ * RETURN :
+ * NO_ERROR           : Success
+ * INVALID_OPERATION  : Failure
+ *
+ *==========================================================================*/
+int32_t QCameraFOVControl::updateConfigSettings(
+        parm_buffer_t* paramsMainCam,
+        parm_buffer_t* paramsAuxCam)
+{
+    int32_t rc = INVALID_OPERATION;
+
+    if (paramsMainCam &&
+        paramsAuxCam  &&
+        paramsMainCam->is_valid[CAM_INTF_META_STREAM_INFO] &&
+        paramsAuxCam->is_valid[CAM_INTF_META_STREAM_INFO]) {
+
+        cam_stream_size_info_t camMainStreamInfo;
+        READ_PARAM_ENTRY(paramsMainCam, CAM_INTF_META_STREAM_INFO, camMainStreamInfo);
+        mFovControlData.camcorderMode = false;
+        for (int i = 0; i < MAX_NUM_STREAMS; ++i) {
+            if (camMainStreamInfo.type[i] == CAM_STREAM_TYPE_VIDEO) {
+                mFovControlData.camcorderMode = true;
+            }
+        }
+
+        for (int i = 0; i < MAX_NUM_STREAMS; ++i) {
+            if (camMainStreamInfo.type[i] == CAM_STREAM_TYPE_VIDEO) {
+                mFovControlData.camMainWidthMargin  = camMainStreamInfo.margins[i].widthMargins;
+                mFovControlData.camMainHeightMargin = camMainStreamInfo.margins[i].heightMargins;
+            }
+            if (camMainStreamInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
+                // Update the preview dimension
+                mFovControlData.previewSize = camMainStreamInfo.stream_sizes[i];
+                if (!mFovControlData.camcorderMode) {
+                    mFovControlData.camMainWidthMargin  =
+                            camMainStreamInfo.margins[i].widthMargins;
+                    mFovControlData.camMainHeightMargin =
+                            camMainStreamInfo.margins[i].heightMargins;
+                    break;
+                }
+            }
+        }
+
+        cam_stream_size_info_t camAuxStreamInfo;
+        READ_PARAM_ENTRY(paramsAuxCam, CAM_INTF_META_STREAM_INFO, camAuxStreamInfo);
+
+        for (int i = 0; i < MAX_NUM_STREAMS; ++i) {
+            if (camAuxStreamInfo.type[i] == CAM_STREAM_TYPE_VIDEO) {
+                mFovControlData.camAuxWidthMargin  = camAuxStreamInfo.margins[i].widthMargins;
+                mFovControlData.camAuxHeightMargin = camAuxStreamInfo.margins[i].heightMargins;
+            }
+            if (camAuxStreamInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
+                // Update the preview dimension
+                mFovControlData.previewSize = camAuxStreamInfo.stream_sizes[i];
+                if (!mFovControlData.camcorderMode) {
+                    mFovControlData.camAuxWidthMargin  = camAuxStreamInfo.margins[i].widthMargins;
+                    mFovControlData.camAuxHeightMargin = camAuxStreamInfo.margins[i].heightMargins;
+                    break;
+                }
+            }
+        }
+
+        // Recalculate the transition parameters
+        if (calculateBasicFovRatio() && combineFovAdjustment()) {
+            calculateDualCamTransitionParams();
+            rc = NO_ERROR;
+        }
+    }
+
+    return rc;
+}
+
+
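For context, a hedged sketch of how a dual-camera caller might drive the new updateConfigSettings() entry point; the names capsMain, capsAux, paramsMain and paramsAux are hypothetical placeholders for whatever the HAL's open/configure path provides:

    // Created once at open time from both cameras' capabilities.
    QCameraFOVControl *pFovControl = QCameraFOVControl::create(capsMain, capsAux);

    if (pFovControl != NULL) {
        // Re-derive margins, preview size and the wide/tele transition
        // parameters whenever the stream configuration changes.
        if (pFovControl->updateConfigSettings(paramsMain, paramsAux) != NO_ERROR) {
            LOGE("FOV-control: updateConfigSettings failed");
        }
    }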
+/*===========================================================================
  * FUNCTION   : translateInputParams
  *
  * DESCRIPTION: Translate a subset of input parameters from main camera. As main
@@ -361,8 +414,9 @@
  * INVALID_OPERATION  : Failure
  *
  *==========================================================================*/
-int32_t QCameraFOVControl::translateInputParams(parm_buffer_t* paramsMainCam,
-                                                parm_buffer_t *paramsAuxCam)
+int32_t QCameraFOVControl::translateInputParams(
+        parm_buffer_t* paramsMainCam,
+        parm_buffer_t* paramsAuxCam)
 {
     int32_t rc = INVALID_OPERATION;
     if (paramsMainCam && paramsAuxCam) {
@@ -374,8 +428,7 @@
             uint32_t userZoom = 0;
             READ_PARAM_ENTRY(paramsMainCam, CAM_INTF_PARM_ZOOM, userZoom);
             convertUserZoomToMainAndAux(userZoom);
-            ADD_SET_PARAM_ENTRY_TO_BATCH(paramsAuxCam, CAM_INTF_PARM_ZOOM,
-                                            mFovControlData.zoomAux);
+            ADD_SET_PARAM_ENTRY_TO_BATCH(paramsAuxCam, CAM_INTF_PARM_ZOOM, mFovControlData.zoomAux);
         }
 
         if (paramsMainCam->is_valid[CAM_INTF_PARM_AF_ROI] ||
@@ -404,18 +457,6 @@
                 ADD_SET_PARAM_ENTRY_TO_BATCH(paramsAuxCam, CAM_INTF_PARM_AEC_ROI, roiAecAux);
             }
         }
-
-        // Book-keep the preview dimension
-        if (paramsMainCam->is_valid[CAM_INTF_META_STREAM_INFO]) {
-            cam_stream_size_info_t camStreamInfo;
-            READ_PARAM_ENTRY(paramsMainCam, CAM_INTF_META_STREAM_INFO, camStreamInfo);
-            for (int i = 0; i < MAX_NUM_STREAMS; ++i) {
-                if (camStreamInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
-                    mFovControlData.previewSize = camStreamInfo.stream_sizes[i];
-                    break;
-                }
-            }
-        }
         rc = NO_ERROR;
     }
     return rc;
@@ -441,25 +482,26 @@
  * based on which one was the master. In case of failure, it returns NULL.
  *==========================================================================*/
 metadata_buffer_t* QCameraFOVControl::processResultMetadata(
-                                                    metadata_buffer_t*  metaMain,
-                                                    metadata_buffer_t*  metaAux)
+        metadata_buffer_t*  metaMain,
+        metadata_buffer_t*  metaAux)
 {
     metadata_buffer_t* metaResult = NULL;
 
     if (metaMain || metaAux) {
+        metadata_buffer_t *meta = metaMain ? metaMain : metaAux;
         // Temporary code to determine the master camera.
         // This code will be replaced once we have the logic
         // to determine master based on the frame number in HAL.
-        cam_type master = (cam_type)mFovControlResult.camMasterPreview;
+        cam_sync_type_t master = mFovControlResult.camMasterPreview;
 
         if ((master == CAM_TYPE_AUX) && metaAux) {
             // Translate face detection ROI
             IF_META_AVAILABLE(cam_face_detection_data_t, metaFD,
-                                    CAM_INTF_META_FACE_DETECTION, metaAux) {
+                    CAM_INTF_META_FACE_DETECTION, metaAux) {
                 cam_face_detection_data_t metaFDTranslated;
                 metaFDTranslated = translateRoiFD(*metaFD);
                 ADD_SET_PARAM_ENTRY_TO_BATCH(metaAux, CAM_INTF_META_FACE_DETECTION,
-                                                metaFDTranslated);
+                        metaFDTranslated);
             }
             metaResult = metaAux;
         } else if (metaMain) {
@@ -470,21 +512,68 @@
             return metaResult;
         }
 
-        // Book-keep the needed metadata from main camera and aux camera
         mMutex.lock();
+
+        // Book-keep the needed metadata from main camera and aux camera
+
+        IF_META_AVAILABLE(cam_sac_output_info_t, spatialAlignOutput,
+                CAM_INTF_META_DC_SAC_OUTPUT_INFO, meta) {
+            // Get master camera for preview
+            if (spatialAlignOutput->is_master_preview_valid) {
+                uint8_t master = spatialAlignOutput->master_preview;
+                if ((master == CAM_ROLE_WIDE) ||
+                    (master == CAM_ROLE_TELE)) {
+                    mFovControlData.spatialAlignResult.camMasterPreview = master;
+                }
+            }
+
+            // Get master camera for 3A
+            if (spatialAlignOutput->is_master_3A_valid) {
+                uint8_t master = spatialAlignOutput->master_3A;
+                if ((master == CAM_ROLE_WIDE) ||
+                    (master == CAM_ROLE_TELE)) {
+                    mFovControlData.spatialAlignResult.camMaster3A = master;
+                }
+            }
+
+            // Get spatial alignment ready status
+            if (spatialAlignOutput->is_ready_status_valid) {
+                mFovControlData.spatialAlignResult.status = spatialAlignOutput->ready_status;
+            }
+            // temp code: Always set it to 1 until spatial align functionality is in place
+            mFovControlData.spatialAlignResult.status = 1;
+
+            // Get spatial alignment output shift
+            if (spatialAlignOutput->is_output_shift_valid) {
+                mFovControlData.spatialAlignResult.shiftHorz =
+                        spatialAlignOutput->output_shift.shift_horz;
+                mFovControlData.spatialAlignResult.shiftVert =
+                        spatialAlignOutput->output_shift.shift_vert;
+            }
+        }
+
+        if (mFovControlData.availableSpatialAlignSolns & CAM_SPATIAL_ALIGN_OEM) {
+            // Get low power mode info
+            IF_META_AVAILABLE(uint8_t, enableLPM, CAM_INTF_META_DC_LOW_POWER_ENABLE, meta) {
+                if (*enableLPM) {
+                    mFovControlData.spatialAlignResult.activeCamState =
+                            (uint32_t)mFovControlResult.camMasterPreview;
+                }
+            }
+        }
+
+        // Get AF status
         if (metaMain) {
             IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metaMain) {
                 if (((*afState) == CAM_AF_STATE_FOCUSED_LOCKED)     ||
                     ((*afState) == CAM_AF_STATE_NOT_FOCUSED_LOCKED) ||
                     ((*afState) == CAM_AF_STATE_PASSIVE_FOCUSED)    ||
                     ((*afState) == CAM_AF_STATE_PASSIVE_UNFOCUSED)) {
-                    mFovControlData.status3A.camMain.statusAF = AF_VALID;
+                    mFovControlData.status3A.main.af.status = AF_VALID;
                 } else {
-                    mFovControlData.status3A.camMain.statusAF = AF_INVALID;
+                    mFovControlData.status3A.main.af.status = AF_INVALID;
                 }
             }
-            // TODO : Copy the spatial alignment metadata to mFovControlData.spatialAlign
-            // Code for spatial alignment lib / metadata is not added yet
         }
         if (metaAux) {
             IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metaAux) {
@@ -492,14 +581,13 @@
                     ((*afState) == CAM_AF_STATE_NOT_FOCUSED_LOCKED) ||
                     ((*afState) == CAM_AF_STATE_PASSIVE_FOCUSED)    ||
                     ((*afState) == CAM_AF_STATE_PASSIVE_UNFOCUSED)) {
-                    mFovControlData.status3A.camAux.statusAF = AF_VALID;
+                    mFovControlData.status3A.aux.af.status = AF_VALID;
                 } else {
-                    mFovControlData.status3A.camAux.statusAF = AF_INVALID;
+                    mFovControlData.status3A.aux.af.status = AF_INVALID;
                 }
             }
-            // TODO : Copy the spatial alignment metadata to mFovControlData.spatialAlign
-            // Code for spatial alignment lib / metadata is not added yet
         }
+
         mMutex.unlock();
     }
     return metaResult;
@@ -521,12 +609,12 @@
     cam_sync_type_t camWide = mFovControlData.camWide;
     cam_sync_type_t camTele = mFovControlData.camTele;
 
-    float zoom = findZoomRatio(mFovControlData.zoomMain) / 4096.0f;
+    float zoom = findZoomRatio(mFovControlData.zoomMain) / (float)mFovControlData.zoomRatioTable[0];
 
     // Read AF status with mutex lock
     mMutex.lock();
-    af_status afStatusMain = mFovControlData.status3A.camMain.statusAF;
-    af_status afStatusAux  = mFovControlData.status3A.camAux.statusAF;
+    af_status afStatusMain = mFovControlData.status3A.main.af.status;
+    af_status afStatusAux  = mFovControlData.status3A.aux.af.status;
     mMutex.unlock();
 
     // Update the dual camera state based on the current zoom
@@ -570,10 +658,10 @@
     // Generate the result using updated dual camera state
     switch (mFovControlData.camState) {
         case STATE_WIDE:
-            mFovControlResult.camState         = camWide;
-            mFovControlResult.camMaster3A      = camWide;
-            mFovControlResult.camMasterPreview = camWide;
-            mFovControlResult.snapshotFusion   = false;
+            mFovControlResult.activeCamState      = camWide;
+            mFovControlResult.camMaster3A         = camWide;
+            mFovControlResult.camMasterPreview    = camWide;
+            mFovControlResult.snapshotPostProcess = false;
             break;
         case STATE_TRANSITION_WIDE_TO_TELE:
             if (zoom > mFovControlData.transitionParams.cutOverMainToAux) {
@@ -583,8 +671,8 @@
                 mFovControlResult.camMasterPreview = camWide;
                 mFovControlResult.camMaster3A      = camWide;
             }
-            mFovControlResult.camState       = (camWide | camTele);
-            mFovControlResult.snapshotFusion = false;
+            mFovControlResult.activeCamState       = (camWide | camTele);
+            mFovControlResult.snapshotPostProcess  = false;
             break;
         case STATE_TRANSITION_TELE_TO_WIDE:
             if (zoom < mFovControlData.transitionParams.cutOverAuxToMain) {
@@ -594,49 +682,129 @@
                 mFovControlResult.camMasterPreview = camTele;
                 mFovControlResult.camMaster3A      = camTele;
             }
-            mFovControlResult.camState       = (camWide | camTele);
-            mFovControlResult.snapshotFusion = false;
+            mFovControlResult.activeCamState       = (camWide | camTele);
+            mFovControlResult.snapshotPostProcess  = false;
             break;
         case STATE_TELE:
-            mFovControlResult.camMaster3A      = camTele;
-            mFovControlResult.camMasterPreview = camTele;
-            mFovControlResult.camState         = camTele;
-            mFovControlResult.snapshotFusion   = false;
+            mFovControlResult.camMaster3A         = camTele;
+            mFovControlResult.camMasterPreview    = camTele;
+            mFovControlResult.activeCamState      = camTele;
+            mFovControlResult.snapshotPostProcess = false;
             break;
     }
 
+    if (mFovControlData.availableSpatialAlignSolns & CAM_SPATIAL_ALIGN_OEM) {
+        // Override the FOVC result
+        if (mFovControlData.spatialAlignResult.camMasterPreview == CAM_ROLE_WIDE) {
+            mFovControlResult.camMasterPreview = camWide;
+            mFovControlResult.camMaster3A      = camWide;
+            mFovControlResult.activeCamState  |= camWide;
+        } else {
+            mFovControlResult.camMasterPreview = camTele;
+            mFovControlResult.camMaster3A      = camTele;
+            mFovControlResult.activeCamState  |= camTele;
+        }
+    }
+
     // Debug print for the FOV-control result
     LOGD("Effective zoom: %f", zoom);
     LOGD("ZoomMain: %d, ZoomAux: %d", mFovControlData.zoomMain, mFovControlData.zoomAux);
     LOGD("Master camera for preview: %s",
-        (mFovControlResult.camMasterPreview == CAM_TYPE_MAIN ) ? "Main" : "Aux");
+            (mFovControlResult.camMasterPreview == CAM_TYPE_MAIN ) ? "Main" : "Aux");
     LOGD("Master camera for 3A     : %s",
-        (mFovControlResult.camMaster3A == CAM_TYPE_MAIN ) ? "Main" : "Aux");
+            (mFovControlResult.camMaster3A == CAM_TYPE_MAIN ) ? "Main" : "Aux");
     LOGD("Main camera status: %s",
-        (mFovControlResult.camState & CAM_TYPE_MAIN) ? "Active" : "Standby");
+            (mFovControlResult.activeCamState & CAM_TYPE_MAIN) ? "Active" : "Standby");
     LOGD("Aux camera status : %s",
-        (mFovControlResult.camState & CAM_TYPE_AUX) ? "Active" : "Standby");
-    LOGD("snapshot fusion   : %d", mFovControlResult.snapshotFusion);
+            (mFovControlResult.activeCamState & CAM_TYPE_AUX) ? "Active" : "Standby");
+    LOGD("snapshot postprocess : %d", mFovControlResult.snapshotPostProcess);
 
     return mFovControlResult;
 }
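A hedged sketch of a consumer of the renamed result fields; pFovControl is the hypothetical instance from the earlier create() sketch:

    fov_control_result_t result = pFovControl->getFovControlResult();

    // activeCamState is a bitmask of CAM_TYPE_MAIN / CAM_TYPE_AUX.
    bool mainActive = (result.activeCamState & CAM_TYPE_MAIN) != 0;
    bool auxActive  = (result.activeCamState & CAM_TYPE_AUX)  != 0;

    // Preview is routed from the master camera; snapshotPostProcess indicates
    // whether the snapshot should go through the dual-camera post-process path.
    cam_sync_type_t previewSource = result.camMasterPreview;
    bool needPostProcess          = result.snapshotPostProcess;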
 
 
 /*===========================================================================
+ * FUNCTION    : validateAndExtractParameters
+ *
+ * DESCRIPTION : Validates a subset of parameters from capabilities and
+ *               saves those parameters for decision making.
+ *
+ * PARAMETERS  :
+ *  @capsMain  : The capabilities for the main camera
+ *  @capsAux   : The capabilities for the aux camera
+ *
+ * RETURN      :
+ * true        : Success
+ * false       : Failure
+ *
+ *==========================================================================*/
+bool QCameraFOVControl::validateAndExtractParameters(
+        cam_capability_t  *capsMainCam,
+        cam_capability_t  *capsAuxCam)
+{
+    bool rc = false;
+    if (capsMainCam && capsAuxCam) {
+
+        // TODO : Replace the hardcoded values for mFovControlConfig and mDualCamParams below
+        // with the ones extracted from capabilities when available in eeprom.
+        mFovControlConfig.percentMarginHysterisis  = 5;
+        mFovControlConfig.percentMarginMain        = 10;
+        mFovControlConfig.percentMarginAux         = 15;
+        mFovControlConfig.waitTimeForHandoffMs     = 1000;
+
+        mDualCamParams.paramsMain.sensorStreamWidth  = 4208;
+        mDualCamParams.paramsMain.sensorStreamHeight = 3120;
+        mDualCamParams.paramsMain.pixelPitchUm       = 1.12;
+        mDualCamParams.paramsMain.focalLengthMm      = 3.5;
+        mDualCamParams.paramsAux.sensorStreamWidth   = 4208;
+        mDualCamParams.paramsAux.sensorStreamHeight  = 3120;
+        mDualCamParams.paramsAux.pixelPitchUm        = 1.12;
+        mDualCamParams.paramsAux.focalLengthMm       = 7;
+        mDualCamParams.baselineMm                    = 9.5;
+        mDualCamParams.minFocusDistanceCm            = 30;
+        mDualCamParams.rollDegrees                   = 1.0;
+        mDualCamParams.pitchDegrees                  = 1.0;
+        mDualCamParams.yawDegrees                    = 1.0;
+        mDualCamParams.positionAux                   = CAM_POSITION_LEFT;
+
+        if ((capsMainCam->avail_spatial_align_solns & CAM_SPATIAL_ALIGN_QCOM) ||
+            (capsMainCam->avail_spatial_align_solns & CAM_SPATIAL_ALIGN_OEM)) {
+            mFovControlData.availableSpatialAlignSolns =
+                    capsMainCam->avail_spatial_align_solns;
+        } else {
+            LOGW("Spatial alignment not supported");
+        }
+
+        if (capsMainCam->zoom_supported > 0) {
+            mFovControlData.zoomRatioTable      = capsMainCam->zoom_ratio_tbl;
+            mFovControlData.zoomRatioTableCount = capsMainCam->zoom_ratio_tbl_cnt;
+        } else {
+            LOGE("zoom feature not supported");
+            return false;
+        }
+        rc = true;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
  * FUNCTION   : calculateBasicFovRatio
  *
  * DESCRIPTION: Calculate the FOV ratio between main and aux cameras
  *
  * PARAMETERS : None
  *
- * RETURN     : Calculated FOV ratio
+ * RETURN     :
+ * true       : Success
+ * false      : Failure
  *
  *==========================================================================*/
-float QCameraFOVControl::calculateBasicFovRatio()
+bool QCameraFOVControl::calculateBasicFovRatio()
 {
     float fovMain;
     float fovAux;
-    float ret = 0.0f;
+    bool rc = false;
 
     if ((mDualCamParams.paramsMain.focalLengthMm > 0.0f) &&
          (mDualCamParams.paramsAux.focalLengthMm > 0.0f)) {
@@ -648,93 +816,47 @@
                     mDualCamParams.paramsAux.pixelPitchUm) /
                     mDualCamParams.paramsAux.focalLengthMm;
         if (fovAux > 0.0f) {
-            ret = (fovMain / fovAux);
+            mFovControlData.basicFovRatio = (fovMain / fovAux);
+            rc = true;
         }
     }
-    return ret;
-}
-
-
-/*===========================================================================
- * FUNCTION   : calculateFovAdjustmentWorstCaseDisparity
- *
- * DESCRIPTION: Calculate the FOV adjustment with worst case disparity
- *
- * PARAMETERS : None
- *
- * RETURN     : FOV adjustment
- *
- *==========================================================================*/
-float QCameraFOVControl::calculateFovAdjustmentWorstCaseDisparity()
-{
-    float focalLengthAuxPixels;
-    float focalLengthMainPixels;
-    float focalLengthNormalized;
-    float disparityBasic;
-    float disparityFinal;
-    float fovAdjustFromDisparity = 0.0f;
-
-    if ((mDualCamParams.paramsMain.pixelPitchUm > 0.0f) &&
-         (mDualCamParams.paramsAux.pixelPitchUm > 0.0f) &&
-         (mDualCamParams.minFocusDistanceCm > 0.0f)) {
-        focalLengthMainPixels = (mDualCamParams.paramsMain.focalLengthMm * 1000) /
-                                mDualCamParams.paramsMain.pixelPitchUm;
-        focalLengthAuxPixels  = (mDualCamParams.paramsAux.focalLengthMm * 1000) /
-                                (mDualCamParams.paramsAux.pixelPitchUm);
-        if ((focalLengthMainPixels > 0.0f) &&
-            (focalLengthAuxPixels  > 0.0f)) {
-            focalLengthNormalized  = focalLengthAuxPixels / focalLengthMainPixels;
-            disparityBasic         = focalLengthMainPixels * mDualCamParams.baselineMm /
-                                    (mDualCamParams.minFocusDistanceCm * 10);
-            disparityFinal         = focalLengthNormalized * disparityBasic;
-
-            if (mDualCamParams.paramsAux.sensorStreamWidth) {
-                fovAdjustFromDisparity = 1 +
-                    (disparityFinal / mDualCamParams.paramsAux.sensorStreamWidth);
-            }
-        }
-    }
-    return fovAdjustFromDisparity;
-}
-
-
-/*===========================================================================
- * FUNCTION   : calculateFovAdjustmentRollPitchYaw
- *
- * DESCRIPTION: Calculate FOV adjustment factor considering roll, pitch, yaw.
- *
- * PARAMETERS : none
- *
- * RETURN     : FOV adjustment factor
- *
- *==========================================================================*/
-float QCameraFOVControl::calculateFovAdjustmentRollPitchYaw()
-{
-    // TODO :Change the logic here when needed. 7% is fine based on
-    // the different configurations observed
-    return (1.07);
+    return rc;
 }
 
 
 /*===========================================================================
  * FUNCTION   : combineFovAdjustment
  *
- * DESCRIPTION: Calculate the final FOV adjustment by combining three FOV
- *              adjustments calculated earlier
+ * DESCRIPTION: Calculate the final FOV adjustment by combining basic FOV ratio
+ *              with the margin info
  *
- * PARAMETERS :
- *  @fovAdjustBasic            : Basic FOV ratio
- *  @fovAdjustFromDisparity    : FOV adjustment due to disparity
- *  @fovAdjustFromRollPitchYaw : FOV adjustment with roll, pitch, yaw
+ * PARAMETERS : None
  *
- * RETURN     : Final FOV adjustment value
+ * RETURN     :
+ * true       : Success
+ * false      : Failure
  *
  *==========================================================================*/
-float QCameraFOVControl::combineFovAdjustment(float fovAdjustBasic,
-                                              float fovAdjustFromDisparity,
-                                              float fovAdjustFromRollPitchYaw)
+bool QCameraFOVControl::combineFovAdjustment()
 {
-    return (fovAdjustBasic * fovAdjustFromDisparity * fovAdjustFromRollPitchYaw);
+    float ratioMarginWidth;
+    float ratioMarginHeight;
+    float adjustedRatio;
+    bool rc = false;
+
+    ratioMarginWidth = (1.0 + (mFovControlData.camMainWidthMargin)) /
+            (1.0 + (mFovControlData.camAuxWidthMargin));
+    ratioMarginHeight = (1.0 + (mFovControlData.camMainHeightMargin)) /
+            (1.0 + (mFovControlData.camAuxHeightMargin));
+
+    adjustedRatio = (ratioMarginHeight < ratioMarginWidth) ? ratioMarginHeight : ratioMarginWidth;
+
+    if (adjustedRatio > 0.0f) {
+        mFovControlData.transitionParams.cutOverFactor =
+                (mFovControlData.basicFovRatio / adjustedRatio);
+        rc = true;
+    }
+    return rc;
 }
 
 
@@ -751,23 +873,24 @@
  * RETURN     : none
  *
  *==========================================================================*/
-void QCameraFOVControl::calculateDualCamTransitionParams(
-        float fovAdjustBasic,
-        float zoomTranslationFactor)
+void QCameraFOVControl::calculateDualCamTransitionParams()
 {
-    mFovControlData.transitionParams.cropRatio      = fovAdjustBasic;
-    mFovControlData.transitionParams.cutOverFactor  = zoomTranslationFactor;
+    mFovControlData.transitionParams.cropRatio = mFovControlData.basicFovRatio;
 
-    mFovControlData.transitionParams.cutOverMainToAux = zoomTranslationFactor +
-                        (mFovControlConfig.percentMarginHysterisis / 100.0) * fovAdjustBasic;
+    mFovControlData.transitionParams.cutOverMainToAux =
+            mFovControlData.transitionParams.cutOverFactor +
+            (mFovControlConfig.percentMarginHysterisis / 100.0) * mFovControlData.basicFovRatio;
+
+    mFovControlData.transitionParams.cutOverAuxToMain =
+            mFovControlData.transitionParams.cutOverFactor;
+
     mFovControlData.transitionParams.transitionHigh =
             mFovControlData.transitionParams.cutOverMainToAux +
-            (mFovControlConfig.percentMarginMain / 100.0) * fovAdjustBasic;
+            (mFovControlConfig.percentMarginMain / 100.0) * mFovControlData.basicFovRatio;
 
-    mFovControlData.transitionParams.cutOverAuxToMain = zoomTranslationFactor;
     mFovControlData.transitionParams.transitionLow =
             mFovControlData.transitionParams.cutOverAuxToMain -
-            (mFovControlConfig.percentMarginAux / 100.0) * fovAdjustBasic;
+            (mFovControlConfig.percentMarginAux / 100.0) * mFovControlData.basicFovRatio;
 }
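To make the recalculated transition parameters concrete, a small self-contained example with made-up inputs (basic FOV ratio of 2.0, main stream margins of 10%, aux margins of 5%, plus the hard-coded 5/10/15 percent config margins set in validateAndExtractParameters):

    #include <algorithm>
    #include <cstdio>

    int main() {
        // Illustrative inputs only; real values come from the capabilities and
        // the CAM_INTF_META_STREAM_INFO margins.
        float basicFovRatio       = 2.0f;
        float camMainWidthMargin  = 0.10f, camMainHeightMargin = 0.10f;
        float camAuxWidthMargin   = 0.05f, camAuxHeightMargin  = 0.05f;
        float marginHysterisisPct = 5.0f, marginMainPct = 10.0f, marginAuxPct = 15.0f;

        // combineFovAdjustment(): fold the smaller margin ratio into the cut-over factor.
        float ratioW = (1.0f + camMainWidthMargin)  / (1.0f + camAuxWidthMargin);   // ~1.048
        float ratioH = (1.0f + camMainHeightMargin) / (1.0f + camAuxHeightMargin);  // ~1.048
        float cutOverFactor = basicFovRatio / std::min(ratioW, ratioH);             // ~1.91

        // calculateDualCamTransitionParams(): hysteresis and margins define the
        // zoom band in which both cameras stay active.
        float cutOverMainToAux = cutOverFactor + (marginHysterisisPct / 100.0f) * basicFovRatio; // ~2.01
        float cutOverAuxToMain = cutOverFactor;                                                  // ~1.91
        float transitionHigh   = cutOverMainToAux + (marginMainPct / 100.0f) * basicFovRatio;    // ~2.21
        float transitionLow    = cutOverAuxToMain - (marginAuxPct / 100.0f) * basicFovRatio;     // ~1.61

        std::printf("cutover %.2f / %.2f, transition zone %.2f .. %.2f\n",
                    cutOverAuxToMain, cutOverMainToAux, transitionLow, transitionHigh);
        return 0;
    }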
 
 
@@ -783,11 +906,12 @@
  * RETURN     : Zoom value
  *
  *==========================================================================*/
-uint32_t QCameraFOVControl::findZoomValue(uint32_t zoomRatio)
+uint32_t QCameraFOVControl::findZoomValue(
+        uint32_t zoomRatio)
 {
     uint32_t zoom = 0;
-    for (int i = 0; i < ZOOM_TABLE_SIZE; ++i) {
-        if (zoomRatio <= zoomTableDualCam[i]) {
+    for (uint32_t i = 0; i < mFovControlData.zoomRatioTableCount; ++i) {
+        if (zoomRatio <= mFovControlData.zoomRatioTable[i]) {
             zoom = i;
             break;
         }
@@ -808,9 +932,10 @@
  * RETURN     : zoom ratio
  *
  *==========================================================================*/
-uint32_t QCameraFOVControl::findZoomRatio(uint32_t zoom)
+uint32_t QCameraFOVControl::findZoomRatio(
+        uint32_t zoom)
 {
-    return zoomTableDualCam[zoom];
+    return mFovControlData.zoomRatioTable[zoom];
 }
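The zoom translation now walks the zoom ratio table reported in the capabilities instead of the removed hard-coded table. A small self-contained sketch of the lookup; the table contents below are illustrative only, with index 0 treated as the 1x ratio (the code above divides by zoomRatioTable[0]):

    #include <cstdint>
    #include <cstdio>

    static const uint32_t kZoomRatioTable[] = { 100, 102, 105, 109, 113, 118, 123, 129 };
    static const uint32_t kZoomRatioTableCount =
            sizeof(kZoomRatioTable) / sizeof(kZoomRatioTable[0]);

    // Same scan as findZoomValue(): first index whose ratio is >= the requested
    // ratio, falling back to index 0 when nothing matches.
    static uint32_t findZoomValueSketch(uint32_t zoomRatio) {
        for (uint32_t i = 0; i < kZoomRatioTableCount; ++i) {
            if (zoomRatio <= kZoomRatioTable[i]) {
                return i;
            }
        }
        return 0;
    }

    int main() {
        uint32_t zoom = findZoomValueSketch(110);
        std::printf("ratio 110 -> zoom value %u -> table ratio %u\n",
                    zoom, kZoomRatioTable[zoom]);  // prints 4 and 113
        return 0;
    }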
 
 
@@ -826,7 +951,8 @@
  * RETURN     : Zoom value for aux camera
  *
  *==========================================================================*/
-uint32_t QCameraFOVControl::readjustZoomForAux(uint32_t zoomMain)
+uint32_t QCameraFOVControl::readjustZoomForAux(
+        uint32_t zoomMain)
 {
     uint32_t zoomRatioMain;
     uint32_t zoomRatioAux;
@@ -850,7 +976,8 @@
  * RETURN     : Zoom value for main camera
  *
  *==========================================================================*/
-uint32_t QCameraFOVControl::readjustZoomForMain(uint32_t zoomAux)
+uint32_t QCameraFOVControl::readjustZoomForMain(
+        uint32_t zoomAux)
 {
     uint32_t zoomRatioMain;
     uint32_t zoomRatioAux;
@@ -874,7 +1001,8 @@
  * RETURN     : none
  *
  *==========================================================================*/
-void QCameraFOVControl::convertUserZoomToMainAndAux(uint32_t zoom)
+void QCameraFOVControl::convertUserZoomToMainAndAux(
+        uint32_t zoom)
 {
     mFovControlData.zoomMain = zoom;
     mFovControlData.zoomAux  = readjustZoomForAux(mFovControlData.zoomMain);
@@ -894,9 +1022,9 @@
 void QCameraFOVControl::convertDisparityForInputParams()
 {
     Mutex::Autolock lock(mMutex);
-
     mFovControlData.shiftHorzAdjMain = mFovControlData.transitionParams.cropRatio /
-            (mFovControlData.zoomMain / 4096.0f) * mFovControlData.spatialAlign.shiftHorz;
+            (mFovControlData.zoomMain / (float)mFovControlData.zoomRatioTable[0]) *
+            mFovControlData.spatialAlignResult.shiftHorz;
 }
 
 
@@ -911,9 +1039,12 @@
  * RETURN     : Translated focus area ROI for aux camera
  *
  *==========================================================================*/
-cam_roi_info_t QCameraFOVControl::translateFocusAreas(cam_roi_info_t roiAfMain)
+cam_roi_info_t QCameraFOVControl::translateFocusAreas(
+        cam_roi_info_t roiAfMain)
 {
     float fovRatio;
+    float zoomMain;
+    float zoomAux;
     float AuxDiffRoiLeft;
     float AuxDiffRoiTop;
     float AuxRoiLeft;
@@ -921,7 +1052,11 @@
 
     cam_roi_info_t roiAfAux;
 
+    zoomMain = findZoomRatio(mFovControlData.zoomMain);
+    zoomAux  = findZoomRatio(mFovControlData.zoomAux);
+
     fovRatio = mFovControlData.transitionParams.cropRatio;
+    fovRatio = (zoomAux / zoomMain) * mFovControlData.transitionParams.cropRatio;
 
     for (int i = 0; i < roiAfMain.num_roi; ++i) {
         AuxDiffRoiLeft = fovRatio*(roiAfMain.roi[i].left -
@@ -957,9 +1092,12 @@
  * RETURN     : Translated AEC ROI for aux camera
  *
  *==========================================================================*/
-cam_set_aec_roi_t QCameraFOVControl::translateMeteringAreas(cam_set_aec_roi_t roiAecMain)
+cam_set_aec_roi_t QCameraFOVControl::translateMeteringAreas(
+        cam_set_aec_roi_t roiAecMain)
 {
     float fovRatio;
+    float zoomMain;
+    float zoomAux;
     float AuxDiffRoiX;
     float AuxDiffRoiY;
     float AuxRoiX;
@@ -967,7 +1105,12 @@
 
     cam_set_aec_roi_t roiAecAux;
 
+    zoomMain = findZoomRatio(mFovControlData.zoomMain);
+    zoomAux  = findZoomRatio(mFovControlData.zoomAux);
+
     fovRatio = mFovControlData.transitionParams.cropRatio;
+    fovRatio = (zoomAux / zoomMain) * mFovControlData.transitionParams.cropRatio;
+
 
     for (int i = 0; i < roiAecMain.num_roi; ++i) {
         AuxDiffRoiX = fovRatio*(roiAecMain.cam_aec_roi_position.coordinate[i].x -
@@ -1005,17 +1148,18 @@
  * RETURN     : none
  *
  *==========================================================================*/
-cam_face_detection_data_t QCameraFOVControl::translateRoiFD(cam_face_detection_data_t metaFD)
+cam_face_detection_data_t QCameraFOVControl::translateRoiFD(
+        cam_face_detection_data_t metaFD)
 {
     cam_face_detection_data_t metaFDTranslated = metaFD;
 
     for (int i = 0; i < metaFDTranslated.num_faces_detected; ++i) {
         if (mDualCamParams.positionAux == CAM_POSITION_LEFT) {
             metaFDTranslated.faces[i].face_boundary.left -=
-                mFovControlData.spatialAlign.shiftHorz;
+                mFovControlData.spatialAlignResult.shiftHorz;
         } else {
             metaFDTranslated.faces[i].face_boundary.left +=
-                mFovControlData.spatialAlign.shiftHorz;
+                mFovControlData.spatialAlignResult.shiftHorz;
         }
     }
     return metaFDTranslated;
diff --git a/msmcobalt/QCamera2/util/QCameraFOVControl.h b/msmcobalt/QCamera2/util/QCameraFOVControl.h
index 2376fc7..0d99b38 100644
--- a/msmcobalt/QCamera2/util/QCameraFOVControl.h
+++ b/msmcobalt/QCamera2/util/QCameraFOVControl.h
@@ -31,24 +31,11 @@
 #define __QCAMERAFOVCONTROL_H__
 
 #include <utils/Mutex.h>
-
 #include "cam_intf.h"
 
-typedef enum {
-    CAM_TYPE_WIDE,
-    CAM_TYPE_TELE,
-    CAM_COUNT
-} cam_type;
+using namespace android;
 
-typedef enum {
-    CAM_POSITION_LEFT,
-    CAM_POSITION_RIGHT
-} cam_relative_position;
-
-typedef enum {
-    READY,
-    NOT_READY
-} status;
+namespace qcamera {
 
 typedef enum {
     AE_SETTLED,
@@ -61,26 +48,9 @@
 } af_status;
 
 typedef enum {
-    AWB_SETTLED,
-    AWB_CONVERGING
-} awb_status;
-
-typedef struct {
-    ae_status   statusAE;
-    af_status   statusAF;
-    awb_status  statusAWB;
-} status_3A_t;
-
-typedef struct {
-    status_3A_t camMain;
-    status_3A_t camAux;
-} dual_cam_3A_status_t;
-
-typedef struct {
-    status   status;
-    uint32_t shiftHorz;
-    uint32_t shiftVert;
-} spatial_align_metadata_t;
+    CAM_POSITION_LEFT,
+    CAM_POSITION_RIGHT
+} cam_relative_position;
 
 typedef enum {
     STATE_WIDE,
@@ -89,6 +59,36 @@
     STATE_TRANSITION_TELE_TO_WIDE
 } dual_cam_state;
 
+
+typedef struct {
+    ae_status status;
+    uint16_t  lux;
+} ae_info;
+
+typedef struct {
+    af_status status;
+    uint16_t  focusDistCm;
+} af_info;
+
+typedef struct {
+    ae_info ae;
+    af_info af;
+} status_3A_t;
+
+typedef struct {
+    status_3A_t main;
+    status_3A_t aux;
+} dual_cam_3A_status_t;
+
+typedef struct {
+    uint8_t         status;
+    uint32_t        shiftHorz;
+    uint32_t        shiftVert;
+    uint32_t        activeCamState;
+    uint8_t         camMasterPreview;
+    uint8_t         camMaster3A;
+} spatial_align_result_t;
+
 typedef struct {
     float    cropRatio;
     float    cutOverFactor;
@@ -102,14 +102,23 @@
 typedef struct {
     uint32_t                     zoomMain;
     uint32_t                     zoomAux;
+    uint32_t                    *zoomRatioTable;
+    uint32_t                     zoomRatioTableCount;
     cam_sync_type_t              camWide;
     cam_sync_type_t              camTele;
-    uint32_t                     shiftHorzAdjMain;
     dual_cam_state               camState;
     dual_cam_3A_status_t         status3A;
-    dual_cam_transition_params_t transitionParams;
     cam_dimension_t              previewSize;
-    spatial_align_metadata_t     spatialAlign;
+    spatial_align_result_t       spatialAlignResult;
+    uint32_t                     availableSpatialAlignSolns;
+    uint32_t                     shiftHorzAdjMain;
+    float                        camMainWidthMargin;
+    float                        camMainHeightMargin;
+    float                        camAuxWidthMargin;
+    float                        camAuxHeightMargin;
+    bool                         camcorderMode;
+    float                        basicFovRatio;
+    dual_cam_transition_params_t transitionParams;
 } fov_control_data_t;
 
 typedef struct {
@@ -140,92 +149,35 @@
 typedef struct {
     cam_sync_type_t camMasterPreview;
     cam_sync_type_t camMaster3A;
-    uint32_t        camState;
-    bool            snapshotFusion;
+    uint32_t        activeCamState;
+    bool            snapshotPostProcess;
 } fov_control_result_t;
 
 
-#define ZOOM_TABLE_SIZE 182
-// TODO : Replace zoom table with the zoom ratio table from capabilities.
-// That zoom ratio table has ratios normalized to 100.
-static const uint32_t zoomTableDualCam[ZOOM_TABLE_SIZE] = {
-                              4096, 4191, 4289, 4389, 4492,
-                              4597, 4705, 4815, 4927, 5042,
-                              5160, 5281, 5404, 5531, 5660,
-                              5792, 5928, 6066, 6208, 6353,
-                              6501, 6653, 6809, 6968, 7131,
-                              7298, 7468, 7643, 7822, 8004,
-                              8192, 8383, 8579, 8779, 8985,
-                              9195, 9410, 9630, 9855, 10085,
-                              10321, 10562, 10809, 11062, 11320,
-                              11585, 11856, 12133, 12416, 12706,
-                              13003, 13307, 13619, 13937, 14263,
-                              14596, 14937, 15286, 15644, 16009,
-                              16384, 16766, 17158, 17559, 17970,
-                              18390, 18820, 19260, 19710, 20171,
-                              20642, 21125, 21618, 22124, 22641,
-                              23170, 23712, 24266, 24833, 25413,
-                              26007, 26615, 27238, 27874, 28526,
-                              29192, 29875, 30573, 31288, 32019,
-                              32768, 33533, 34317, 35119, 35940,
-                              36780, 37640, 38520, 39420, 40342,
-                              41285, 42250, 43237, 44248, 45282,
-                              46340, 47424, 48532, 49666, 50827,
-                              52015, 53231, 54476, 55749, 57052,
-                              58385, 59750, 61147, 62576, 64039,
-                              65536, 67067, 68635, 70239, 71881,
-                              73561, 75281, 77040, 78841, 80684,
-                              82570, 84500, 86475, 88496, 90565,
-                              92681, 94848, 97065, 99334, 101655,
-                              104031, 106463, 108952, 111498, 114104,
-                              116771, 119501, 122294, 125152, 128078,
-                              131072, 134135, 137270, 140479, 143763,
-                              147123, 150562, 154081, 157682, 161368,
-                              165140, 169000, 172950, 176993, 181130,
-                              185363, 189696, 194130, 198668, 203311,
-                              208063, 212927, 217904, 222997, 228209,
-                              233543, 239002, 244589, 250305, 256156,
-                              262144, 999999
-};
-
-
-using namespace android;
-
-namespace qcamera {
-
 class QCameraFOVControl {
 public:
     ~QCameraFOVControl();
-
-    static QCameraFOVControl* create(cam_capability_t *capsMainCam,
-                                     cam_capability_t* capsAuxCam);
-
+    static QCameraFOVControl* create(cam_capability_t *capsMainCam, cam_capability_t* capsAuxCam);
+    int32_t updateConfigSettings(parm_buffer_t* paramsMainCam, parm_buffer_t* paramsAuxCam);
     cam_capability_t consolidateCapabilities(cam_capability_t* capsMainCam,
-                                             cam_capability_t* capsAuxCam);
-    int32_t translateInputParams(parm_buffer_t* paramsMainCam,
-                                 parm_buffer_t *paramsAuxCam);
+            cam_capability_t* capsAuxCam);
+    int32_t translateInputParams(parm_buffer_t* paramsMainCam, parm_buffer_t *paramsAuxCam);
     metadata_buffer_t* processResultMetadata(metadata_buffer_t* metaMainCam,
-                                             metadata_buffer_t* metaAuxCam);
+            metadata_buffer_t* metaAuxCam);
     fov_control_result_t getFovControlResult();
 
 private:
     QCameraFOVControl();
-    float calculateBasicFovRatio();
-    float calculateFovAdjustmentWorstCaseDisparity();
-    float calculateFovAdjustmentRollPitchYaw();
-    float combineFovAdjustment(float fovAdjustBasic,
-                               float fovAdjustFromDisparity,
-                               float fovAdjustFromRollPitchYaw);
-    void  calculateDualCamTransitionParams(float fovAdjustBasic,
-                                           float zoomTranslationFactor);
-
-
+    bool validateAndExtractParameters(cam_capability_t  *capsMainCam,
+            cam_capability_t  *capsAuxCam);
+    bool calculateBasicFovRatio();
+    bool combineFovAdjustment();
+    void  calculateDualCamTransitionParams();
     void convertUserZoomToMainAndAux(uint32_t zoom);
     uint32_t readjustZoomForAux(uint32_t zoomMain);
     uint32_t readjustZoomForMain(uint32_t zoomAux);
     uint32_t findZoomRatio(uint32_t zoom);
     inline uint32_t findZoomValue(uint32_t zoomRatio);
-
     cam_face_detection_data_t translateRoiFD(cam_face_detection_data_t faceDetectionInfo);
     cam_roi_info_t translateFocusAreas(cam_roi_info_t roiAfMain);
     cam_set_aec_roi_t translateMeteringAreas(cam_set_aec_roi_t roiAecMain);
diff --git a/msmcobalt/QCamera2/util/QCameraTrace.cpp b/msmcobalt/QCamera2/util/QCameraTrace.cpp
new file mode 100644
index 0000000..7e46a5d
--- /dev/null
+++ b/msmcobalt/QCamera2/util/QCameraTrace.cpp
@@ -0,0 +1,181 @@
+/* Copyright (c) 2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+// Camera dependencies
+#include "QCameraTrace.h"
+
+#define CAMSCOPE_MEMSTORE_SIZE 0x00100000 // 1MB
+
+volatile uint32_t kpi_camscope_flags = 0;
+volatile uint32_t kpi_camscope_frame_count = 0;
+
+static const char * camscope_filenames[CAMSCOPE_SECTION_SIZE] = {
+    "/data/misc/camera/camscope_mmcamera.bin",
+    "/data/misc/camera/camscope_hal.bin",
+    "/data/misc/camera/camscope_jpeg.bin"
+};
+
+static FILE * camscope_fd[CAMSCOPE_SECTION_SIZE];
+static uint32_t camscope_num_bytes_stored[CAMSCOPE_SECTION_SIZE];
+static char * camscope_memstore[CAMSCOPE_SECTION_SIZE];
+static pthread_mutex_t camscope_mutex[CAMSCOPE_SECTION_SIZE];
+
+/* camscope_init:
+ *
+ *  @camscope_section: camscope section where this function is occurring
+ *
+ *  Initializes the CameraScope tool functionality
+ *
+ *  Return: N/A
+ */
+void camscope_init(camscope_section_type camscope_section) {
+    pthread_mutex_init(&(camscope_mutex[camscope_section]), NULL);
+    if (camscope_fd[camscope_section] == NULL) {
+        if(camscope_memstore[camscope_section] == NULL) {
+            camscope_memstore[camscope_section] =
+                (char *)malloc(CAMSCOPE_MEMSTORE_SIZE);
+            if (camscope_memstore[camscope_section] == NULL) {
+              CLOGE(CAM_NO_MODULE, "Failed to allocate camscope memstore "
+                    "with size %d\n", CAMSCOPE_MEMSTORE_SIZE);
+            }
+        }
+        camscope_fd[camscope_section] =
+            fopen(camscope_filenames[camscope_section], "ab");
+    }
+}
+
+/* camscope_flush:
+ *
+ *  @camscope_section: camscope section where this function is occurring
+ *
+ *  Flushes the camscope memstore to the file system
+ *
+ *  Return: N/A
+ */
+static void camscope_flush(camscope_section_type camscope_section) {
+    if (camscope_fd[camscope_section] != NULL &&
+        camscope_memstore[camscope_section] != NULL) {
+        fwrite(camscope_memstore[camscope_section], sizeof(char),
+               camscope_num_bytes_stored[camscope_section],
+               camscope_fd[camscope_section]);
+        camscope_num_bytes_stored[camscope_section] = 0;
+    }
+}
+
+/* camscope_destroy:
+ *
+ *  @camscope_section: camscope section where this function is occurring
+ *
+ *  Flushes any remaining data to the file system and cleans up CameraScope
+ *
+ *  Return: N/A
+ */
+void camscope_destroy(camscope_section_type camscope_section) {
+    if (camscope_fd[camscope_section] != NULL) {
+        pthread_mutex_lock(&(camscope_mutex[camscope_section]));
+        if(camscope_memstore[camscope_section] != NULL) {
+            camscope_flush(camscope_section);
+            free(camscope_memstore[camscope_section]);
+            camscope_memstore[camscope_section] = NULL;
+        }
+        fclose(camscope_fd[camscope_section]);
+        camscope_fd[camscope_section] = NULL;
+        pthread_mutex_unlock(&(camscope_mutex[camscope_section]));
+    }
+    pthread_mutex_destroy(&(camscope_mutex[camscope_section]));
+}
+
+/* camscope_reserve:
+ *
+ *  @camscope_section:     camscope section where this function is occurring
+ *  @num_bytes_to_reserve: number in bytes to reserve on the memstore
+ *
+ *  Reserves a number of bytes on the memstore flushing to the
+ *  file system if remaining space is insufficient
+ *
+ *  Return: number of bytes successfully reserved on the memstore
+ */
+uint32_t camscope_reserve(camscope_section_type camscope_section,
+                                 uint32_t num_bytes_to_reserve) {
+    uint32_t bytes_reserved = 0;
+    if (camscope_fd[camscope_section] != NULL &&
+        num_bytes_to_reserve <= CAMSCOPE_MEMSTORE_SIZE) {
+        int32_t size = CAMSCOPE_MEMSTORE_SIZE -
+               camscope_num_bytes_stored[camscope_section] -
+               num_bytes_to_reserve;
+        if (size < 0) {
+            camscope_flush(camscope_section);
+        }
+        bytes_reserved = num_bytes_to_reserve;
+    }
+    return bytes_reserved;
+}
+
+/* camscope_store_data:
+ *
+ *  @camscope_section: camscope section where this function is occurring
+ *  @data:             data to be stored
+ *  @size:             size of data to be stored
+ *
+ *  Store the data to the memstore and calculate remaining space
+ *
+ *  Return: N/A
+ */
+void camscope_store_data(camscope_section_type camscope_section,
+                       void* data, uint32_t size) {
+    if(camscope_memstore[camscope_section] != NULL) {
+        memcpy(camscope_memstore[camscope_section] +
+               camscope_num_bytes_stored[camscope_section], (char*)data, size);
+        camscope_num_bytes_stored[camscope_section] += size;
+    }
+}
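A hedged usage sketch for the memstore API above; the payload struct is a placeholder (real callers serialize the packet types from camscope_packet_type.h), and camscope_init() is assumed to have run already, e.g. via the CAMSCOPE_INIT macro in QCameraTrace.h:

    #include "QCameraTrace.h"

    struct sample_packet { uint32_t frame_id; uint32_t event; };

    static void log_sample_packet(uint32_t frame_id, uint32_t event) {
        sample_packet pkt = { frame_id, event };
        camscope_mutex_lock(CAMSCOPE_SECTION_HAL);
        // Reserve first so a full memstore gets flushed to disk before storing.
        if (camscope_reserve(CAMSCOPE_SECTION_HAL, sizeof(pkt)) == sizeof(pkt)) {
            camscope_store_data(CAMSCOPE_SECTION_HAL, &pkt, sizeof(pkt));
        }
        camscope_mutex_unlock(CAMSCOPE_SECTION_HAL);
    }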
+
+/* camscope_mutex_lock:
+ *
+ *  @camscope_section: camscope section where this function is occurring
+ *
+ *  Lock the camscope mutex lock for the given camscope section
+ *
+ *  Return: N/A
+ */
+void camscope_mutex_lock(camscope_section_type camscope_section) {
+    pthread_mutex_lock(&(camscope_mutex[camscope_section]));
+}
+
+/* camscope_mutex_unlock:
+ *
+ *  @camscope_section: camscope section where this function is occurring
+ *
+ *  Unlock the camscope mutex lock for the given camscope section
+ *
+ *  Return: N/A
+ */
+void camscope_mutex_unlock(camscope_section_type camscope_section) {
+    pthread_mutex_unlock(&(camscope_mutex[camscope_section]));
+}
diff --git a/msmcobalt/QCamera2/util/QCameraTrace.h b/msmcobalt/QCamera2/util/QCameraTrace.h
index d7eeb8f..31b0f77 100644
--- a/msmcobalt/QCamera2/util/QCameraTrace.h
+++ b/msmcobalt/QCamera2/util/QCameraTrace.h
@@ -21,6 +21,7 @@
 #define __QCAMERATRACE_H__
 
 #include <utils/Trace.h>
+#include "camscope_packet_type.h"
 
 #ifdef QCAMERA_REDEFINE_LOG
 #define CAM_MODULE CAM_HAL_MODULE
@@ -99,6 +100,152 @@
 #define ATRACE_INT ATRACE_INT_DBG
 #define ATRACE_END ATRACE_END_DBG
 
+#define CAMSCOPE_MAX_STRING_LENGTH 64
+
+/* Initializes CameraScope tool */
+void camscope_init(camscope_section_type camscope_section);
+
+/* Cleans up CameraScope tool */
+void camscope_destroy(camscope_section_type camscope_section);
+
+/* Reserves a number of bytes on the memstore flushing to the
+ * file system if remaining space is insufficient */
+uint32_t camscope_reserve(camscope_section_type camscope_section,
+                                 uint32_t num_bytes_to_reserve);
+
+/* Store the data to the memstore and calculate remaining space */
+void camscope_store_data(camscope_section_type camscope_section,
+                       void* data, uint32_t size);
+
+/* Lock the camscope mutex lock for the given camscope section */
+void camscope_mutex_lock(camscope_section_type camscope_section);
+
+/* Unlock the camscope mutex lock for the given camscope section */
+void camscope_mutex_unlock(camscope_section_type camscope_section);
+
+#define CAMSCOPE_SYSTRACE_TIME_MARKER() { \
+    if (kpi_camscope_frame_count != 0) { \
+        if (kpi_camscope_flags & CAMSCOPE_ON_FLAG) { \
+            struct timeval t_domain; \
+            char trace_time_conv[CAMSCOPE_MAX_STRING_LENGTH]; \
+            gettimeofday(&t_domain, NULL); \
+            snprintf(trace_time_conv, sizeof(trace_time_conv), \
+                     "_CAMSCOPE_TIME_CONV_:%ld:%ld", t_domain.tv_sec, \
+                     t_domain.tv_usec); \
+            atrace_int(ATRACE_TAG_ALWAYS, trace_time_conv, 0); \
+        } \
+    } \
+}
+
+#define CAMSCOPE_MASK(mask) { \
+    char prop[PROPERTY_VALUE_MAX]; \
+    property_get("persist.camera.kpi.camscope", prop, "0"); \
+    mask = atoi(prop); \
+}
+
+#define CAMSCOPE_FRAME_COUNT_MASK(mask) { \
+    char prop[PROPERTY_VALUE_MAX]; \
+    property_get("persist.camera.kpi.camscope_cnt", prop, "0"); \
+    mask = atoi(prop); \
+}
+
+#define CAMSCOPE_UPDATE_FLAGS(camscope_section, camscope_prop) { \
+    if (kpi_camscope_frame_count != 0) { \
+        static uint32_t camscope_frame_counter = 0; \
+        if (camscope_frame_counter >= kpi_camscope_frame_count) { \
+            uint32_t prev_prop = camscope_prop; \
+            CAMSCOPE_MASK(camscope_prop); \
+            uint32_t is_prev_prop_on = (prev_prop & CAMSCOPE_ON_FLAG) \
+                                        ? 1 : 0; \
+            uint32_t is_prop_on = (camscope_prop & CAMSCOPE_ON_FLAG) \
+                                   ? 1 : 0; \
+            if (is_prev_prop_on ^ is_prop_on) { \
+                if (is_prop_on) { \
+                    camscope_init(camscope_section); \
+                } else { \
+                    camscope_destroy(camscope_section); \
+                } \
+            } \
+            CAMSCOPE_SYSTRACE_TIME_MARKER(); \
+            camscope_frame_counter = 0; \
+        } \
+        else { \
+            ++camscope_frame_counter; \
+        } \
+    } \
+}
+
+#define CAMSCOPE_INIT(camscope_section) { \
+    CAMSCOPE_FRAME_COUNT_MASK(kpi_camscope_frame_count); \
+    if (kpi_camscope_frame_count != 0) { \
+        CAMSCOPE_MASK(kpi_camscope_flags); \
+        if (kpi_camscope_flags & CAMSCOPE_ON_FLAG) { \
+            camscope_init(camscope_section); \
+            CAMSCOPE_SYSTRACE_TIME_MARKER(); \
+        } \
+    } \
+}
+
+#define CAMSCOPE_DESTROY(camscope_section) { \
+    if (kpi_camscope_frame_count != 0) { \
+        if (kpi_camscope_flags & CAMSCOPE_ON_FLAG) { \
+            camscope_destroy(camscope_section); \
+        } \
+    } \
+}
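
Taken together, CAMSCOPE_INIT, CAMSCOPE_UPDATE_FLAGS, and CAMSCOPE_DESTROY gate CamScope on the persist.camera.kpi.camscope / persist.camera.kpi.camscope_cnt properties. A hedged sketch of where they appear intended to run, inferred from the per-frame counter inside CAMSCOPE_UPDATE_FLAGS; the *_example functions are hypothetical, while CAMSCOPE_SECTION_HAL and kpi_camscope_flags are the section id and flag variable these macros already reference.

    /* Illustrative placement only, not code from this change. */
    void open_camera_example(void)  { CAMSCOPE_INIT(CAMSCOPE_SECTION_HAL); }
    void close_camera_example(void) { CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL); }
    void per_frame_example(void) {
        /* Re-reads the setprop roughly every kpi_camscope_frame_count frames
         * and starts/stops CamScope when the on-flag toggles. */
        CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
    }
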
+
+#define KPI_ATRACE_CAMSCOPE_BEGIN(camscope_name) ({\
+if (camscope_name < CAMSCOPE_EVENT_NAME_SIZE && \
+    camscope_name >= 0) { \
+    KPI_ATRACE_BEGIN(camscope_atrace_names[camscope_name]); \
+} \
+camscope_sw_base_log((uint32_t)CAMSCOPE_SECTION_HAL, \
+                     CAMSCOPE_KPI_MASK, \
+                     CAMSCOPE_SYNC_BEGIN, \
+                     camscope_name); \
+})
+
+#define KPI_ATRACE_CAMSCOPE_END(camscope_name) ({\
+KPI_ATRACE_END(); \
+camscope_sw_base_log((uint32_t)CAMSCOPE_SECTION_HAL, \
+                     CAMSCOPE_KPI_MASK, \
+                     CAMSCOPE_SYNC_END, \
+                     camscope_name); \
+})
+
+// This macro only works with begin/end counters (nonzero = begin, 0 = end)
+#define KPI_ATRACE_CAMSCOPE_INT(name, camscope_name, counter) ({\
+KPI_ATRACE_INT(name, counter); \
+camscope_timing_log((uint32_t)CAMSCOPE_SECTION_HAL, \
+                     CAMSCOPE_KPI_MASK, \
+                     counter ? CAMSCOPE_ASYNC_BEGIN : CAMSCOPE_ASYNC_END, \
+                     camscope_name, 0); \
+})
+
+#define ATRACE_CAMSCOPE_BEGIN(camscope_name) ({\
+if (camscope_name < CAMSCOPE_EVENT_NAME_SIZE && \
+    camscope_name >= 0) { \
+    ATRACE_BEGIN_DBG(camscope_atrace_names[camscope_name]); \
+} \
+camscope_sw_base_log((uint32_t)CAMSCOPE_SECTION_HAL, \
+                     CAMSCOPE_KPI_DBG_MASK, \
+                     CAMSCOPE_SYNC_BEGIN, \
+                     camscope_name); \
+})
+
+#define ATRACE_CAMSCOPE_END(camscope_name) ({\
+ATRACE_END_DBG(); \
+camscope_sw_base_log((uint32_t)CAMSCOPE_SECTION_HAL, \
+                     CAMSCOPE_KPI_DBG_MASK, \
+                     CAMSCOPE_SYNC_END, \
+                     camscope_name); \
+})
+
+#define KPI_ATRACE_CAMSCOPE_NAME(camscope_name) qcamera::CamscopeTraceKpi ___tracer(camscope_name)
+#define ATRACE_CAMSCOPE_NAME(camscope_name) qcamera::CamscopeTraceDbg ___tracer(camscope_name)
+#define KPI_ATRACE_CAMSCOPE_CALL(camscope_name) KPI_ATRACE_CAMSCOPE_NAME(camscope_name)
+#define ATRACE_CAMSCOPE_CALL(camscope_name) ATRACE_CAMSCOPE_NAME(camscope_name)
+
 #define KPI_ATRACE_NAME(name) qcamera::ScopedTraceKpi ___tracer(ATRACE_TAG, name)
 #define ATRACE_NAME(name) qcamera::ScopedTraceDbg ___tracer(ATRACE_TAG, name)
 #define KPI_ATRACE_CALL() KPI_ATRACE_NAME(__FUNCTION__)
@@ -143,6 +290,36 @@
     private:
         uint64_t mTag;
 };
+
+class CamscopeTraceKpi {
+public:
+    inline CamscopeTraceKpi(const uint32_t camscope_name)
+    : mCamscopeName(camscope_name) {
+        KPI_ATRACE_CAMSCOPE_BEGIN(mCamscopeName);
+    }
+
+    inline ~CamscopeTraceKpi() {
+        KPI_ATRACE_CAMSCOPE_END(mCamscopeName);
+    }
+
+    private:
+        const uint32_t mCamscopeName;
+};
+
+class CamscopeTraceDbg {
+public:
+    inline CamscopeTraceDbg(const uint32_t camscope_name)
+    : mCamscopeName(camscope_name) {
+        ATRACE_CAMSCOPE_BEGIN(mCamscopeName);
+    }
+
+    inline ~CamscopeTraceDbg() {
+        ATRACE_CAMSCOPE_END(mCamscopeName);
+    }
+
+    private:
+        const uint32_t mCamscopeName;
+};
 };
 
 extern volatile uint32_t gKpiDebugLevel;
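
With the RAII wrappers above in place, a single line at the top of a HAL function emits a matched begin/end pair to both atrace and the CamScope memstore. A minimal sketch, assuming CAMSCOPE_HAL1_TAKE_PICTURE (name illustrative) is one of the event identifiers declared in camscope_packet_type.h:

    #include "QCameraTrace.h"

    void take_picture_example() {
        // Constructor logs CAMSCOPE_SYNC_BEGIN plus the atrace begin marker.
        KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL1_TAKE_PICTURE);
        // ... capture work ...
        // CamscopeTraceKpi's destructor emits CAMSCOPE_SYNC_END and ends the
        // atrace slice when this scope exits.
    }
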
diff --git a/msmcobalt/QCamera2/util/camscope_packet_type.cpp b/msmcobalt/QCamera2/util/camscope_packet_type.cpp
new file mode 100644
index 0000000..a70a6f8
--- /dev/null
+++ b/msmcobalt/QCamera2/util/camscope_packet_type.cpp
@@ -0,0 +1,474 @@
+/* Copyright (c) 2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#include "camscope_packet_type.h"
+#include "QCameraTrace.h"
+
+#if defined(__linux__) && !defined(__ANDROID__)
+#include <unistd.h>
+#include <sys/syscall.h>
+#endif
+
+const char * camscope_atrace_names[CAMSCOPE_EVENT_NAME_SIZE] = {
+    "Mct_Sof",
+    "Mct_super_params",
+    "Mct_special_event",
+    "Mct_process_bus_msg",
+    "Camera:AFD",
+    "Camera:ASD",
+    "Camera:AEC",
+    "Camera:AWB",
+    "Camera:AF",
+    "CPP",
+    "CPP_Capture",
+    "CPP_clock_request",
+    "CPP_Holding_Time",
+    "CPP_Hardware_On",
+    "Snapshot",
+    "ISP_Hardware_Update",
+    "JPEG",
+    "FaceProc",
+    "Sensor_process_event",
+    "FD_num_faces_detected",
+    "Camera:alloc",
+    "iface:streamon_fwd",
+    "iface:streamon_to_thread",
+    "iface:streamoff_fwd",
+    "iface:streamoff_to_thread",
+    "iface:config_ISP",
+    "iface:hw_config",
+    "iface:create_axi_hw",
+    "iface:config_axi_hw",
+    "iface:streamon",
+    "iface:streamoff",
+    "AF_START",
+    "AF_SET",
+    "Camera:IS",
+    "ISP:streamon",
+    "ISP:streamoff",
+    "ISP:set_Strm_config",
+    "VFE_HW_UPDATE",
+    "ISP:streamon_fwd",
+    "SENSOR_SD_OPEN",
+    "SENSOR_START_SESSION",
+    "SENSOR_SET_RESOLUTION",
+    "SENSOR_SET_STREAM_CONFIG",
+    "SENSOR_CONFIG_PDAF",
+    "SENSOR_LOAD_CHROMATIX",
+    "SENSOR_START_STREAM",
+    "SENSOR_SET_FPS",
+    "SENSOR_STREAMOFF",
+    "Camera:WNR",
+    "Camera:WNR:memcpy",
+    "PPROC_streamoff",
+    "CPP:Streamon",
+    "Camera:CAC",
+    "CPP_create_hw_frame",
+    "CPP_set_Strm_config",
+    "Mct_start_session",
+    "Mct_stop_session",
+    "IMG:streamon",
+    "MCT:create_buf",
+    "start_preview",
+    "stop_preview",
+    "take_picture",
+    "close_camera_device",
+    "openCamera",
+    "startPreview",
+    "stopPreview",
+    "capture_channel_cb_routine",
+    "preview_stream_cb_routine",
+    "SNAPSHOT",
+    "getStreamBufs",
+    "openCamera",
+    "closeCamera",
+    "flush",
+    "zsl_channel_cb",
+    "postproc_channel_cb_routine",
+    "synchronous_stream_cb_routine",
+    "nodisplay_preview_stream_cb_routine",
+    "rdi_mode_stream_cb_routine",
+    "postview_stream_cb_routine",
+    "video_stream_cb_routine",
+    "snapshot_channel_cb_routine",
+    "raw_stream_cb_routine",
+    "raw_channel_cb_routine",
+    "preview_raw_stream_cb_routine",
+    "snapshot_raw_stream_cb_routine",
+    "metadata_stream_cb_routine",
+    "reprocess_stream_cb_routine",
+    "callback_stream_cb_routine",
+    "set_preview_window",
+    "set_CallBacks",
+    "enable_msg_type",
+    "disable_msg_type",
+    "msg_type_enabled",
+    "prepare_preview",
+    "preview_enabled",
+    "restart_start_preview",
+    "restart_stop_preview",
+    "pre_start_recording",
+    "start_recording",
+    "stop_recording",
+    "recording_enabled",
+    "release_recording_frame",
+    "cancel_auto_focus",
+    "pre_take_picture",
+    "cancel_picture",
+    "set_parameters",
+    "stop_after_set_params",
+    "commit_params",
+    "restart_after_set_params",
+    "get_parameters",
+    "put_parameters",
+    "send_command",
+    "send_command_restart",
+    "release",
+    "register_face_image",
+    "prepare_snapshot",
+    "QCamera2HardwareInterface",
+    "initCapabilities",
+    "getCapabilities",
+    "preparePreview",
+    "prepareHardwareForSnapshot",
+    "initialize",
+    "configureStreams",
+    "configureStreamsPerfLocked",
+    "handleBatchMetadata",
+    "handleMetadataWithLock",
+    "handleInputBufferWithLock",
+    "handleBufferWithLock",
+    "processCaptureRequest",
+    "flushPerf",
+    "getCamInfo",
+    "dynamicUpdateMetaStreamInfo",
+    "start",
+    "stop",
+    "flush",
+    "streamCbRoutine",
+    "registerBuffer",
+    "reprocessCbRoutine",
+    "initialize",
+    "request",
+    "initialize",
+    "streamCbRoutine",
+    "initialize",
+    "streamCbRoutine",
+    "jpegEvtHandle",
+    "request",
+    "dataNotifyCB",
+    "streamCbRoutine",
+    "registerBuffer",
+    "start",
+    "stop",
+    "init",
+    "initJpeg",
+    "releaseJpegJobData",
+    "releasePPJobData",
+    "encodeData"
+};
+
+/*===========================================================================
+ * FUNCTION       : get_thread_id
+ *
+ * DESCRIPTION    : helper function to get the current thread ID
+ *
+ * PARAMETERS     : N/A
+ *
+ * RETURN         : the thread ID
+ *==========================================================================*/
+pid_t get_thread_id() {
+#if defined(__linux__) && !defined(__ANDROID__)
+    return syscall(__NR_gettid);
+#else
+    return gettid();
+#endif
+}
+
+/*===========================================================================
+ * FUNCTION       : fill_camscope_base
+ *
+ * DESCRIPTION    : helper function to set the struct's data with the given
+ *                  parameters
+ *
+ * PARAMETERS     :
+ *   @scope_struct: struct to fill out
+ *   @packet_type : packet_type data value to set
+ *   @size        : size data value to set
+ *
+ * RETURN         : void
+ *==========================================================================*/
+void fill_camscope_base(camscope_base *scope_struct, uint32_t packet_type,
+                        uint32_t size) {
+    scope_struct->packet_type = packet_type;
+    scope_struct->size = size;
+}
+
+/*===========================================================================
+ * FUNCTION       : fill_camscope_sw_base
+ *
+ * DESCRIPTION    : helper function to set the struct's data with the given
+ *                  parameters
+ *
+ * PARAMETERS     :
+ *   @scope_struct: struct to fill out
+ *   @packet_type : packet_type data value to set
+ *   @size        : size data value to set
+ *   @timestamp   : timestamp value to store
+ *   @thread_id   : identifier of where the packet originates from
+ *   @event_name  : name of the event to store
+ *
+ * RETURN         : void
+ *==========================================================================*/
+void fill_camscope_sw_base(camscope_sw_base *scope_struct,
+                           uint32_t packet_type, uint32_t size,
+                           struct timeval timestamp,
+                           int32_t thread_id, uint32_t event_name) {
+    fill_camscope_base(&(scope_struct->base), packet_type, size);
+    scope_struct->timestamp = timestamp;
+    scope_struct->thread_id = thread_id;
+    scope_struct->event_name = event_name;
+}
+
+/*===========================================================================
+ * FUNCTION       : fill_camscope_timing
+ *
+ * DESCRIPTION    : helper function to set the struct's data with the given
+ *                  parameters
+ *
+ * PARAMETERS     :
+ *   @scope_struct: struct to fill out
+ *   @packet_type : packet_type data value to set
+ *   @size        : size data value to set
+ *   @timestamp   : timestamp value to store
+ *   @thread_id   : identifier of where the packet originates from
+ *   @event_name  : name of the event to store
+ *   @frame_id    : frame identifier of which frame the packet originates from
+ *
+ * RETURN         : void
+ *==========================================================================*/
+void fill_camscope_timing(camscope_timing *scope_struct, uint32_t packet_type,
+                          uint32_t size, struct timeval timestamp,
+                          int32_t thread_id, uint32_t event_name,
+                          uint32_t frame_id) {
+    fill_camscope_sw_base(&(scope_struct->sw_base), packet_type, size,
+                          timestamp, thread_id, event_name);
+    scope_struct->frame_id = frame_id;
+}
+
+/*===========================================================================
+ * FUNCTION        : fill_camscope_in_out_timing
+ *
+ * DESCRIPTION     : helper function to set the struct's data with the given
+ *                   parameters
+ *
+ * PARAMETERS      :
+ *   @scope_struct : struct to fill out
+ *   @packet_type  : packet_type data value to set
+ *   @size         : size data value to set
+ *   @timestamp    : timestamp value to store
+ *   @thread_id    : identifier of where the packet originates from
+ *   @event_name   : name of the event to store
+ *   @in_timestamp : timestamp of when start of event occurred
+ *   @out_timestamp: timestamp of when end of event occurred
+ *   @frame_id     : frame identifier of which frame the packet
+ *                   originates from
+ *
+ * RETURN          : void
+ *==========================================================================*/
+void fill_camscope_in_out_timing(camscope_in_out_timing *scope_struct,
+                                 uint32_t packet_type, uint32_t size,
+                                 struct timeval timestamp,
+                                 int32_t thread_id, uint32_t event_name,
+                                 struct timeval in_timestamp,
+                                 struct timeval out_timestamp,
+                                 uint32_t frame_id) {
+    fill_camscope_sw_base(&(scope_struct->sw_base), packet_type, size,
+                          timestamp, thread_id, event_name);
+    scope_struct->in_timestamp = in_timestamp;
+    scope_struct->out_timestamp = out_timestamp;
+    scope_struct->frame_id = frame_id;
+}
+
+/*===========================================================================
+ * FUNCTION               : camscope_base_log
+ *
+ * DESCRIPTION            : CameraScope Base logging function that stores
+ *                          the base amount of data for a camscope packet
+ *
+ * PARAMETERS             :
+ *   @camscope_section    : section of code where this log is being called
+ *   @camscope_enable_mask: enable/disable mask
+ *   @packet_type         : camscope packet_type
+ *
+ * RETURN                 : void
+ *==========================================================================*/
+void camscope_base_log(uint32_t camscope_section,
+                       uint32_t camscope_enable_mask, uint32_t packet_type) {
+    if (kpi_camscope_frame_count != 0) {
+        if (kpi_camscope_flags & camscope_enable_mask) {
+            struct timeval timestamp;
+            gettimeofday(&timestamp, NULL);
+            camscope_mutex_lock((camscope_section_type)camscope_section);
+            camscope_base scope_struct;
+            uint32_t size = sizeof(scope_struct);
+            uint32_t total_size =
+              camscope_reserve((camscope_section_type)camscope_section, size);
+            if (size == total_size) {
+                fill_camscope_base(&scope_struct, packet_type, size);
+                camscope_store_data((camscope_section_type)camscope_section,
+                                    &scope_struct, size);
+            }
+            camscope_mutex_unlock((camscope_section_type)camscope_section);
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION               : camscope_sw_base_log
+ *
+ * DESCRIPTION            : CameraScope Software Base logging function that
+ *                          stores the minimum amount of data for tracing
+ *
+ * PARAMETERS             :
+ *   @camscope_section    : section of code where this log is being called
+ *   @camscope_enable_mask: enable/disable mask
+ *   @packet_type         : camscope packet_type
+ *   @event_name          : name of the event that the packet is storing
+ *
+ * RETURN                 : void
+ *==========================================================================*/
+void camscope_sw_base_log(uint32_t camscope_section,
+                          uint32_t camscope_enable_mask,
+                          uint32_t packet_type, uint32_t event_name) {
+    if (kpi_camscope_frame_count != 0) {
+        if (kpi_camscope_flags & camscope_enable_mask) {
+            struct timeval timestamp;
+            gettimeofday(&timestamp, NULL);
+            camscope_mutex_lock((camscope_section_type)camscope_section);
+            camscope_sw_base scope_struct;
+            uint32_t size = sizeof(scope_struct);
+            int32_t thread_id = (int32_t)get_thread_id();
+            uint32_t total_size =
+              camscope_reserve((camscope_section_type)camscope_section, size);
+            if (size == total_size) {
+                fill_camscope_sw_base(&scope_struct, packet_type, size,
+                                      timestamp, thread_id, event_name);
+                camscope_store_data((camscope_section_type)camscope_section,
+                                    &scope_struct, size);
+            }
+            camscope_mutex_unlock((camscope_section_type)camscope_section);
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION               : camscope_timing_log
+ *
+ * DESCRIPTION            : CameraScope Timing logging function that
+ *                          stores data used for the timing of events
+ *                          with respect to their frame id
+ *
+ * PARAMETERS             :
+ *   @camscope_section    : section of code where this log is being called
+ *   @camscope_enable_mask: enable/disable mask
+ *   @packet_type         : camscope packet_type
+ *   @event_name          : name of the event that the packet is storing
+ *   @frame_id            : frame id that the packet is logging
+ *
+ * RETURN                 : void
+ *==========================================================================*/
+void camscope_timing_log(uint32_t camscope_section,
+                         uint32_t camscope_enable_mask, uint32_t packet_type,
+                         uint32_t event_name, uint32_t frame_id) {
+    if (kpi_camscope_frame_count != 0) {
+        if (kpi_camscope_flags & camscope_enable_mask) {
+            struct timeval timestamp;
+            gettimeofday(&timestamp, NULL);
+            camscope_mutex_lock((camscope_section_type)camscope_section);
+            camscope_timing scope_struct;
+            uint32_t size = sizeof(scope_struct);
+            int32_t thread_id = (int32_t)get_thread_id();
+            uint32_t total_size =
+              camscope_reserve((camscope_section_type)camscope_section, size);
+            if (size == total_size) {
+                fill_camscope_timing(&scope_struct, packet_type, size,
+                                     timestamp, thread_id, event_name,
+                                     frame_id);
+                camscope_store_data((camscope_section_type)camscope_section,
+                                    &scope_struct, size);
+            }
+            camscope_mutex_unlock((camscope_section_type)camscope_section);
+        }
+    }
+}
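
One consumer of this function is the KPI_ATRACE_CAMSCOPE_INT macro added to QCameraTrace.h earlier in this diff, which maps a nonzero counter to CAMSCOPE_ASYNC_BEGIN and zero to CAMSCOPE_ASYNC_END (with frame_id fixed at 0). A sketch with illustrative names (snapshot_span_example, CAMSCOPE_SNAPSHOT):

    static void snapshot_span_example(void) {
        // Opens an async CamScope/atrace span for the "SNAPSHOT" counter.
        KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_SNAPSHOT, 1);
        /* ... snapshot processing ... */
        // Closes the span; the zero counter becomes CAMSCOPE_ASYNC_END.
        KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_SNAPSHOT, 0);
    }
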
+
+/*===========================================================================
+ * FUNCTION               : camscope_in_out_timing_log
+ *
+ * DESCRIPTION            : CameraScope In-Out Timing logging function that
+ *                          stores given timestamps with the packet data
+ *
+ * PARAMETERS             :
+ *   @camscope_section    : section of code where this log is being called
+ *   @camscope_enable_mask: enable/disable mask
+ *   @packet_type         : camscope packet_type
+ *   @event_name          : name of the event that the packet is storing
+ *   @in_timestamp        : timestamp of when the event started
+ *   @out_timestamp       : timestamp of when the event ended
+ *   @frame_id            : frame id that the packet is logging
+ *
+ * RETURN                 : void
+ *==========================================================================*/
+void camscope_in_out_timing_log(uint32_t camscope_section,
+                                uint32_t camscope_enable_mask,
+                                uint32_t packet_type, uint32_t event_name,
+                                struct timeval in_timestamp,
+                                struct timeval out_timestamp,
+                                uint32_t frame_id) {
+    if (kpi_camscope_frame_count != 0) {
+        if (kpi_camscope_flags & camscope_enable_mask) {
+            struct timeval timestamp;
+            gettimeofday(&timestamp, NULL);
+            camscope_mutex_lock((camscope_section_type)camscope_section);
+            camscope_in_out_timing scope_struct;
+            uint32_t size = sizeof(scope_struct);
+            int32_t thread_id = (int32_t)get_thread_id();
+            uint32_t total_size =
+              camscope_reserve((camscope_section_type)camscope_section, size);
+            if (size == total_size) {
+                fill_camscope_in_out_timing(&scope_struct, packet_type, size,
+                                            timestamp, thread_id, event_name,
+                                            in_timestamp, out_timestamp,
+                                            frame_id);
+                camscope_store_data((camscope_section_type)camscope_section,
+                                    &scope_struct, size);
+            }
+            camscope_mutex_unlock((camscope_section_type)camscope_section);
+        }
+    }
+}