msmcobalt: import from LA.UM.5.7.R1.06.00.01.253.019

Bug: 31420007
Change-Id: I5117c7a1622f7025ee208dbb1b8e8f2fe001b0c2
diff --git a/Android.mk b/Android.mk
index d9e939e..a747a99 100644
--- a/Android.mk
+++ b/Android.mk
@@ -1,10 +1,13 @@
 # TODO:  Find a better way to separate build configs for ADP vs non-ADP devices
 ifneq ($(TARGET_BOARD_AUTO),true)
-  ifneq ($(filter msm8996,$(TARGET_BOARD_PLATFORM)),)
-    ifneq ($(strip $(USE_CAMERA_STUB)),true)
-      ifneq ($(BUILD_TINY_ANDROID),true)
+  ifneq ($(strip $(USE_CAMERA_STUB)),true)
+    ifneq ($(BUILD_TINY_ANDROID),true)
+      ifneq ($(filter msm8996,$(TARGET_BOARD_PLATFORM)),)
         include $(call all-subdir-makefiles)
       endif
+      ifneq ($(filter msmcobalt,$(TARGET_BOARD_PLATFORM)),)
+        include $(call all-makefiles-under,$(call my-dir)/msmcobalt)
+      endif
     endif
   endif
 endif
diff --git a/msmcobalt/CleanSpec.mk b/msmcobalt/CleanSpec.mk
new file mode 100644
index 0000000..bb86ad0
--- /dev/null
+++ b/msmcobalt/CleanSpec.mk
@@ -0,0 +1,47 @@
+# Copyright (C) 2007 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# If you don't need to do a full clean build but would like to touch
+# a file or delete some intermediate files, add a clean step to the end
+# of the list.  These steps will only be run once, if they haven't been
+# run before.
+#
+# E.g.:
+#     $(call add-clean-step, touch -c external/sqlite/sqlite3.h)
+#     $(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/STATIC_LIBRARIES/libz_intermediates)
+#
+# Always use "touch -c" and "rm -f" or "rm -rf" to gracefully deal with
+# files that are missing or have been moved.
+#
+# Use $(PRODUCT_OUT) to get to the "out/target/product/blah/" directory.
+# Use $(OUT_DIR) to refer to the "out" directory.
+#
+# If you need to re-do something that's already mentioned, just copy
+# the command and add it to the bottom of the list.  E.g., if a change
+# that you made last week required touching a file and a change you
+# made today requires touching the same file, just copy the old
+# touch step and add it to the end of the list.
+#
+# ************************************************
+# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
+# ************************************************
+
+# For example:
+#$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/APPS/AndroidTests_intermediates)
+#$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/core_intermediates)
+#$(call add-clean-step, find $(OUT_DIR) -type f -name "IGTalkSession*" -print0 | xargs -0 rm -f)
+#$(call add-clean-step, rm -rf $(PRODUCT_OUT)/data/*)
+
+$(call add-clean-step, find $(OUT_DIR) -name "camera.msm8960*" -print0 | xargs -0 rm -rf)
diff --git a/msmcobalt/MODULE_LICENSE_BSD b/msmcobalt/MODULE_LICENSE_BSD
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/msmcobalt/MODULE_LICENSE_BSD
diff --git a/msmcobalt/QCamera2/Android.mk b/msmcobalt/QCamera2/Android.mk
new file mode 100644
index 0000000..04a935d
--- /dev/null
+++ b/msmcobalt/QCamera2/Android.mk
@@ -0,0 +1,132 @@
+ifneq (,$(filter $(TARGET_ARCH), arm arm64))
+
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_COPY_HEADERS_TO := qcom/camera
+LOCAL_COPY_HEADERS := QCameraFormat.h
+
+LOCAL_SRC_FILES := \
+        util/QCameraBufferMaps.cpp \
+        util/QCameraCmdThread.cpp \
+        util/QCameraFlash.cpp \
+        util/QCameraPerf.cpp \
+        util/QCameraQueue.cpp \
+        util/QCameraCommon.cpp \
+        QCamera2Hal.cpp \
+        QCamera2Factory.cpp
+
+#HAL 3.0 source
+LOCAL_SRC_FILES += \
+        HAL3/QCamera3HWI.cpp \
+        HAL3/QCamera3Mem.cpp \
+        HAL3/QCamera3Stream.cpp \
+        HAL3/QCamera3Channel.cpp \
+        HAL3/QCamera3VendorTags.cpp \
+        HAL3/QCamera3PostProc.cpp \
+        HAL3/QCamera3CropRegionMapper.cpp \
+        HAL3/QCamera3StreamMem.cpp
+
+LOCAL_CFLAGS := -Wall -Wextra -Werror
+
+#HAL 1.0 source
+
+ifeq ($(TARGET_SUPPORT_HAL1),false)
+LOCAL_CFLAGS += -DQCAMERA_HAL3_SUPPORT
+else
+LOCAL_CFLAGS += -DQCAMERA_HAL1_SUPPORT
+LOCAL_SRC_FILES += \
+        HAL/QCamera2HWI.cpp \
+        HAL/QCameraMuxer.cpp \
+        HAL/QCameraMem.cpp \
+        HAL/QCameraStateMachine.cpp \
+        util/QCameraDisplay.cpp \
+        HAL/QCameraChannel.cpp \
+        HAL/QCameraStream.cpp \
+        HAL/QCameraPostProc.cpp \
+        HAL/QCamera2HWICallbacks.cpp \
+        HAL/QCameraParameters.cpp \
+        HAL/QCameraParametersIntf.cpp \
+        HAL/QCameraThermalAdapter.cpp
+endif
+
+# System header file path prefix
+LOCAL_CFLAGS += -DSYSTEM_HEADER_PREFIX=sys
+
+LOCAL_CFLAGS += -DHAS_MULTIMEDIA_HINTS -D_ANDROID
+
+ifeq ($(TARGET_USES_AOSP),true)
+LOCAL_CFLAGS += -DVANILLA_HAL
+endif
+
+ifeq (1,$(filter 1,$(shell echo "$$(( $(PLATFORM_SDK_VERSION) <= 23 ))" )))
+LOCAL_CFLAGS += -DUSE_M_AOSP
+endif
+
+#use media extension
+ifeq ($(TARGET_USES_MEDIA_EXTENSIONS), true)
+LOCAL_CFLAGS += -DUSE_MEDIA_EXTENSIONS
+endif
+
+LOCAL_CFLAGS += -std=c++11 -std=gnu++0x
+#HAL 1.0 Flags
+LOCAL_CFLAGS += -DDEFAULT_DENOISE_MODE_ON -DHAL3 -DQCAMERA_REDEFINE_LOG
+
+LOCAL_C_INCLUDES := \
+        $(LOCAL_PATH)/../mm-image-codec/qexif \
+        $(LOCAL_PATH)/../mm-image-codec/qomx_core \
+        $(LOCAL_PATH)/include \
+        $(LOCAL_PATH)/stack/common \
+        $(LOCAL_PATH)/stack/mm-camera-interface/inc \
+        $(LOCAL_PATH)/util \
+        $(LOCAL_PATH)/HAL3 \
+        hardware/libhardware/include/hardware \
+        hardware/qcom/media/libstagefrighthw \
+        hardware/qcom/media/mm-core/inc \
+        system/core/include/cutils \
+        system/core/include/system \
+        system/media/camera/include/system
+
+#HAL 1.0 Include paths
+LOCAL_C_INCLUDES += \
+        hardware/qcom/camera/QCamera2/HAL
+
+ifeq ($(TARGET_COMPILE_WITH_MSM_KERNEL),true)
+LOCAL_C_INCLUDES += $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr/include
+LOCAL_ADDITIONAL_DEPENDENCIES := $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr
+endif
+ifeq ($(TARGET_TS_MAKEUP),true)
+LOCAL_CFLAGS += -DTARGET_TS_MAKEUP
+LOCAL_C_INCLUDES += $(LOCAL_PATH)/HAL/tsMakeuplib/include
+endif
+ifneq (,$(filter msm8974 msm8916 msm8226 msm8610 msm8916 apq8084 msm8084 msm8994 msm8992 msm8952 msm8937 msm8953 msm8996 msmcobalt msmfalcon, $(TARGET_BOARD_PLATFORM)))
+    LOCAL_CFLAGS += -DVENUS_PRESENT
+endif
+
+ifneq (,$(filter msm8996 msmcobalt msmfalcon,$(TARGET_BOARD_PLATFORM)))
+    LOCAL_CFLAGS += -DUBWC_PRESENT
+endif
+
+#LOCAL_STATIC_LIBRARIES := libqcamera2_util
+LOCAL_C_INCLUDES += \
+        $(TARGET_OUT_HEADERS)/qcom/display
+LOCAL_C_INCLUDES += \
+        hardware/qcom/display/libqservice
+LOCAL_SHARED_LIBRARIES := libcamera_client liblog libhardware libutils libcutils libdl libsync libgui
+LOCAL_SHARED_LIBRARIES += libmmcamera_interface libmmjpeg_interface libui libcamera_metadata
+LOCAL_SHARED_LIBRARIES += libqdMetaData libqservice libbinder
+LOCAL_SHARED_LIBRARIES += libcutils libdl
+ifeq ($(TARGET_TS_MAKEUP),true)
+LOCAL_SHARED_LIBRARIES += libts_face_beautify_hal libts_detected_face_hal
+endif
+
+LOCAL_MODULE_RELATIVE_PATH := hw
+LOCAL_MODULE := camera.$(TARGET_BOARD_PLATFORM)
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_32_BIT_ONLY := $(BOARD_QTI_CAMERA_32BIT_ONLY)
+include $(BUILD_SHARED_LIBRARY)
+
+include $(call first-makefiles-under,$(LOCAL_PATH))
+endif
diff --git a/msmcobalt/QCamera2/HAL/QCamera2HWI.cpp b/msmcobalt/QCamera2/HAL/QCamera2HWI.cpp
new file mode 100644
index 0000000..3843cf5
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/QCamera2HWI.cpp
@@ -0,0 +1,10402 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCamera2HWI"
+
+// To remove
+#include <cutils/properties.h>
+
+// System definitions
+#include <utils/Errors.h>
+#include <dlfcn.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include "gralloc_priv.h"
+#include "native_handle.h"
+
+// Camera definitions
+#include "android/QCamera2External.h"
+#include "QCamera2HWI.h"
+#include "QCameraBufferMaps.h"
+#include "QCameraFlash.h"
+#include "QCameraTrace.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+#define MAP_TO_DRIVER_COORDINATE(val, base, scale, offset) \
+    ((int32_t)val * (int32_t)scale / (int32_t)base + (int32_t)offset)
+#define CAMERA_MIN_STREAMING_BUFFERS     3
+#define EXTRA_ZSL_PREVIEW_STREAM_BUF     2
+#define CAMERA_MIN_JPEG_ENCODING_BUFFERS 2
+#define CAMERA_MIN_VIDEO_BUFFERS         9
+#define CAMERA_MIN_CALLBACK_BUFFERS      5
+#define CAMERA_LONGSHOT_STAGES           4
+#define CAMERA_MIN_CAMERA_BATCH_BUFFERS  6
+#define CAMERA_ISP_PING_PONG_BUFFERS     2
+#define MIN_UNDEQUEUED_BUFFERS           1 // This is required if preview window is not set
+
+#define HDR_CONFIDENCE_THRESHOLD 0.4
+
+#define CAMERA_OPEN_PERF_TIME_OUT 500 // 500 milliseconds
+
+// Very long wait, just to be sure we don't deadlock
+#define CAMERA_DEFERRED_THREAD_TIMEOUT 5000000000 // 5 seconds
+#define CAMERA_DEFERRED_MAP_BUF_TIMEOUT 2000000000 // 2 seconds
+#define CAMERA_MIN_METADATA_BUFFERS 10 // Need at least 10 for ZSL snapshot
+#define CAMERA_INITIAL_MAPPABLE_PREVIEW_BUFFERS 5
+#define CAMERA_MAX_PARAM_APPLY_DELAY 3
+
+namespace qcamera {
+
+extern cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
+extern pthread_mutex_t gCamLock;
+volatile uint32_t gCamHalLogLevel = 1;
+extern uint8_t gNumCameraSessions;
+uint32_t QCamera2HardwareInterface::sNextJobId = 1;
+
+camera_device_ops_t QCamera2HardwareInterface::mCameraOps = {
+    .set_preview_window =        QCamera2HardwareInterface::set_preview_window,
+    .set_callbacks =             QCamera2HardwareInterface::set_CallBacks,
+    .enable_msg_type =           QCamera2HardwareInterface::enable_msg_type,
+    .disable_msg_type =          QCamera2HardwareInterface::disable_msg_type,
+    .msg_type_enabled =          QCamera2HardwareInterface::msg_type_enabled,
+
+    .start_preview =             QCamera2HardwareInterface::start_preview,
+    .stop_preview =              QCamera2HardwareInterface::stop_preview,
+    .preview_enabled =           QCamera2HardwareInterface::preview_enabled,
+    .store_meta_data_in_buffers= QCamera2HardwareInterface::store_meta_data_in_buffers,
+
+    .start_recording =           QCamera2HardwareInterface::start_recording,
+    .stop_recording =            QCamera2HardwareInterface::stop_recording,
+    .recording_enabled =         QCamera2HardwareInterface::recording_enabled,
+    .release_recording_frame =   QCamera2HardwareInterface::release_recording_frame,
+
+    .auto_focus =                QCamera2HardwareInterface::auto_focus,
+    .cancel_auto_focus =         QCamera2HardwareInterface::cancel_auto_focus,
+
+    .take_picture =              QCamera2HardwareInterface::take_picture,
+    .cancel_picture =            QCamera2HardwareInterface::cancel_picture,
+
+    .set_parameters =            QCamera2HardwareInterface::set_parameters,
+    .get_parameters =            QCamera2HardwareInterface::get_parameters,
+    .put_parameters =            QCamera2HardwareInterface::put_parameters,
+    .send_command =              QCamera2HardwareInterface::send_command,
+
+    .release =                   QCamera2HardwareInterface::release,
+    .dump =                      QCamera2HardwareInterface::dump,
+};
+
+/*===========================================================================
+ * FUNCTION   : set_preview_window
+ *
+ * DESCRIPTION: set preview window.
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @window  : window ops table
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::set_preview_window(struct camera_device *device,
+        struct preview_stream_ops *window)
+{
+    ATRACE_CALL();
+    int rc = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    LOGD("E camera id %d window = %p", hw->getCameraId(), window);
+
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    rc = hw->processAPI(QCAMERA_SM_EVT_SET_PREVIEW_WINDOW, (void *)window);
+    if (rc == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_SET_PREVIEW_WINDOW, &apiResult);
+        rc = apiResult.status;
+    }
+    hw->unlockAPI();
+    LOGD("X camera id %d", hw->getCameraId());
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : set_CallBacks
+ *
+ * DESCRIPTION: set callbacks for notify and data
+ *
+ * PARAMETERS :
+ *   @device     : ptr to camera device struct
+ *   @notify_cb  : notify cb
+ *   @data_cb    : data cb
+ *   @data_cb_timestamp  : video data cd with timestamp
+ *   @get_memory : ops table for request gralloc memory
+ *   @user       : user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::set_CallBacks(struct camera_device *device,
+        camera_notify_callback notify_cb,
+        camera_data_callback data_cb,
+        camera_data_timestamp_callback data_cb_timestamp,
+        camera_request_memory get_memory,
+        void *user)
+{
+    ATRACE_CALL();
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return;
+    }
+    LOGD("E camera id %d", hw->getCameraId());
+
+    qcamera_sm_evt_setcb_payload_t payload;
+    payload.notify_cb = notify_cb;
+    payload.data_cb = data_cb;
+    payload.data_cb_timestamp = data_cb_timestamp;
+    payload.get_memory = get_memory;
+    payload.user = user;
+
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    int32_t rc = hw->processAPI(QCAMERA_SM_EVT_SET_CALLBACKS, (void *)&payload);
+    if (rc == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_SET_CALLBACKS, &apiResult);
+    }
+    hw->unlockAPI();
+    LOGD("X camera id %d", hw->getCameraId());
+
+}
+
+/*===========================================================================
+ * FUNCTION   : enable_msg_type
+ *
+ * DESCRIPTION: enable certain msg type
+ *
+ * PARAMETERS :
+ *   @device     : ptr to camera device struct
+ *   @msg_type   : msg type mask
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::enable_msg_type(struct camera_device *device, int32_t msg_type)
+{
+    ATRACE_CALL();
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return;
+    }
+    LOGD("E camera id %d", hw->getCameraId());
+
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    int32_t rc = hw->processAPI(QCAMERA_SM_EVT_ENABLE_MSG_TYPE, (void *)&msg_type);
+    if (rc == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_ENABLE_MSG_TYPE, &apiResult);
+    }
+    hw->unlockAPI();
+    LOGD("X camera id %d", hw->getCameraId());
+
+}
+
+/*===========================================================================
+ * FUNCTION   : disable_msg_type
+ *
+ * DESCRIPTION: disable certain msg type
+ *
+ * PARAMETERS :
+ *   @device     : ptr to camera device struct
+ *   @msg_type   : msg type mask
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::disable_msg_type(struct camera_device *device, int32_t msg_type)
+{
+    ATRACE_CALL();
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return;
+    }
+    LOGD("E camera id %d", hw->getCameraId());
+
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    int32_t rc = hw->processAPI(QCAMERA_SM_EVT_DISABLE_MSG_TYPE, (void *)&msg_type);
+    if (rc == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_DISABLE_MSG_TYPE, &apiResult);
+    }
+    hw->unlockAPI();
+    LOGD("X camera id %d", hw->getCameraId());
+
+}
+
+/*===========================================================================
+ * FUNCTION   : msg_type_enabled
+ *
+ * DESCRIPTION: if certain msg type is enabled
+ *
+ * PARAMETERS :
+ *   @device     : ptr to camera device struct
+ *   @msg_type   : msg type mask
+ *
+ * RETURN     : 1 -- enabled
+ *              0 -- not enabled
+ *==========================================================================*/
+int QCamera2HardwareInterface::msg_type_enabled(struct camera_device *device, int32_t msg_type)
+{
+    ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    LOGD("E camera id %d", hw->getCameraId());
+
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    ret = hw->processAPI(QCAMERA_SM_EVT_MSG_TYPE_ENABLED, (void *)&msg_type);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_MSG_TYPE_ENABLED, &apiResult);
+        ret = apiResult.enabled;
+    }
+    hw->unlockAPI();
+    LOGD("X camera id %d", hw->getCameraId());
+
+   return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : prepare_preview
+ *
+ * DESCRIPTION: prepare preview
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::prepare_preview(struct camera_device *device)
+{
+    ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    LOGH("[KPI Perf]: E PROFILE_PREPARE_PREVIEW camera id %d",
+             hw->getCameraId());
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    qcamera_sm_evt_enum_t evt = QCAMERA_SM_EVT_PREPARE_PREVIEW;
+    ret = hw->processAPI(evt, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(evt, &apiResult);
+        ret = apiResult.status;
+    }
+    hw->unlockAPI();
+    LOGH("[KPI Perf]: X");
+    return ret;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : start_preview
+ *
+ * DESCRIPTION: start preview
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::start_preview(struct camera_device *device)
+{
+    KPI_ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    LOGI("[KPI Perf]: E PROFILE_START_PREVIEW camera id %d",
+             hw->getCameraId());
+
+    // Release the timed perf lock acquired in openCamera
+    hw->m_perfLock.lock_rel_timed();
+
+    hw->m_perfLock.lock_acq();
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    qcamera_sm_evt_enum_t evt = QCAMERA_SM_EVT_START_PREVIEW;
+    if (hw->isNoDisplayMode()) {
+        evt = QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW;
+    }
+    ret = hw->processAPI(evt, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(evt, &apiResult);
+        ret = apiResult.status;
+    }
+    hw->unlockAPI();
+    hw->m_bPreviewStarted = true;
+    LOGI("[KPI Perf]: X ret = %d", ret);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop_preview
+ *
+ * DESCRIPTION: stop preview
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::stop_preview(struct camera_device *device)
+{
+    KPI_ATRACE_CALL();
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return;
+    }
+    LOGI("[KPI Perf]: E PROFILE_STOP_PREVIEW camera id %d",
+             hw->getCameraId());
+
+    // Disable power Hint for preview
+    hw->m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
+
+    hw->m_perfLock.lock_acq();
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    int32_t ret = hw->processAPI(QCAMERA_SM_EVT_STOP_PREVIEW, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_STOP_PREVIEW, &apiResult);
+    }
+    hw->unlockAPI();
+    LOGI("[KPI Perf]: X ret = %d", ret);
+}
+
+/*===========================================================================
+ * FUNCTION   : preview_enabled
+ *
+ * DESCRIPTION: if preview is running
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : 1 -- running
+ *              0 -- not running
+ *==========================================================================*/
+int QCamera2HardwareInterface::preview_enabled(struct camera_device *device)
+{
+    ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    LOGD("E camera id %d", hw->getCameraId());
+
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    ret = hw->processAPI(QCAMERA_SM_EVT_PREVIEW_ENABLED, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_PREVIEW_ENABLED, &apiResult);
+        ret = apiResult.enabled;
+    }
+
+    //if preview enabled, can enable preview callback send
+    if(apiResult.enabled) {
+        hw->m_stateMachine.setPreviewCallbackNeeded(true);
+    }
+    hw->unlockAPI();
+    LOGD("X camera id %d", hw->getCameraId());
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : store_meta_data_in_buffers
+ *
+ * DESCRIPTION: if need to store meta data in buffers for video frame
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @enable  : flag if enable
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::store_meta_data_in_buffers(
+                struct camera_device *device, int enable)
+{
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    LOGD("E camera id %d", hw->getCameraId());
+
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    ret = hw->processAPI(QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS, (void *)&enable);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS, &apiResult);
+        ret = apiResult.status;
+    }
+    hw->unlockAPI();
+    LOGD("X camera id %d", hw->getCameraId());
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : restart_start_preview
+ *
+ * DESCRIPTION: start preview as part of the restart preview
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::restart_start_preview(struct camera_device *device)
+{
+    ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    LOGI("E camera id %d", hw->getCameraId());
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+
+    if (hw->getRelatedCamSyncInfo()->sync_control == CAM_SYNC_RELATED_SENSORS_ON) {
+        ret = hw->processAPI(QCAMERA_SM_EVT_RESTART_START_PREVIEW, NULL);
+        if (ret == NO_ERROR) {
+            hw->waitAPIResult(QCAMERA_SM_EVT_RESTART_START_PREVIEW, &apiResult);
+            ret = apiResult.status;
+        }
+    } else {
+        LOGE("This function is not supposed to be called in single-camera mode");
+        ret = INVALID_OPERATION;
+    }
+    // Preview restart done, update the mPreviewRestartNeeded flag to false.
+    hw->mPreviewRestartNeeded = false;
+    hw->unlockAPI();
+    LOGI("X camera id %d", hw->getCameraId());
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : restart_stop_preview
+ *
+ * DESCRIPTION: stop preview as part of the restart preview
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::restart_stop_preview(struct camera_device *device)
+{
+    ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    LOGI("E camera id %d", hw->getCameraId());
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+
+    if (hw->getRelatedCamSyncInfo()->sync_control == CAM_SYNC_RELATED_SENSORS_ON) {
+        ret = hw->processAPI(QCAMERA_SM_EVT_RESTART_STOP_PREVIEW, NULL);
+        if (ret == NO_ERROR) {
+            hw->waitAPIResult(QCAMERA_SM_EVT_RESTART_STOP_PREVIEW, &apiResult);
+            ret = apiResult.status;
+        }
+    } else {
+        LOGE("This function is not supposed to be called in single-camera mode");
+        ret = INVALID_OPERATION;
+    }
+
+    hw->unlockAPI();
+    LOGI("X camera id %d", hw->getCameraId());
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : pre_start_recording
+ *
+ * DESCRIPTION: prepare for the start recording
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::pre_start_recording(struct camera_device *device)
+{
+    ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    LOGH("[KPI Perf]: E PROFILE_PRE_START_RECORDING camera id %d",
+          hw->getCameraId());
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    ret = hw->processAPI(QCAMERA_SM_EVT_PRE_START_RECORDING, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_PRE_START_RECORDING, &apiResult);
+        ret = apiResult.status;
+    }
+    hw->unlockAPI();
+    LOGH("[KPI Perf]: X");
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : start_recording
+ *
+ * DESCRIPTION: start recording
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::start_recording(struct camera_device *device)
+{
+    ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    LOGI("[KPI Perf]: E PROFILE_START_RECORDING camera id %d",
+          hw->getCameraId());
+    // Give HWI control to call pre_start_recording in single camera mode.
+    // In dual-cam mode, this control belongs to muxer.
+    if (hw->getRelatedCamSyncInfo()->sync_control != CAM_SYNC_RELATED_SENSORS_ON) {
+        ret = pre_start_recording(device);
+        if (ret != NO_ERROR) {
+            LOGE("pre_start_recording failed with ret = %d", ret);
+            return ret;
+        }
+    }
+
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    ret = hw->processAPI(QCAMERA_SM_EVT_START_RECORDING, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_START_RECORDING, &apiResult);
+        ret = apiResult.status;
+    }
+    hw->unlockAPI();
+    hw->m_bRecordStarted = true;
+    LOGI("[KPI Perf]: X ret = %d", ret);
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop_recording
+ *
+ * DESCRIPTION: stop an active video recording session via the HAL state
+ *              machine
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::stop_recording(struct camera_device *device)
+{
+    ATRACE_CALL();
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (hw == NULL) {
+        LOGE("NULL camera device");
+        return;
+    }
+    LOGI("[KPI Perf]: E PROFILE_STOP_RECORDING camera id %d",
+             hw->getCameraId());
+
+    qcamera_api_result_t result;
+    hw->lockAPI();
+    int32_t rc = hw->processAPI(QCAMERA_SM_EVT_STOP_RECORDING, NULL);
+    if (NO_ERROR == rc) {
+        // Block until the state machine has processed the stop event.
+        hw->waitAPIResult(QCAMERA_SM_EVT_STOP_RECORDING, &result);
+    }
+    hw->unlockAPI();
+    LOGI("[KPI Perf]: X ret = %d", rc);
+}
+
+/*===========================================================================
+ * FUNCTION   : recording_enabled
+ *
+ * DESCRIPTION: query whether video recording is currently active
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : 1 -- running
+ *              0 -- not running
+ *==========================================================================*/
+int QCamera2HardwareInterface::recording_enabled(struct camera_device *device)
+{
+    ATRACE_CALL();
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (hw == NULL) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    LOGD("E camera id %d", hw->getCameraId());
+    qcamera_api_result_t result;
+    hw->lockAPI();
+    int rc = hw->processAPI(QCAMERA_SM_EVT_RECORDING_ENABLED, NULL);
+    if (NO_ERROR == rc) {
+        // On success the state machine reports the enabled flag (0/1),
+        // not a status code.
+        hw->waitAPIResult(QCAMERA_SM_EVT_RECORDING_ENABLED, &result);
+        rc = result.enabled;
+    }
+    hw->unlockAPI();
+    LOGD("X camera id %d", hw->getCameraId());
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : release_recording_frame
+ *
+ * DESCRIPTION: return a recording frame back to the HAL after the client
+ *              (encoder/app) is done with it
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @opaque  : ptr to frame to be returned (the buffer handed out in the
+ *              video data callback)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::release_recording_frame(
+            struct camera_device *device, const void *opaque)
+{
+    ATRACE_CALL();
+    int32_t ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return;
+    }
+    if (!opaque) {
+        LOGE("Error!! Frame info is NULL");
+        return;
+    }
+    LOGD("E camera id %d", hw->getCameraId());
+
+    //Close and delete duplicated native handle and FD's.
+    // Only done in metadata mode while video memory is still alive; in that
+    // mode each returned frame carries duplicated FDs that must be closed
+    // here to avoid leaking them.
+    if ((hw->mVideoMem != NULL) && (hw->mStoreMetaDataInFrame)) {
+         ret = hw->mVideoMem->closeNativeHandle(opaque, TRUE);
+        if (ret != NO_ERROR) {
+            // Bail out without forwarding a frame the HAL can't validate.
+            LOGE("Invalid video metadata");
+            return;
+        }
+    } else {
+        LOGW("Possible FD leak. Release recording called after stop");
+    }
+
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    ret = hw->processAPI(QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME, (void *)opaque);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME, &apiResult);
+    }
+    hw->unlockAPI();
+    LOGD("X camera id %d", hw->getCameraId());
+}
+
+/*===========================================================================
+ * FUNCTION   : auto_focus
+ *
+ * DESCRIPTION: kick off an auto-focus sweep through the HAL state machine
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::auto_focus(struct camera_device *device)
+{
+    KPI_ATRACE_INT("Camera:AutoFocus", 1);
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (hw == NULL) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    LOGH("[KPI Perf] : E PROFILE_AUTO_FOCUS camera id %d",
+            hw->getCameraId());
+    qcamera_api_result_t result;
+    hw->lockAPI();
+    int rc = hw->processAPI(QCAMERA_SM_EVT_START_AUTO_FOCUS, NULL);
+    if (NO_ERROR == rc) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_START_AUTO_FOCUS, &result);
+        rc = result.status;
+    }
+    hw->unlockAPI();
+    LOGH("[KPI Perf] : X ret = %d", rc);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : cancel_auto_focus
+ *
+ * DESCRIPTION: abort an in-flight auto-focus sweep
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::cancel_auto_focus(struct camera_device *device)
+{
+    ATRACE_CALL();
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (hw == NULL) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    LOGH("[KPI Perf] : E PROFILE_CANCEL_AUTO_FOCUS camera id %d",
+            hw->getCameraId());
+    qcamera_api_result_t result;
+    hw->lockAPI();
+    int rc = hw->processAPI(QCAMERA_SM_EVT_STOP_AUTO_FOCUS, NULL);
+    if (NO_ERROR == rc) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_STOP_AUTO_FOCUS, &result);
+        rc = result.status;
+    }
+    hw->unlockAPI();
+    LOGH("[KPI Perf] : X ret = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : pre_take_picture
+ *
+ * DESCRIPTION: preparation step before take_picture; restarts preview if the
+ *              state machine deems it necessary
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::pre_take_picture(struct camera_device *device)
+{
+    ATRACE_CALL();
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (hw == NULL) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    LOGH("[KPI Perf]: E PROFILE_PRE_TAKE_PICTURE camera id %d",
+            hw->getCameraId());
+    qcamera_api_result_t result;
+    hw->lockAPI();
+    int rc = hw->processAPI(QCAMERA_SM_EVT_PRE_TAKE_PICTURE, NULL);
+    if (NO_ERROR == rc) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_PRE_TAKE_PICTURE, &result);
+        rc = result.status;
+    }
+    hw->unlockAPI();
+    LOGH("[KPI Perf]: X");
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : take_picture
+ *
+ * DESCRIPTION: take picture. Handles two paths: the retro-active (ZSL
+ *              look-back) capture path where frames are picked up before LED
+ *              estimation, and the normal capture path where prepare_snapshot
+ *              runs before the capture event.
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::take_picture(struct camera_device *device)
+{
+    KPI_ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    LOGI("[KPI Perf]: E PROFILE_TAKE_PICTURE camera id %d",
+             hw->getCameraId());
+    // Boost performance for single captures; longshot manages this elsewhere.
+    // NOTE(review): the perf lock acquired here is not released on the
+    // early-return paths below -- confirm lock_rel() is handled elsewhere.
+    if (!hw->mLongshotEnabled) {
+        hw->m_perfLock.lock_acq();
+    }
+    qcamera_api_result_t apiResult;
+
+   /** Added support for Retro-active Frames:
+     *  takePicture() is called before preparing Snapshot to indicate the
+     *  mm-camera-channel to pick up legacy frames even
+     *  before LED estimation is triggered.
+     */
+
+    LOGH("isLiveSnap %d, isZSL %d, isHDR %d longshot = %d",
+           hw->isLiveSnapshot(), hw->isZSLMode(), hw->isHDRMode(),
+           hw->isLongshotEnabled());
+
+    // Check for Retro-active Frames
+    // Retro path requires ZSL and is mutually exclusive with live snapshot,
+    // HDR, and longshot.
+    if ((hw->mParameters.getNumOfRetroSnapshots() > 0) &&
+        !hw->isLiveSnapshot() && hw->isZSLMode() &&
+        !hw->isHDRMode() && !hw->isLongshotEnabled()) {
+        // Set Retro Picture Mode
+        hw->setRetroPicture(1);
+        hw->m_bLedAfAecLock = 0;
+        LOGL("Retro Enabled");
+
+        // Give HWI control to call pre_take_picture in single camera mode.
+        // In dual-cam mode, this control belongs to muxer.
+        if (hw->getRelatedCamSyncInfo()->sync_control != CAM_SYNC_RELATED_SENSORS_ON) {
+            ret = pre_take_picture(device);
+            if (ret != NO_ERROR) {
+                LOGE("pre_take_picture failed with ret = %d",ret);
+                return ret;
+            }
+        }
+
+        /* Call take Picture for total number of snapshots required.
+             This includes the number of retro frames and normal frames */
+        hw->lockAPI();
+        ret = hw->processAPI(QCAMERA_SM_EVT_TAKE_PICTURE, NULL);
+        if (ret == NO_ERROR) {
+          // Wait for retro frames, before calling prepare snapshot
+          LOGD("Wait for Retro frames to be done");
+          hw->waitAPIResult(QCAMERA_SM_EVT_TAKE_PICTURE, &apiResult);
+            ret = apiResult.status;
+        }
+        /* Unlock API since it is acquired in prepare snapshot seperately */
+        hw->unlockAPI();
+
+        /* Prepare snapshot in case LED needs to be flashed */
+        // In the retro path, prepare runs AFTER the capture event on purpose.
+        LOGD("Start Prepare Snapshot");
+        ret = hw->prepare_snapshot(device);
+    }
+    else {
+        hw->setRetroPicture(0);
+        // Check if prepare snapshot is done
+        if (!hw->mPrepSnapRun) {
+            // Ignore the status from prepare_snapshot
+            hw->prepare_snapshot(device);
+        }
+
+        // Give HWI control to call pre_take_picture in single camera mode.
+        // In dual-cam mode, this control belongs to muxer.
+        if (hw->getRelatedCamSyncInfo()->sync_control != CAM_SYNC_RELATED_SENSORS_ON) {
+            ret = pre_take_picture(device);
+            if (ret != NO_ERROR) {
+                LOGE("pre_take_picture failed with ret = %d",ret);
+                return ret;
+            }
+        }
+
+        // Regardless what the result value for prepare_snapshot,
+        // go ahead with capture anyway. Just like the way autofocus
+        // is handled in capture case
+        /* capture */
+        LOGL("Capturing normal frames");
+        hw->lockAPI();
+        ret = hw->processAPI(QCAMERA_SM_EVT_TAKE_PICTURE, NULL);
+        if (ret == NO_ERROR) {
+          hw->waitAPIResult(QCAMERA_SM_EVT_TAKE_PICTURE, &apiResult);
+            ret = apiResult.status;
+        }
+        hw->unlockAPI();
+        if (!hw->isLongshotEnabled()){
+            // For longshot mode, we prepare snapshot only once
+            hw->mPrepSnapRun = false;
+         }
+    }
+    LOGI("[KPI Perf]: X ret = %d", ret);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : cancel_picture
+ *
+ * DESCRIPTION: cancel the take-picture request currently in flight
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::cancel_picture(struct camera_device *device)
+{
+    ATRACE_CALL();
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (hw == NULL) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    LOGI("[KPI Perf]: E PROFILE_CANCEL_PICTURE camera id %d",
+            hw->getCameraId());
+    qcamera_api_result_t result;
+    hw->lockAPI();
+    int rc = hw->processAPI(QCAMERA_SM_EVT_CANCEL_PICTURE, NULL);
+    if (NO_ERROR == rc) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_CANCEL_PICTURE, &result);
+        rc = result.status;
+    }
+    hw->unlockAPI();
+    LOGI("[KPI Perf]: X camera id %d ret = %d", hw->getCameraId(), rc);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : set_parameters
+ *
+ * DESCRIPTION: set camera parameters. In single-camera mode this also drives
+ *              the stop -> commit -> restart sequence when the new parameters
+ *              require a stream restart; in dual-cam mode the muxer drives
+ *              that sequence via stop_after_set_params/commit_params/
+ *              restart_after_set_params instead.
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @parms   : string of packed parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::set_parameters(struct camera_device *device,
+                                              const char *parms)
+{
+    ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    LOGD("E camera id %d", hw->getCameraId());
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    // Stage 1: parse and validate the packed parameter string.
+    ret = hw->processAPI(QCAMERA_SM_EVT_SET_PARAMS, (void *)parms);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_SET_PARAMS, &apiResult);
+        ret = apiResult.status;
+    }
+
+    // Give HWI control to restart (if necessary) after set params
+    // in single camera mode. In dual-cam mode, this control belongs to muxer.
+    if (hw->getRelatedCamSyncInfo()->sync_control != CAM_SYNC_RELATED_SENSORS_ON) {
+        // Stage 2: stop streams first when the change needs a restart.
+        if ((ret == NO_ERROR) && hw->getNeedRestart()) {
+            LOGD("stopping after param change");
+            ret = hw->processAPI(QCAMERA_SM_EVT_SET_PARAMS_STOP, NULL);
+            if (ret == NO_ERROR) {
+                hw->waitAPIResult(QCAMERA_SM_EVT_SET_PARAMS_STOP, &apiResult);
+                ret = apiResult.status;
+            }
+        }
+
+        // Stage 3: commit always runs (even without a restart) so the new
+        // values reach the backend.
+        if (ret == NO_ERROR) {
+            LOGD("committing param change");
+            ret = hw->processAPI(QCAMERA_SM_EVT_SET_PARAMS_COMMIT, NULL);
+            if (ret == NO_ERROR) {
+                hw->waitAPIResult(QCAMERA_SM_EVT_SET_PARAMS_COMMIT, &apiResult);
+                ret = apiResult.status;
+            }
+        }
+
+        // Stage 4: bring streams back up after a restart-requiring change.
+        if ((ret == NO_ERROR) && hw->getNeedRestart()) {
+            LOGD("restarting after param change");
+            ret = hw->processAPI(QCAMERA_SM_EVT_SET_PARAMS_RESTART, NULL);
+            if (ret == NO_ERROR) {
+                hw->waitAPIResult(QCAMERA_SM_EVT_SET_PARAMS_RESTART, &apiResult);
+                ret = apiResult.status;
+            }
+        }
+    }
+
+    hw->unlockAPI();
+    LOGD("X camera id %d ret %d", hw->getCameraId(), ret);
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop_after_set_params
+ *
+ * DESCRIPTION: stop streams after a set-parameter call, if necessary; only
+ *              valid in dual-camera (related sensors) mode where the muxer
+ *              owns the stop/commit/restart sequencing
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::stop_after_set_params(struct camera_device *device)
+{
+    ATRACE_CALL();
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (hw == NULL) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    LOGD("E camera id %d", hw->getCameraId());
+    int rc = NO_ERROR;
+    hw->lockAPI();
+    qcamera_api_result_t result;
+
+    if (hw->getRelatedCamSyncInfo()->sync_control != CAM_SYNC_RELATED_SENSORS_ON) {
+        // Single-camera mode drives this step from set_parameters() itself.
+        LOGE("is not supposed to be called in single-camera mode");
+        rc = INVALID_OPERATION;
+    } else {
+        rc = hw->processAPI(QCAMERA_SM_EVT_SET_PARAMS_STOP, NULL);
+        if (NO_ERROR == rc) {
+            hw->waitAPIResult(QCAMERA_SM_EVT_SET_PARAMS_STOP, &result);
+            rc = result.status;
+        }
+    }
+
+    hw->unlockAPI();
+    LOGD("X camera id %d", hw->getCameraId());
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : commit_params
+ *
+ * DESCRIPTION: commit parameters after a set-parameter call; only valid in
+ *              dual-camera (related sensors) mode where the muxer owns the
+ *              stop/commit/restart sequencing
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::commit_params(struct camera_device *device)
+{
+    ATRACE_CALL();
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (hw == NULL) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    LOGD("E camera id %d", hw->getCameraId());
+    int rc = NO_ERROR;
+    hw->lockAPI();
+    qcamera_api_result_t result;
+
+    if (hw->getRelatedCamSyncInfo()->sync_control != CAM_SYNC_RELATED_SENSORS_ON) {
+        // Single-camera mode commits from set_parameters() itself.
+        LOGE("is not supposed to be called in single-camera mode");
+        rc = INVALID_OPERATION;
+    } else {
+        rc = hw->processAPI(QCAMERA_SM_EVT_SET_PARAMS_COMMIT, NULL);
+        if (NO_ERROR == rc) {
+            hw->waitAPIResult(QCAMERA_SM_EVT_SET_PARAMS_COMMIT, &result);
+            rc = result.status;
+        }
+    }
+
+    hw->unlockAPI();
+    LOGD("X camera id %d", hw->getCameraId());
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : restart_after_set_params
+ *
+ * DESCRIPTION: restart streams after a set-parameter call, if necessary;
+ *              only valid in dual-camera (related sensors) mode where the
+ *              muxer owns the stop/commit/restart sequencing
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::restart_after_set_params(struct camera_device *device)
+{
+    ATRACE_CALL();
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (hw == NULL) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    LOGD("E camera id %d", hw->getCameraId());
+    int rc = NO_ERROR;
+    hw->lockAPI();
+    qcamera_api_result_t result;
+
+    if (hw->getRelatedCamSyncInfo()->sync_control != CAM_SYNC_RELATED_SENSORS_ON) {
+        // Single-camera mode restarts from set_parameters() itself.
+        LOGE("is not supposed to be called in single-camera mode");
+        rc = INVALID_OPERATION;
+    } else {
+        rc = hw->processAPI(QCAMERA_SM_EVT_SET_PARAMS_RESTART, NULL);
+        if (NO_ERROR == rc) {
+            hw->waitAPIResult(QCAMERA_SM_EVT_SET_PARAMS_RESTART, &result);
+            rc = result.status;
+        }
+    }
+
+    hw->unlockAPI();
+    LOGD("X camera id %d", hw->getCameraId());
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : get_parameters
+ *
+ * DESCRIPTION: query camera parameters
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : packed parameters in a string, or NULL on failure; the
+ *              caller is expected to hand the string back via
+ *              put_parameters() (standard HAL1 contract -- verify)
+ *==========================================================================*/
+char* QCamera2HardwareInterface::get_parameters(struct camera_device *device)
+{
+    ATRACE_CALL();
+    char *ret = NULL;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return NULL;
+    }
+    LOGD("E camera id %d", hw->getCameraId());
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    int32_t rc = hw->processAPI(QCAMERA_SM_EVT_GET_PARAMS, NULL);
+    if (rc == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_GET_PARAMS, &apiResult);
+        ret = apiResult.params;
+    }
+    hw->unlockAPI();
+    // Fix: exit trace was wrongly tagged "E" (entry); siblings log "X" here.
+    LOGD("X camera id %d", hw->getCameraId());
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : put_parameters
+ *
+ * DESCRIPTION: return camera parameters string (from get_parameters) back
+ *              to HAL for release
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @parm    : ptr to parameter string to be returned
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::put_parameters(struct camera_device *device,
+                                               char *parm)
+{
+    ATRACE_CALL();
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return;
+    }
+    LOGD("E camera id %d", hw->getCameraId());
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    int32_t ret = hw->processAPI(QCAMERA_SM_EVT_PUT_PARAMS, (void *)parm);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_PUT_PARAMS, &apiResult);
+    }
+    hw->unlockAPI();
+    // Fix: exit trace was wrongly tagged "E" (entry); siblings log "X" here.
+    LOGD("X camera id %d", hw->getCameraId());
+}
+
+/*===========================================================================
+ * FUNCTION   : send_command
+ *
+ * DESCRIPTION: execute a HAL1 send_command request through the state machine
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @cmd     : cmd to be executed
+ *   @arg1    : optional argument1
+ *   @arg2    : optional argument2
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::send_command(struct camera_device *device,
+                                            int32_t cmd,
+                                            int32_t arg1,
+                                            int32_t arg2)
+{
+    ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    LOGD("E camera id %d", hw->getCameraId());
+
+    // Bundle the command and its arguments so the state machine thread can
+    // consume them as a single payload.
+    qcamera_sm_evt_command_payload_t payload;
+    memset(&payload, 0, sizeof(qcamera_sm_evt_command_payload_t));
+    payload.cmd = cmd;
+    payload.arg1 = arg1;
+    payload.arg2 = arg2;
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    ret = hw->processAPI(QCAMERA_SM_EVT_SEND_COMMAND, (void *)&payload);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_SEND_COMMAND, &apiResult);
+        ret = apiResult.status;
+    }
+    hw->unlockAPI();
+    // Fix: exit trace was wrongly tagged "E" (entry); siblings log "X" here.
+    LOGD("X camera id %d", hw->getCameraId());
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : send_command_restart
+ *
+ * DESCRIPTION: restart streams if necessary after a send_command
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @cmd     : cmd to be executed
+ *   @arg1    : optional argument1
+ *   @arg2    : optional argument2
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::send_command_restart(struct camera_device *device,
+        int32_t cmd,
+        int32_t arg1,
+        int32_t arg2)
+{
+    ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+            reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+
+    // Same payload shape as send_command; the restart event re-uses it.
+    qcamera_sm_evt_command_payload_t payload;
+    memset(&payload, 0, sizeof(qcamera_sm_evt_command_payload_t));
+    payload.cmd = cmd;
+    payload.arg1 = arg1;
+    payload.arg2 = arg2;
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    ret = hw->processAPI(QCAMERA_SM_EVT_SEND_COMMAND_RESTART, (void *)&payload);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_SEND_COMMAND_RESTART, &apiResult);
+        ret = apiResult.status;
+    }
+    hw->unlockAPI();
+    // Fix: exit trace was wrongly tagged "E" (entry); siblings log "X" here.
+    LOGD("X camera id %d", hw->getCameraId());
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : release
+ *
+ * DESCRIPTION: release camera resources held by the HAL (state machine
+ *              RELEASE event); the device itself stays open
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::release(struct camera_device *device)
+{
+    ATRACE_CALL();
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return;
+    }
+    LOGD("E camera id %d", hw->getCameraId());
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    int32_t ret = hw->processAPI(QCAMERA_SM_EVT_RELEASE, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_RELEASE, &apiResult);
+    }
+    hw->unlockAPI();
+    // Fix: exit trace was wrongly tagged "E" (entry); siblings log "X" here.
+    LOGD("X camera id %d", hw->getCameraId());
+}
+
+/*===========================================================================
+ * FUNCTION   : dump
+ *
+ * DESCRIPTION: dump camera status
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @fd      : fd for status to be dumped to
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::dump(struct camera_device *device, int fd)
+{
+    int ret = NO_ERROR;
+
+    //Log level property is read when "adb shell dumpsys media.camera" is
+    //called so that the log level can be controlled without restarting
+    //media server
+    getLogLevel();
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    LOGD("E camera id %d", hw->getCameraId());
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    // fd is passed by address; the state machine handler writes to it.
+    ret = hw->processAPI(QCAMERA_SM_EVT_DUMP, (void *)&fd);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_DUMP, &apiResult);
+        ret = apiResult.status;
+    }
+    hw->unlockAPI();
+    // Fix: exit trace was wrongly tagged "E" (entry); siblings log "X" here.
+    LOGD("X camera id %d", hw->getCameraId());
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : close_camera_device
+ *
+ * DESCRIPTION: close camera device and destroy the HAL instance
+ *
+ * PARAMETERS :
+ *   @hw_dev  : ptr to generic hw device struct (wraps a camera_device_t)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::close_camera_device(hw_device_t *hw_dev)
+{
+    KPI_ATRACE_CALL();
+    int ret = NO_ERROR;
+
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(
+            reinterpret_cast<camera_device_t *>(hw_dev)->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    LOGI("[KPI Perf]: E camera id %d", hw->getCameraId());
+    // The destructor performs the actual teardown of the HAL instance.
+    delete hw;
+    LOGI("[KPI Perf]: X");
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : register_face_image
+ *
+ * DESCRIPTION: register a face image into imaging lib for face
+ *              authentication / face recognition
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @img_ptr : ptr to image buffer
+ *   @config  : ptr to config about input image, i.e., format, dimension, and etc.
+ *
+ * RETURN     : >=0 unique ID of face registered.
+ *              <0  failure.
+ *==========================================================================*/
+int QCamera2HardwareInterface::register_face_image(struct camera_device *device,
+                                                   void *img_ptr,
+                                                   cam_pp_offline_src_config_t *config)
+{
+    ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    LOGD("E camera id %d", hw->getCameraId());
+    // Bundle image pointer and config into one payload for the state machine.
+    qcamera_sm_evt_reg_face_payload_t payload;
+    memset(&payload, 0, sizeof(qcamera_sm_evt_reg_face_payload_t));
+    payload.img_ptr = img_ptr;
+    payload.config = config;
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+    ret = hw->processAPI(QCAMERA_SM_EVT_REG_FACE_IMAGE, (void *)&payload);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_REG_FACE_IMAGE, &apiResult);
+        // On success the result carries the registered face ID, not a status.
+        ret = apiResult.handle;
+    }
+    hw->unlockAPI();
+    // Fix: exit trace was wrongly tagged "E" (entry); siblings log "X" here.
+    LOGD("X camera id %d", hw->getCameraId());
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : prepare_snapshot
+ *
+ * DESCRIPTION: prepares hardware for snapshot; only dispatches the prepare
+ *              event when flash (or chroma flash) may be needed, and runs at
+ *              most once per longshot burst
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::prepare_snapshot(struct camera_device *device)
+{
+    ATRACE_CALL();
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    if (hw->isLongshotEnabled() && hw->mPrepSnapRun == true) {
+        // For longshot mode, we prepare snapshot only once
+        LOGH("prepare snapshot only once ");
+        return NO_ERROR;
+    }
+    LOGH("[KPI Perf]: E PROFILE_PREPARE_SNAPSHOT camera id %d",
+             hw->getCameraId());
+    hw->lockAPI();
+    qcamera_api_result_t apiResult;
+
+    /* Prepare snapshot in case LED needs to be flashed */
+    if (hw->mFlashNeeded || hw->mParameters.isChromaFlashEnabled()) {
+        /* Prepare snapshot in case LED needs to be flashed */
+        ret = hw->processAPI(QCAMERA_SM_EVT_PREPARE_SNAPSHOT, NULL);
+        if (ret == NO_ERROR) {
+          hw->waitAPIResult(QCAMERA_SM_EVT_PREPARE_SNAPSHOT, &apiResult);
+            ret = apiResult.status;
+        }
+        // Remember that prepare ran so take_picture()/longshot skip repeats.
+        hw->mPrepSnapRun = true;
+    }
+    hw->unlockAPI();
+    LOGH("[KPI Perf]: X, ret: %d", ret);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCamera2HardwareInterface
+ *
+ * DESCRIPTION: constructor; inits members, sync primitives and worker threads
+ *
+ * PARAMETERS :
+ *   @cameraId  : camera ID
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera2HardwareInterface::QCamera2HardwareInterface(uint32_t cameraId)
+    : mCameraId(cameraId),
+      mCameraHandle(NULL),
+      mCameraOpened(false),
+      m_bRelCamCalibValid(false),
+      mPreviewWindow(NULL),
+      mMsgEnabled(0),
+      mStoreMetaDataInFrame(0),
+      mJpegCb(NULL),
+      mCallbackCookie(NULL),
+      mJpegCallbackCookie(NULL),
+      m_bMpoEnabled(TRUE),
+      m_stateMachine(this),
+      m_smThreadActive(true),
+      m_postprocessor(this),
+      m_thermalAdapter(QCameraThermalAdapter::getInstance()),
+      m_cbNotifier(this),
+      m_bPreviewStarted(false),
+      m_bRecordStarted(false),
+      m_currentFocusState(CAM_AF_STATE_INACTIVE),
+      mDumpFrmCnt(0U),
+      mDumpSkipCnt(0U),
+      mThermalLevel(QCAMERA_THERMAL_NO_ADJUSTMENT),
+      mActiveAF(false),
+      m_HDRSceneEnabled(false),
+      mLongshotEnabled(false),
+      mLiveSnapshotThread(0),
+      mIntPicThread(0),
+      mFlashNeeded(false),
+      mDeviceRotation(0U),
+      mCaptureRotation(0U),
+      mJpegExifRotation(0U),
+      mUseJpegExifRotation(false),
+      mIs3ALocked(false),
+      mPrepSnapRun(false),
+      mZoomLevel(0),
+      mPreviewRestartNeeded(false),
+      mVFrameCount(0),
+      mVLastFrameCount(0),
+      mVLastFpsTime(0),
+      mVFps(0),
+      mPFrameCount(0),
+      mPLastFrameCount(0),
+      mPLastFpsTime(0),
+      mPFps(0),
+      mInstantAecFrameCount(0),
+      m_bIntJpegEvtPending(false),
+      m_bIntRawEvtPending(false),
+      mReprocJob(0),
+      mJpegJob(0),
+      mMetadataAllocJob(0),
+      mInitPProcJob(0),
+      mParamAllocJob(0),
+      mParamInitJob(0),
+      mOutputCount(0),
+      mInputCount(0),
+      mAdvancedCaptureConfigured(false),
+      mHDRBracketingEnabled(false),
+      mNumPreviewFaces(-1),
+      mJpegClientHandle(0),
+      mJpegHandleOwner(false),
+      mMetadataMem(NULL),
+      mVideoMem(NULL),
+      mCACDoneReceived(false),
+      m_bNeedRestart(false),
+      mBootToMonoTimestampOffset(0)
+{
+#ifdef TARGET_TS_MAKEUP
+    memset(&mFaceRect, -1, sizeof(mFaceRect));
+#endif
+    getLogLevel();
+    ATRACE_CALL();
+    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
+    mCameraDevice.common.version = HARDWARE_DEVICE_API_VERSION(1, 0);
+    mCameraDevice.common.close = close_camera_device;
+    mCameraDevice.ops = &mCameraOps;
+    mCameraDevice.priv = this;
+
+    pthread_mutex_init(&m_lock, NULL);
+    pthread_cond_init(&m_cond, NULL);
+
+    m_apiResultList = NULL;
+
+    pthread_mutex_init(&m_evtLock, NULL);
+    pthread_cond_init(&m_evtCond, NULL);
+    memset(&m_evtResult, 0, sizeof(qcamera_api_result_t));
+
+
+    pthread_mutex_init(&m_int_lock, NULL);
+    pthread_cond_init(&m_int_cond, NULL);
+
+    memset(m_channels, 0, sizeof(m_channels));
+
+    memset(&mExifParams, 0, sizeof(mm_jpeg_exif_params_t));
+
+    memset(m_BackendFileName, 0, QCAMERA_MAX_FILEPATH_LENGTH);
+
+    memset(mDefOngoingJobs, 0, sizeof(mDefOngoingJobs));
+    memset(&mJpegMetadata, 0, sizeof(mJpegMetadata));
+    memset(&mJpegHandle, 0, sizeof(mJpegHandle));
+    memset(&mJpegMpoHandle, 0, sizeof(mJpegMpoHandle));
+
+    mDeferredWorkThread.launch(deferredWorkRoutine, this);
+    mDeferredWorkThread.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, FALSE, FALSE);
+    m_perfLock.lock_init();
+
+    pthread_mutex_init(&mGrallocLock, NULL);
+    mEnqueuedBuffers = 0;
+    mFrameSkipStart = 0;
+    mFrameSkipEnd = 0;
+    mLastPreviewFrameID = 0;
+
+    //Query GPU surface stride alignment via libadreno_utils, if present.
+    lib_surface_utils = NULL;
+    LINK_get_surface_pixel_alignment = NULL;
+    mSurfaceStridePadding = CAM_PAD_TO_32;
+    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
+    if (lib_surface_utils) {
+        *(void **)&LINK_get_surface_pixel_alignment =
+                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
+         if (LINK_get_surface_pixel_alignment) {
+             mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
+         }
+         dlclose(lib_surface_utils);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera2HardwareInterface
+ *
+ * DESCRIPTION: destructor; stops deferred thread, closes camera, frees locks
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera2HardwareInterface::~QCamera2HardwareInterface()
+{
+    LOGH("E");
+
+    mDeferredWorkThread.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, TRUE, TRUE);
+    mDeferredWorkThread.exit();
+
+    if (mMetadataMem != NULL) {
+        delete mMetadataMem;
+        mMetadataMem = NULL;
+    }
+
+    m_perfLock.lock_acq();
+    lockAPI();
+    m_smThreadActive = false;
+    unlockAPI();
+    m_stateMachine.releaseThread();
+    closeCamera();
+    m_perfLock.lock_rel();
+    m_perfLock.lock_deinit();
+    pthread_mutex_destroy(&m_lock);
+    pthread_cond_destroy(&m_cond);
+    pthread_mutex_destroy(&m_evtLock);
+    pthread_cond_destroy(&m_evtCond);
+    pthread_mutex_destroy(&m_int_lock);
+    pthread_cond_destroy(&m_int_cond);
+    pthread_mutex_destroy(&mGrallocLock);
+    LOGH("X");
+}
+
+/*===========================================================================
+ * FUNCTION   : deferPPInit
+ *
+ * DESCRIPTION: Queue postproc init task to deferred thread
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : uint32_t job id of pproc init job
+ *              0  -- failure
+ *==========================================================================*/
+uint32_t QCamera2HardwareInterface::deferPPInit()
+{
+    // package jpeg event callback + user data for the deferred PPROC_INIT job
+    DeferWorkArgs args;
+    DeferPProcInitArgs pprocInitArgs;
+
+    memset(&args, 0, sizeof(DeferWorkArgs));
+    memset(&pprocInitArgs, 0, sizeof(DeferPProcInitArgs));
+
+    pprocInitArgs.jpeg_cb = jpegEvtHandle;
+    pprocInitArgs.user_data = this;
+    args.pprocInitArgs = pprocInitArgs;
+
+    return queueDeferredWork(CMD_DEF_PPROC_INIT,
+            args);
+}
+
+/*===========================================================================
+ * FUNCTION   : openCamera
+ *
+ * DESCRIPTION: open camera
+ *
+ * PARAMETERS :
+ *   @hw_device  : double ptr for camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::openCamera(struct hw_device_t **hw_device)
+{
+    KPI_ATRACE_CALL();
+    int rc = NO_ERROR;
+    if (mCameraOpened) {
+        *hw_device = NULL;
+        LOGE("Permission Denied");
+        return PERMISSION_DENIED;
+    }
+    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
+            mCameraId);
+    m_perfLock.lock_acq_timed(CAMERA_OPEN_PERF_TIME_OUT);
+    rc = openCamera();
+    if (rc == NO_ERROR){
+        *hw_device = &mCameraDevice.common;
+        if (m_thermalAdapter.init(this) != 0) {
+          LOGW("Init thermal adapter failed");
+        }
+    }
+    else
+        *hw_device = NULL;
+
+    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
+            mCameraId, rc);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : openCamera
+ *
+ * DESCRIPTION: open camera
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::openCamera()
+{
+    int32_t rc = NO_ERROR;
+    char value[PROPERTY_VALUE_MAX];
+
+    if (mCameraHandle) {
+        LOGE("Failure: Camera already opened");
+        return ALREADY_EXISTS;
+    }
+
+    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
+    if (rc < 0) {
+        LOGE("Failed to reserve flash for camera id: %d",
+                mCameraId);
+        return UNKNOWN_ERROR;
+    }
+
+    // alloc param buffer
+    DeferWorkArgs args;
+    memset(&args, 0, sizeof(args));
+    mParamAllocJob = queueDeferredWork(CMD_DEF_PARAM_ALLOC, args);
+    if (mParamAllocJob == 0) {
+        LOGE("Failed queueing PARAM_ALLOC job");
+        return -ENOMEM;
+    }
+
+    if (gCamCapability[mCameraId] != NULL) {
+        // allocate metadata buffers
+        DeferWorkArgs args;
+        DeferMetadataAllocArgs metadataAllocArgs;
+
+        memset(&args, 0, sizeof(args));
+        memset(&metadataAllocArgs, 0, sizeof(metadataAllocArgs));
+
+        uint32_t padding =
+                gCamCapability[mCameraId]->padding_info.plane_padding;
+        metadataAllocArgs.size = PAD_TO_SIZE(sizeof(metadata_buffer_t),
+                padding);
+        metadataAllocArgs.bufferCnt = CAMERA_MIN_METADATA_BUFFERS;
+        args.metadataAllocArgs = metadataAllocArgs;
+
+        mMetadataAllocJob = queueDeferredWork(CMD_DEF_METADATA_ALLOC, args);
+        if (mMetadataAllocJob == 0) {
+            LOGE("Failed to allocate metadata buffer");
+            rc = -ENOMEM;
+            goto error_exit1;
+        }
+
+        rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
+        if (rc) {
+            LOGE("camera_open failed. rc = %d, mCameraHandle = %p",
+                     rc, mCameraHandle);
+            goto error_exit2;
+        }
+
+        mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
+                camEvtHandle,
+                (void *) this);
+    } else {
+        LOGH("Capabilities not inited, initializing now.");
+
+        rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
+        if (rc) {
+            LOGE("camera_open failed. rc = %d, mCameraHandle = %p",
+                     rc, mCameraHandle);
+            goto error_exit2;
+        }
+
+        if(NO_ERROR != initCapabilities(mCameraId,mCameraHandle)) {
+            LOGE("initCapabilities failed.");
+            rc = UNKNOWN_ERROR;
+            goto error_exit3;
+        }
+
+        mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
+                camEvtHandle,
+                (void *) this);
+    }
+
+    // Init params in the background
+    // 1. It's safe to queue init job, even if alloc job is not yet complete.
+    // It will be queued to the same thread, so the alloc is guaranteed to
+    // finish first.
+    // 2. However, it is not safe to begin param init until after camera is
+    // open. That is why we wait until after camera open completes to schedule
+    // this task.
+    memset(&args, 0, sizeof(args));
+    mParamInitJob = queueDeferredWork(CMD_DEF_PARAM_INIT, args);
+    if (mParamInitJob == 0) {
+        LOGE("Failed queuing PARAM_INIT job");
+        rc = -ENOMEM;
+        goto error_exit3;
+    }
+
+    mCameraOpened = true;
+
+    //Notify display HAL that a camera session is active.
+    //But avoid calling the same during bootup because camera service might open/close
+    //cameras at boot time during its initialization and display service will also internally
+    //wait for camera service to initialize first while calling this display API, resulting in a
+    //deadlock situation. Since boot time camera open/close calls are made only to fetch
+    //capabilities, no need of this display bw optimization.
+    //Use "service.bootanim.exit" property to know boot status.
+    property_get("service.bootanim.exit", value, "0");
+    if (atoi(value) == 1) {
+        pthread_mutex_lock(&gCamLock);
+        if (gNumCameraSessions++ == 0) {
+            setCameraLaunchStatus(true);
+        }
+        pthread_mutex_unlock(&gCamLock);
+    }
+
+    // Setprop to decide the time source (whether boottime or monotonic).
+    // By default, use monotonic time.
+    property_get("persist.camera.time.monotonic", value, "1");
+    mBootToMonoTimestampOffset = 0;
+    if (atoi(value) == 1) {
+        // if monotonic is set, then need to use time in monotonic.
+        // So, Measure the clock offset between BOOTTIME and MONOTONIC
+        // The clock domain source for ISP is BOOTTIME and
+        // for Video/display is MONOTONIC
+        // The below offset is used to convert from clock domain of other subsystem
+        // (video/hardware composer) to that of camera. Assumption is that this
+        // offset won't change during the life cycle of the camera device. In other
+        // words, camera device shouldn't be open during CPU suspend.
+        mBootToMonoTimestampOffset = getBootToMonoTimeOffset();
+    }
+    LOGH("mBootToMonoTimestampOffset = %lld", mBootToMonoTimestampOffset);
+
+    return NO_ERROR;
+
+error_exit3:
+    if(mJpegClientHandle) {
+        deinitJpegHandle();
+    }
+    mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
+    mCameraHandle = NULL;
+error_exit2:
+    waitDeferredWork(mMetadataAllocJob);
+error_exit1:
+    waitDeferredWork(mParamAllocJob);
+    return rc;
+
+}
+
+/*===========================================================================
+ * FUNCTION   : bundleRelatedCameras
+ *
+ * DESCRIPTION: bundle cameras to enable syncing of cameras
+ *
+ * PARAMETERS :
+ *   @syncOn     : indicates whether syncing is On or Off
+ *   @sessionid  : session id for other camera session
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::bundleRelatedCameras(bool syncOn,
+            uint32_t sessionid)
+{
+    LOGD("bundleRelatedCameras sync %d with sessionid %d",
+            syncOn, sessionid);
+
+    int32_t rc = mParameters.bundleRelatedCameras(syncOn, sessionid);
+    if (rc != NO_ERROR) {
+        LOGE("bundleRelatedCameras failed %d", rc);
+        return rc;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCameraSessionId
+ *
+ * DESCRIPTION: gets the backend session Id of this HWI instance
+ *
+ * PARAMETERS :
+ *   @session_id : pointer to the output session id
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::getCameraSessionId(uint32_t* session_id)
+{
+    int32_t rc = NO_ERROR;
+
+    if(session_id != NULL) {
+        rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
+                session_id);
+        LOGD("Getting Camera Session Id %d", *session_id);
+    } else {
+        LOGE("Session Id is Null");
+        return UNKNOWN_ERROR;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : isFrameSyncEnabled
+ *
+ * DESCRIPTION: returns whether frame sync is enabled (from mParameters)
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : bool indicating whether frame sync is enabled
+ *==========================================================================*/
+bool QCamera2HardwareInterface::isFrameSyncEnabled(void)
+{
+    return mParameters.isFrameSyncEnabled();
+}
+
+/*===========================================================================
+ * FUNCTION   : setFrameSyncEnabled
+ *
+ * DESCRIPTION: sets whether frame sync is enabled (delegates to mParameters)
+ *
+ * PARAMETERS :
+ *   @enable  : flag whether to enable or disable frame sync
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::setFrameSyncEnabled(bool enable)
+{
+    return mParameters.setFrameSyncEnabled(enable);
+}
+
+/*===========================================================================
+ * FUNCTION   : getRelatedCamSyncInfo
+ *
+ * DESCRIPTION: returns the related cam sync info for this HWI instance
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : const pointer to cam_sync_related_sensors_event_info_t
+ *==========================================================================*/
+const cam_sync_related_sensors_event_info_t*
+        QCamera2HardwareInterface::getRelatedCamSyncInfo(void)
+{
+    return mParameters.getRelatedCamSyncInfo();
+}
+
+/*===========================================================================
+ * FUNCTION   : setRelatedCamSyncInfo
+ *
+ * DESCRIPTION: sets the related cam sync info for this HWI instance
+ *
+ * PARAMETERS :
+ *   @info  : ptr to related cam info parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              BAD_TYPE if info is NULL; non-zero failure code otherwise
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::setRelatedCamSyncInfo(
+        cam_sync_related_sensors_event_info_t* info)
+{
+    if(info) {
+        return mParameters.setRelatedCamSyncInfo(info);
+    } else {
+        return BAD_TYPE;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getMpoComposition
+ *
+ * DESCRIPTION: function to retrieve whether Mpo composition should be enabled
+ *                    or not
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : bool indicates whether mpo composition is enabled or not
+ *==========================================================================*/
+bool QCamera2HardwareInterface::getMpoComposition(void)
+{
+    LOGH("MpoComposition:%d ", m_bMpoEnabled);
+    return m_bMpoEnabled;
+}
+
+/*===========================================================================
+ * FUNCTION   : setMpoComposition
+ *
+ * DESCRIPTION: set if Mpo composition should be enabled for this HWI instance
+ *
+ * PARAMETERS :
+ *   @enable  : indicates whether Mpo composition enabled or not
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              BAD_TYPE when preconditions below are not met
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::setMpoComposition(bool enable)
+{
+    // By default set Mpo composition to disable
+    m_bMpoEnabled = false;
+
+    // Enable Mpo composition only if
+    // 1) frame sync is ON between two cameras and
+    // 2) any advanced features are not enabled (AOST features) and
+    // 3) not in recording mode (for liveshot case)
+    // 4) flash is not needed
+    if ((getRelatedCamSyncInfo()->sync_control == CAM_SYNC_RELATED_SENSORS_ON) &&
+            !mParameters.isAdvCamFeaturesEnabled() &&
+            !mParameters.getRecordingHintValue() &&
+            !mFlashNeeded &&
+            !isLongshotEnabled()) {
+        m_bMpoEnabled = enable;
+        LOGH("MpoComposition:%d ", m_bMpoEnabled);
+        return NO_ERROR;
+    } else {
+        return BAD_TYPE;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getRecordingHintValue
+ *
+ * DESCRIPTION: function to retrieve recording hint value
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : bool indicates whether recording hint is enabled or not
+ *==========================================================================*/
+bool QCamera2HardwareInterface::getRecordingHintValue(void)
+{
+    return mParameters.getRecordingHintValue();
+}
+
+/*===========================================================================
+ * FUNCTION   : setRecordingHintValue
+ *
+ * DESCRIPTION: set recording hint value
+ *
+ * PARAMETERS :
+ *   @value   : video hint value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::setRecordingHintValue(int32_t value)
+{
+    return mParameters.updateRecordingHintValue(value);
+}
+
+/*===========================================================================
+ * FUNCTION   : closeCamera
+ *
+ * DESCRIPTION: close camera
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::closeCamera()
+{
+    int rc = NO_ERROR;
+    int i;
+    char value[PROPERTY_VALUE_MAX];
+    LOGI("E");
+    if (!mCameraOpened) {
+        return NO_ERROR;
+    }
+    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
+             mCameraId);
+
+    // set open flag to false
+    mCameraOpened = false;
+
+    // Reset Stream config info
+    mParameters.setStreamConfigure(false, false, true);
+
+    // deinit Parameters
+    mParameters.deinit();
+
+    // exit notifier
+    m_cbNotifier.exit();
+
+    // stop and deinit postprocessor
+    waitDeferredWork(mReprocJob);
+    // Close the JPEG session
+    waitDeferredWork(mJpegJob);
+    m_postprocessor.stop();
+    deinitJpegHandle();
+    m_postprocessor.deinit();
+    mInitPProcJob = 0; // reset job id, so pproc can be reinited later
+
+    m_thermalAdapter.deinit();
+
+    // delete all channels if not already deleted
+    for (i = 0; i < QCAMERA_CH_TYPE_MAX; i++) {
+        if (m_channels[i] != NULL) {
+            m_channels[i]->stop();
+            delete m_channels[i];
+            m_channels[i] = NULL;
+        }
+    }
+
+    //free all pending api results here
+    if(m_apiResultList != NULL) {
+        api_result_list *apiResultList = m_apiResultList;
+        api_result_list *apiResultListNext;
+        while (apiResultList != NULL) {
+            apiResultListNext = apiResultList->next;
+            free(apiResultList);
+            apiResultList = apiResultListNext;
+        }
+    }
+
+    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
+    mCameraHandle = NULL;
+
+    //Notify display HAL that there is no active camera session
+    //but avoid calling the same during bootup. Refer to openCamera
+    //for more details.
+    property_get("service.bootanim.exit", value, "0");
+    if (atoi(value) == 1) {
+        pthread_mutex_lock(&gCamLock);
+        if (--gNumCameraSessions == 0) {
+            setCameraLaunchStatus(false);
+        }
+        pthread_mutex_unlock(&gCamLock);
+    }
+
+    if (mExifParams.debug_params) {
+        free(mExifParams.debug_params);
+        mExifParams.debug_params = NULL;
+    }
+
+    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
+        LOGD("Failed to release flash for camera id: %d",
+                mCameraId);
+    }
+
+    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
+         mCameraId, rc);
+
+    return rc;
+}
+
+#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX ) // NOTE(review): args unparenthesized; use with simple expressions only
+
+/*===========================================================================
+ * FUNCTION   : initCapabilities
+ *
+ * DESCRIPTION: initialize camera capabilities in static data struct
+ *
+ * PARAMETERS :
+ *   @cameraId  : camera Id
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::initCapabilities(uint32_t cameraId,
+        mm_camera_vtbl_t *cameraHandle)
+{
+    ATRACE_CALL();
+    int rc = NO_ERROR;
+    QCameraHeapMemory *capabilityHeap = NULL;
+
+    /* Allocate memory for capability buffer */
+    capabilityHeap = new QCameraHeapMemory(QCAMERA_ION_USE_CACHE);
+    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), NON_SECURE);
+    if(rc != OK) {
+        LOGE("No memory for cappability");
+        goto allocate_failed;
+    }
+
+    /* Map memory for capability buffer */
+    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
+
+    cam_buf_map_type_list bufMapList;
+    rc = QCameraBufferMaps::makeSingletonBufMapList(
+            CAM_MAPPING_BUF_TYPE_CAPABILITY,
+            0 /*stream id*/, 0 /*buffer index*/, -1 /*plane index*/,
+            0 /*cookie*/, capabilityHeap->getFd(0), sizeof(cam_capability_t),
+            bufMapList, capabilityHeap->getPtr(0));
+
+    if (rc == NO_ERROR) {
+        rc = cameraHandle->ops->map_bufs(cameraHandle->camera_handle,
+                &bufMapList);
+    }
+
+    if(rc < 0) {
+        LOGE("failed to map capability buffer");
+        goto map_failed;
+    }
+
+    /* Query Capability */
+    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
+    if(rc < 0) {
+        LOGE("failed to query capability");
+        goto query_failed;
+    }
+    gCamCapability[cameraId] =
+            (cam_capability_t *)malloc(sizeof(cam_capability_t));
+
+    if (!gCamCapability[cameraId]) {
+        LOGE("out of memory");
+        goto query_failed;
+    }
+    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
+                                        sizeof(cam_capability_t));
+
+    int index;
+    for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
+        cam_analysis_info_t *p_analysis_info =
+                &gCamCapability[cameraId]->analysis_info[index];
+        p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
+        p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
+    }
+
+    rc = NO_ERROR;
+
+query_failed:
+    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
+                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
+map_failed:
+    capabilityHeap->deallocate();
+    delete capabilityHeap;
+allocate_failed:
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCapabilities
+ *
+ * DESCRIPTION: query camera capabilities
+ *
+ * PARAMETERS :
+ *   @cameraId  : camera Id
+ *   @info      : camera info struct filled in; @p_cam_type gets sync type
+ *
+ * RETURN     : int type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::getCapabilities(uint32_t cameraId,
+        struct camera_info *info, cam_sync_type_t *p_cam_type)
+{
+    ATRACE_CALL();
+    int rc = NO_ERROR;
+    struct  camera_info *p_info = NULL;
+    pthread_mutex_lock(&gCamLock);
+    p_info = get_cam_info(cameraId, p_cam_type);
+    p_info->device_version = CAMERA_DEVICE_API_VERSION_1_0;
+    p_info->static_camera_characteristics = NULL;
+    memcpy(info, p_info, sizeof (struct camera_info));
+    pthread_mutex_unlock(&gCamLock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCamHalCapabilities
+ *
+ * DESCRIPTION: get the HAL capabilities structure
+ *
+ * PARAMETERS :
+ *   none (uses this instance's mCameraId)
+ *
+ * RETURN     : capability structure of respective camera
+ *
+ *==========================================================================*/
+cam_capability_t* QCamera2HardwareInterface::getCamHalCapabilities()
+{
+    return gCamCapability[mCameraId];
+}
+
+/*===========================================================================
+ * FUNCTION   : getBufNumRequired
+ *
+ * DESCRIPTION: return number of stream buffers needed for given stream type
+ *
+ * PARAMETERS :
+ *   @stream_type  : type of stream
+ *
+ * RETURN     : number of buffers needed
+ *==========================================================================*/
+uint8_t QCamera2HardwareInterface::getBufNumRequired(cam_stream_type_t stream_type)
+{
+    int bufferCnt = 0;
+    int minCaptureBuffers = mParameters.getNumOfSnapshots();
+    char value[PROPERTY_VALUE_MAX];
+    bool raw_yuv = false;
+    int persist_cnt = 0;
+
+    int zslQBuffers = mParameters.getZSLQueueDepth();
+
+    int minCircularBufNum = mParameters.getMaxUnmatchedFramesInQueue() +
+                            CAMERA_MIN_JPEG_ENCODING_BUFFERS;
+
+    int maxStreamBuf = minCaptureBuffers + mParameters.getMaxUnmatchedFramesInQueue() +
+                       mParameters.getNumOfExtraHDRInBufsIfNeeded() -
+                       mParameters.getNumOfExtraHDROutBufsIfNeeded() +
+                       mParameters.getNumOfExtraBuffersForImageProc() +
+                       EXTRA_ZSL_PREVIEW_STREAM_BUF;
+
+    int minUndequeCount = 0;
+    if (!isNoDisplayMode()) {
+        if(mPreviewWindow != NULL) {
+            if (mPreviewWindow->get_min_undequeued_buffer_count(mPreviewWindow,&minUndequeCount)
+                != 0) {
+                LOGW("get_min_undequeued_buffer_count  failed");
+                //TODO: hardcoded because MIN_UNDEQUEUED_BUFFERS not defined
+                //minUndequeCount = BufferQueue::MIN_UNDEQUEUED_BUFFERS;
+                minUndequeCount = MIN_UNDEQUEUED_BUFFERS;
+            }
+        } else {
+            //preview window might not be set at this point. So, query directly
+            //from BufferQueue implementation of gralloc buffers.
+            //minUndequeCount = BufferQueue::MIN_UNDEQUEUED_BUFFERS;
+            //hardcoded because MIN_UNDEQUEUED_BUFFERS not defined. REVISIT
+            minUndequeCount = MIN_UNDEQUEUED_BUFFERS;
+        }
+        if (minUndequeCount != MIN_UNDEQUEUED_BUFFERS) {
+            // minUndequeCount from valid preview window != hardcoded MIN_UNDEQUEUED_BUFFERS
+            // and so change the MACRO as per minUndequeCount
+            LOGW("WARNING : minUndequeCount(%d) != hardcoded value(%d)",
+                     minUndequeCount, MIN_UNDEQUEUED_BUFFERS);
+        }
+    }
+
+    LOGD("minCaptureBuffers = %d zslQBuffers = %d minCircularBufNum = %d"
+            "maxStreamBuf = %d minUndequeCount = %d",
+            minCaptureBuffers, zslQBuffers, minCircularBufNum,
+            maxStreamBuf, minUndequeCount);
+    // Get buffer count for the particular stream type
+    switch (stream_type) {
+    case CAM_STREAM_TYPE_PREVIEW:
+        {
+            if (mParameters.isZSLMode()) {
+                // We need to add two extra streming buffers to add
+                // flexibility in forming matched super buf in ZSL queue.
+                // with number being 'zslQBuffers + minCircularBufNum'
+                // we see preview buffers sometimes get dropped at CPP
+                // and super buf is not forming in ZSL Q for long time.
+
+                bufferCnt = zslQBuffers + minCircularBufNum +
+                        mParameters.getNumOfExtraBuffersForImageProc() +
+                        mParameters.getNumOfExtraBuffersForPreview() +
+                        mParameters.getNumOfExtraHDRInBufsIfNeeded();
+            } else {
+                bufferCnt = CAMERA_MIN_STREAMING_BUFFERS +
+                        mParameters.getMaxUnmatchedFramesInQueue() +
+                        mParameters.getNumOfExtraBuffersForPreview();
+            }
+            // ISP allocates native preview buffers and so reducing same from HAL allocation
+            if (bufferCnt > CAMERA_ISP_PING_PONG_BUFFERS )
+                bufferCnt -= CAMERA_ISP_PING_PONG_BUFFERS;
+
+            // Extra ZSL preview frames are not needed for HFR case.
+            // Thumbnail will not be derived from preview for HFR live snapshot case.
+            if ((mParameters.getRecordingHintValue() == true)
+                    && (!mParameters.isHfrMode())) {
+                bufferCnt += EXTRA_ZSL_PREVIEW_STREAM_BUF;
+            }
+
+            // Add the display minUndequeCount count on top of camera requirement
+            bufferCnt += minUndequeCount;
+
+            property_get("persist.camera.preview_yuv", value, "0");
+            persist_cnt = atoi(value);
+            if ((persist_cnt < CAM_MAX_NUM_BUFS_PER_STREAM)
+                    && (bufferCnt < persist_cnt)) {
+                bufferCnt = persist_cnt;
+            }
+        }
+        break;
+    case CAM_STREAM_TYPE_POSTVIEW:
+        {
+            bufferCnt = minCaptureBuffers +
+                        mParameters.getMaxUnmatchedFramesInQueue() +
+                        mParameters.getNumOfExtraHDRInBufsIfNeeded() -
+                        mParameters.getNumOfExtraHDROutBufsIfNeeded() +
+                        mParameters.getNumOfExtraBuffersForImageProc();
+
+            if (bufferCnt > maxStreamBuf) {
+                bufferCnt = maxStreamBuf;
+            }
+            bufferCnt += minUndequeCount;
+        }
+        break;
+    case CAM_STREAM_TYPE_SNAPSHOT:
+        {
+            if (mParameters.isZSLMode() || mLongshotEnabled) {
+                if ((minCaptureBuffers == 1 || mParameters.isUbiRefocus()) &&
+                        !mLongshotEnabled) {
+                    // Single ZSL snapshot case
+                    bufferCnt = zslQBuffers + CAMERA_MIN_STREAMING_BUFFERS +
+                            mParameters.getNumOfExtraBuffersForImageProc();
+                }
+                else {
+                    // ZSL Burst or Longshot case
+                    bufferCnt = zslQBuffers + minCircularBufNum +
+                            mParameters.getNumOfExtraBuffersForImageProc();
+                }
+                if (getSensorType() == CAM_SENSOR_YUV && bufferCnt > CAMERA_ISP_PING_PONG_BUFFERS) {
+                    //ISP allocates native buffers in YUV case
+                    bufferCnt -= CAMERA_ISP_PING_PONG_BUFFERS;
+                }
+            } else {
+                bufferCnt = minCaptureBuffers +
+                            mParameters.getNumOfExtraHDRInBufsIfNeeded() -
+                            mParameters.getNumOfExtraHDROutBufsIfNeeded() +
+                            mParameters.getNumOfExtraBuffersForImageProc();
+
+                if (bufferCnt > maxStreamBuf) {
+                    bufferCnt = maxStreamBuf;
+                }
+            }
+        }
+        break;
+    case CAM_STREAM_TYPE_RAW:
+        property_get("persist.camera.raw_yuv", value, "0");
+        raw_yuv = atoi(value) > 0 ? true : false;
+
+        if (isRdiMode() || raw_yuv) {
+            bufferCnt = zslQBuffers + minCircularBufNum;
+        } else if (mParameters.isZSLMode()) {
+            bufferCnt = zslQBuffers + minCircularBufNum;
+            if (getSensorType() == CAM_SENSOR_YUV && bufferCnt > CAMERA_ISP_PING_PONG_BUFFERS) {
+                //ISP allocates native buffers in YUV case
+                bufferCnt -= CAMERA_ISP_PING_PONG_BUFFERS;
+            }
+
+        } else {
+            bufferCnt = minCaptureBuffers +
+                        mParameters.getNumOfExtraHDRInBufsIfNeeded() -
+                        mParameters.getNumOfExtraHDROutBufsIfNeeded() +
+                        mParameters.getNumOfExtraBuffersForImageProc();
+
+            if (bufferCnt > maxStreamBuf) {
+                bufferCnt = maxStreamBuf;
+            }
+        }
+
+        property_get("persist.camera.preview_raw", value, "0");
+        persist_cnt = atoi(value);
+        if ((persist_cnt < CAM_MAX_NUM_BUFS_PER_STREAM)
+                && (bufferCnt < persist_cnt)) {
+            bufferCnt = persist_cnt;
+        }
+        property_get("persist.camera.video_raw", value, "0");
+        persist_cnt = atoi(value);
+        if ((persist_cnt < CAM_MAX_NUM_BUFS_PER_STREAM)
+                && (bufferCnt < persist_cnt)) {
+            bufferCnt = persist_cnt;
+        }
+
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        {
+            if (mParameters.getBufBatchCount()) {
+                //Video Buffer in case of HFR or camera batching..
+                bufferCnt = CAMERA_MIN_CAMERA_BATCH_BUFFERS;
+            } else if (mParameters.getVideoBatchSize()) {
+                //Video Buffer count only for HAL to HAL batching.
+                bufferCnt = (CAMERA_MIN_VIDEO_BATCH_BUFFERS
+                        * mParameters.getVideoBatchSize());
+                if (bufferCnt < CAMERA_MIN_VIDEO_BUFFERS) {
+                    bufferCnt = CAMERA_MIN_VIDEO_BUFFERS;
+                }
+            } else {
+                // No batching enabled.
+                bufferCnt = CAMERA_MIN_VIDEO_BUFFERS;
+            }
+
+            bufferCnt += mParameters.getNumOfExtraBuffersForVideo();
+            //if its 4K encoding usecase, then add extra buffer
+            cam_dimension_t dim;
+            mParameters.getStreamDimension(CAM_STREAM_TYPE_VIDEO, dim);
+            if (is4k2kResolution(&dim)) {
+                 //get additional buffer count
+                 property_get("vidc.enc.dcvs.extra-buff-count", value, "0");
+                 bufferCnt += atoi(value);
+            }
+        }
+        break;
+    case CAM_STREAM_TYPE_METADATA:
+        {
+            if (mParameters.isZSLMode()) {
+                // MetaData buffers should be >= (Preview buffers-minUndequeCount)
+                bufferCnt = zslQBuffers + minCircularBufNum +
+                            mParameters.getNumOfExtraHDRInBufsIfNeeded() -
+                            mParameters.getNumOfExtraHDROutBufsIfNeeded() +
+                            mParameters.getNumOfExtraBuffersForImageProc() +
+                            EXTRA_ZSL_PREVIEW_STREAM_BUF;
+            } else {
+                bufferCnt = minCaptureBuffers +
+                            mParameters.getNumOfExtraHDRInBufsIfNeeded() -
+                            mParameters.getNumOfExtraHDROutBufsIfNeeded() +
+                            mParameters.getMaxUnmatchedFramesInQueue() +
+                            CAMERA_MIN_STREAMING_BUFFERS +
+                            mParameters.getNumOfExtraBuffersForImageProc();
+
+                if (bufferCnt > zslQBuffers + minCircularBufNum) {
+                    bufferCnt = zslQBuffers + minCircularBufNum;
+                }
+            }
+            if (CAMERA_MIN_METADATA_BUFFERS > bufferCnt) {
+                bufferCnt = CAMERA_MIN_METADATA_BUFFERS;
+            }
+        }
+        break;
+    case CAM_STREAM_TYPE_OFFLINE_PROC:
+        {
+            bufferCnt = minCaptureBuffers;
+            // One of the ubifocus buffers is miscellaneous buffer
+            if (mParameters.isUbiRefocus()) {
+                bufferCnt -= 1;
+            }
+            if (mLongshotEnabled) {
+                bufferCnt = mParameters.getLongshotStages();
+            }
+        }
+        break;
+    case CAM_STREAM_TYPE_CALLBACK:
+        bufferCnt = CAMERA_MIN_CALLBACK_BUFFERS;
+        break;
+    case CAM_STREAM_TYPE_ANALYSIS:
+    case CAM_STREAM_TYPE_DEFAULT:
+    case CAM_STREAM_TYPE_MAX:
+    default:
+        bufferCnt = 0;
+        break;
+    }
+
+    LOGH("Buffer count = %d for stream type = %d", bufferCnt, stream_type);
+    if (CAM_MAX_NUM_BUFS_PER_STREAM < bufferCnt) {
+        LOGW("Buffer count %d for stream type %d exceeds limit %d",
+                 bufferCnt, stream_type, CAM_MAX_NUM_BUFS_PER_STREAM);
+        return CAM_MAX_NUM_BUFS_PER_STREAM;
+    }
+
+    return (uint8_t)bufferCnt;
+}
+
+/*===========================================================================
+ * FUNCTION   : allocateStreamBuf
+ *
+ * DESCRIPTION: allocate stream buffers
+ *
+ * PARAMETERS :
+ *   @stream_type  : type of stream
+ *   @size         : size of each buffer
+ *   @stride       : stride of buffer
+ *   @scanline     : scanline of buffer
+ *   @bufferCnt    : [IN/OUT] minimum num of buffers to be allocated.
+ *                   could be modified during allocation if more buffers needed
+ *
+ * RETURN     : ptr to a memory obj that holds stream buffers.
+ *              NULL if failed
+ *==========================================================================*/
+QCameraMemory *QCamera2HardwareInterface::allocateStreamBuf(
+        cam_stream_type_t stream_type, size_t size, int stride, int scanline,
+        uint8_t &bufferCnt)
+{
+    int rc = NO_ERROR;
+    QCameraMemory *mem = NULL;
+    bool bCachedMem = QCAMERA_ION_USE_CACHE;
+    bool bPoolMem = false;
+    char value[PROPERTY_VALUE_MAX];
+    // Memory pooling is on by default; can be disabled via property.
+    property_get("persist.camera.mem.usepool", value, "1");
+    if (atoi(value) == 1) {
+        bPoolMem = true;
+    }
+
+    // Allocate stream buffer memory object
+    switch (stream_type) {
+    case CAM_STREAM_TYPE_PREVIEW:
+        {
+            if (isNoDisplayMode()) {
+                // No display attached: plain ION-backed stream memory.
+                mem = new QCameraStreamMemory(mGetMemory,
+                        bCachedMem,
+                        (bPoolMem) ? &m_memoryPool : NULL,
+                        stream_type);
+            } else {
+                // Display path: buffers are dequeued from the preview window.
+                cam_dimension_t dim;
+                int minFPS, maxFPS;
+                QCameraGrallocMemory *grallocMemory =
+                    new QCameraGrallocMemory(mGetMemory);
+
+                mParameters.getStreamDimension(stream_type, dim);
+                /* we are interested only in maxfps here */
+                mParameters.getPreviewFpsRange(&minFPS, &maxFPS);
+                int usage = 0;
+                if(mParameters.isUBWCEnabled()) {
+                    cam_format_t fmt;
+                    mParameters.getStreamFormat(CAM_STREAM_TYPE_PREVIEW,fmt);
+                    if (fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
+                        usage = GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
+                    }
+                }
+                if (grallocMemory) {
+                    // Only an initial subset of buffers is mappable up front;
+                    // the remainder is tracked in mEnqueuedBuffers.
+                    grallocMemory->setMappable(
+                            CAMERA_INITIAL_MAPPABLE_PREVIEW_BUFFERS);
+                    grallocMemory->setWindowInfo(mPreviewWindow,
+                            dim.width,dim.height, stride, scanline,
+                            mParameters.getPreviewHalPixelFormat(),
+                            maxFPS, usage);
+                    pthread_mutex_lock(&mGrallocLock);
+                    if (bufferCnt > CAMERA_INITIAL_MAPPABLE_PREVIEW_BUFFERS) {
+                        mEnqueuedBuffers = (bufferCnt -
+                                CAMERA_INITIAL_MAPPABLE_PREVIEW_BUFFERS);
+                    } else {
+                        mEnqueuedBuffers = 0;
+                    }
+                    pthread_mutex_unlock(&mGrallocLock);
+                }
+                mem = grallocMemory;
+            }
+        }
+        break;
+    case CAM_STREAM_TYPE_POSTVIEW:
+        {
+            if (isNoDisplayMode() || isPreviewRestartEnabled()) {
+                mem = new QCameraStreamMemory(mGetMemory, bCachedMem);
+            } else {
+                cam_dimension_t dim;
+                int minFPS, maxFPS;
+                QCameraGrallocMemory *grallocMemory =
+                        new QCameraGrallocMemory(mGetMemory);
+
+                mParameters.getStreamDimension(stream_type, dim);
+                /* we are interested only in maxfps here */
+                mParameters.getPreviewFpsRange(&minFPS, &maxFPS);
+                if (grallocMemory) {
+                    grallocMemory->setWindowInfo(mPreviewWindow, dim.width,
+                            dim.height, stride, scanline,
+                            mParameters.getPreviewHalPixelFormat(), maxFPS);
+                }
+                mem = grallocMemory;
+            }
+        }
+        break;
+    case CAM_STREAM_TYPE_ANALYSIS:
+    case CAM_STREAM_TYPE_SNAPSHOT:
+    case CAM_STREAM_TYPE_RAW:
+    case CAM_STREAM_TYPE_OFFLINE_PROC:
+        mem = new QCameraStreamMemory(mGetMemory,
+                bCachedMem,
+                (bPoolMem) ? &m_memoryPool : NULL,
+                stream_type);
+        break;
+    case CAM_STREAM_TYPE_METADATA:
+        {
+            if (mMetadataMem == NULL) {
+                mem = new QCameraMetadataStreamMemory(QCAMERA_ION_USE_CACHE);
+            } else {
+                // Reuse the pre-allocated (deferred) metadata memory object;
+                // ownership transfers to the caller, so drop our reference.
+                mem = mMetadataMem;
+                mMetadataMem = NULL;
+
+                int32_t numAdditionalBuffers = bufferCnt - mem->getCnt();
+                if (numAdditionalBuffers > 0) {
+                    rc = mem->allocateMore(numAdditionalBuffers, size);
+                    if (rc != NO_ERROR) {
+                        LOGE("Failed to allocate additional buffers, "
+                                "but attempting to proceed.");
+                    }
+                }
+                bufferCnt = mem->getCnt();
+                // The memory is already allocated  and initialized, so
+                // simply return here.
+                return mem;
+            }
+        }
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        {
+            //Use uncached allocation by default
+            if (mParameters.isVideoBuffersCached() || mParameters.isSeeMoreEnabled() ||
+                    mParameters.isHighQualityNoiseReductionMode()) {
+                bCachedMem = QCAMERA_ION_USE_CACHE;
+            }
+            else {
+                bCachedMem = QCAMERA_ION_USE_NOCACHE;
+            }
+
+            QCameraVideoMemory *videoMemory = NULL;
+            if (mParameters.getVideoBatchSize()) {
+                videoMemory = new QCameraVideoMemory(
+                        mGetMemory, FALSE, QCAMERA_MEM_TYPE_BATCH);
+                if (videoMemory == NULL) {
+                    LOGE("Out of memory for video batching obj");
+                    return NULL;
+                }
+                /*
+                *   numFDs = BATCH size
+                *  numInts = 5  // OFFSET, SIZE, USAGE, TIMESTAMP, FORMAT
+                */
+                rc = videoMemory->allocateMeta(
+                        CAMERA_MIN_VIDEO_BATCH_BUFFERS,
+                        mParameters.getVideoBatchSize(),
+                        VIDEO_METADATA_NUM_INTS);
+                if (rc < 0) {
+                    delete videoMemory;
+                    return NULL;
+                }
+            } else {
+                videoMemory =
+                        new QCameraVideoMemory(mGetMemory, bCachedMem);
+                if (videoMemory == NULL) {
+                    LOGE("Out of memory for video obj");
+                    return NULL;
+                }
+            }
+
+            int usage = 0;
+            cam_format_t fmt;
+            mParameters.getStreamFormat(CAM_STREAM_TYPE_VIDEO,fmt);
+            if (mParameters.isUBWCEnabled() && (fmt == CAM_FORMAT_YUV_420_NV12_UBWC)) {
+                usage = private_handle_t::PRIV_FLAGS_UBWC_ALIGNED;
+            }
+            videoMemory->setVideoInfo(usage, fmt);
+            mem = videoMemory;
+            if (!mParameters.getBufBatchCount()) {
+                //For batch mode this will be part of user buffer.
+                mVideoMem = videoMemory;
+            }
+        }
+        break;
+    case CAM_STREAM_TYPE_CALLBACK:
+        mem = new QCameraStreamMemory(mGetMemory,
+                bCachedMem,
+                (bPoolMem) ? &m_memoryPool : NULL,
+                stream_type);
+        break;
+    case CAM_STREAM_TYPE_DEFAULT:
+    case CAM_STREAM_TYPE_MAX:
+    default:
+        break;
+    }
+    if (!mem) {
+        return NULL;
+    }
+
+    if (bufferCnt > 0) {
+        if (mParameters.isSecureMode() &&
+            (stream_type == CAM_STREAM_TYPE_RAW) &&
+            (mParameters.isRdiMode())) {
+            // BUGFIX: size is size_t; use %zu instead of %d to avoid
+            // undefined behavior from a mismatched format specifier.
+            LOGD("Allocating %d secure buffers of size %zu ", bufferCnt, size);
+            rc = mem->allocate(bufferCnt, size, SECURE);
+        } else {
+            rc = mem->allocate(bufferCnt, size, NON_SECURE);
+        }
+        if (rc < 0) {
+            // BUGFIX: clear mVideoMem before deleting to avoid leaving a
+            // dangling member pointer when the video allocation fails
+            // (mVideoMem was assigned in the CAM_STREAM_TYPE_VIDEO case).
+            if (mem == mVideoMem) {
+                mVideoMem = NULL;
+            }
+            delete mem;
+            return NULL;
+        }
+        bufferCnt = mem->getCnt();
+    }
+    // BUGFIX: %zu for size_t here as well.
+    LOGH("rc = %d type = %d count = %d size = %zu cache = %d, pool = %d",
+            rc, stream_type, bufferCnt, size, bCachedMem, bPoolMem);
+    return mem;
+}
+
+/*===========================================================================
+ * FUNCTION   : allocateMoreStreamBuf
+ *
+ * DESCRIPTION: grow an existing stream memory object by a number of buffers
+ *
+ * PARAMETERS :
+ *   @mem_obj      : memory object ptr
+ *   @size         : size of each buffer
+ *   @bufferCnt    : [IN/OUT] additional number of buffers to be allocated.
+ *                   output will be the number of total buffers
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::allocateMoreStreamBuf(
+        QCameraMemory *mem_obj, size_t size, uint8_t &bufferCnt)
+{
+    // Nothing requested: succeed without touching the memory object.
+    if (bufferCnt == 0) {
+        return NO_ERROR;
+    }
+
+    int32_t status = mem_obj->allocateMore(bufferCnt, size);
+    // Report the total buffer count now held by the object.
+    bufferCnt = mem_obj->getCnt();
+    return status;
+}
+
+/*===========================================================================
+ * FUNCTION   : allocateMiscBuf
+ *
+ * DESCRIPTION: allocate the miscellaneous (metadata) heap buffer that some
+ *              offline-reprocess features require
+ *
+ * PARAMETERS :
+ *   @streamInfo  : stream info
+ *
+ * RETURN     : ptr to a memory obj that holds the misc buffer.
+ *              NULL if not needed or if allocation failed
+ *==========================================================================*/
+QCameraHeapMemory *QCamera2HardwareInterface::allocateMiscBuf(
+        cam_stream_info_t *streamInfo)
+{
+    uint8_t bufNum = 0;
+    size_t bufSize = 0;
+    cam_feature_mask_t feature_mask =
+            streamInfo->reprocess_config.pp_feature_config.feature_mask;
+
+    // Only offline reprocess streams carry a misc buffer, and only for
+    // TruePortrait or Refocus post-processing.
+    if (streamInfo->stream_type == CAM_STREAM_TYPE_OFFLINE_PROC) {
+        if (feature_mask & CAM_QCOM_FEATURE_TRUEPORTRAIT) {
+            bufNum = 1;
+            bufSize = mParameters.getTPMaxMetaSize();
+        } else if (feature_mask & CAM_QCOM_FEATURE_REFOCUS) {
+            bufNum = 1;
+            bufSize = mParameters.getRefocusMaxMetaSize();
+        }
+    }
+
+    // No applicable feature: nothing to allocate (matches original NULL return).
+    if ((bufNum == 0) || (bufSize == 0)) {
+        return NULL;
+    }
+
+    QCameraHeapMemory *miscBuf = new QCameraHeapMemory(QCAMERA_ION_USE_CACHE);
+    if (!miscBuf) {
+        LOGE("Unable to allocate miscBuf object");
+        return NULL;
+    }
+
+    int rc = miscBuf->allocate(bufNum, bufSize, NON_SECURE);
+    if (rc < 0) {
+        LOGE("Failed to allocate misc buffer memory");
+        delete miscBuf;
+        return NULL;
+    }
+
+    return miscBuf;
+}
+
+/*===========================================================================
+ * FUNCTION   : allocateStreamInfoBuf
+ *
+ * DESCRIPTION: allocate and populate the stream info buffer consumed by the
+ *              backend for one stream (format, dimension, buffer counts,
+ *              streaming mode and post-processing configuration)
+ *
+ * PARAMETERS :
+ *   @stream_type  : type of stream
+ *
+ * RETURN     : ptr to a memory obj that holds stream info buffer.
+ *              NULL if failed
+ *==========================================================================*/
+QCameraHeapMemory *QCamera2HardwareInterface::allocateStreamInfoBuf(
+        cam_stream_type_t stream_type)
+{
+    int rc = NO_ERROR;
+    char value[PROPERTY_VALUE_MAX];
+    bool raw_yuv = false;
+
+    QCameraHeapMemory *streamInfoBuf = new QCameraHeapMemory(QCAMERA_ION_USE_CACHE);
+    if (!streamInfoBuf) {
+        LOGE("allocateStreamInfoBuf: Unable to allocate streamInfo object");
+        return NULL;
+    }
+
+    // One buffer holding a single cam_stream_info_t structure.
+    rc = streamInfoBuf->allocate(1, sizeof(cam_stream_info_t), NON_SECURE);
+    if (rc < 0) {
+        LOGE("allocateStreamInfoBuf: Failed to allocate stream info memory");
+        delete streamInfoBuf;
+        return NULL;
+    }
+
+    cam_stream_info_t *streamInfo = (cam_stream_info_t *)streamInfoBuf->getPtr(0);
+    memset(streamInfo, 0, sizeof(cam_stream_info_t));
+    streamInfo->stream_type = stream_type;
+    // NOTE(review): rc is overwritten by each call below and never checked;
+    // a failed format/dimension/rotation query proceeds silently — confirm
+    // whether that is intentional.
+    rc = mParameters.getStreamFormat(stream_type, streamInfo->fmt);
+    rc = mParameters.getStreamDimension(stream_type, streamInfo->dim);
+    rc = mParameters.getStreamRotation(stream_type, streamInfo->pp_config, streamInfo->dim);
+    streamInfo->num_bufs = getBufNumRequired(stream_type);
+    // Defaults; overridden per stream type in the switch below.
+    streamInfo->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+    streamInfo->is_secure = NON_SECURE;
+
+    switch (stream_type) {
+    case CAM_STREAM_TYPE_SNAPSHOT:
+        // ZSL-without-recording and longshot stream continuously; otherwise
+        // capture a fixed burst sized by snapshot + HDR/ImageProc extras.
+        if ((mParameters.isZSLMode() && mParameters.getRecordingHintValue() != true) ||
+            mLongshotEnabled) {
+            streamInfo->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+        } else {
+            streamInfo->streaming_mode = CAM_STREAMING_MODE_BURST;
+            streamInfo->num_of_burst = (uint8_t)
+                    (mParameters.getNumOfSnapshots()
+                        + mParameters.getNumOfExtraHDRInBufsIfNeeded()
+                        - mParameters.getNumOfExtraHDROutBufsIfNeeded()
+                        + mParameters.getNumOfExtraBuffersForImageProc());
+        }
+        break;
+    case CAM_STREAM_TYPE_RAW:
+        // persist.camera.raw_yuv forces continuous RAW streaming for debug.
+        property_get("persist.camera.raw_yuv", value, "0");
+        raw_yuv = atoi(value) > 0 ? true : false;
+        if ((mParameters.isZSLMode()) || (isRdiMode()) || (raw_yuv)) {
+            streamInfo->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+        } else {
+            streamInfo->streaming_mode = CAM_STREAMING_MODE_BURST;
+            streamInfo->num_of_burst = mParameters.getNumOfSnapshots();
+        }
+        // Secure RDI capture marks the stream buffers as secure.
+        if (mParameters.isSecureMode() && mParameters.isRdiMode()) {
+            streamInfo->is_secure = SECURE;
+        } else {
+            streamInfo->is_secure = NON_SECURE;
+        }
+        break;
+    case CAM_STREAM_TYPE_POSTVIEW:
+        if (mLongshotEnabled) {
+            streamInfo->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+        } else {
+            streamInfo->streaming_mode = CAM_STREAMING_MODE_BURST;
+            streamInfo->num_of_burst = (uint8_t)(mParameters.getNumOfSnapshots()
+                + mParameters.getNumOfExtraHDRInBufsIfNeeded()
+                - mParameters.getNumOfExtraHDROutBufsIfNeeded()
+                + mParameters.getNumOfExtraBuffersForImageProc());
+        }
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        streamInfo->dis_enable = mParameters.isDISEnabled();
+        if (mParameters.getBufBatchCount()) {
+            //Update stream info structure with batch mode info
+            streamInfo->streaming_mode = CAM_STREAMING_MODE_BATCH;
+            streamInfo->user_buf_info.frame_buf_cnt = mParameters.getBufBatchCount();
+            streamInfo->user_buf_info.size =
+                    (uint32_t)(sizeof(struct msm_camera_user_buf_cont_t));
+            cam_fps_range_t pFpsRange;
+            mParameters.getHfrFps(pFpsRange);
+            // Frame interval in microseconds derived from the HFR max fps.
+            streamInfo->user_buf_info.frameInterval =
+                    (long)((1000/pFpsRange.video_max_fps) * 1000);
+            LOGH("Video Batch Count = %d, interval = %d",
+                    streamInfo->user_buf_info.frame_buf_cnt,
+                    streamInfo->user_buf_info.frameInterval);
+        }
+        // IS type only applies while recording hint is set.
+        if (mParameters.getRecordingHintValue()) {
+            if(mParameters.isDISEnabled()) {
+                streamInfo->is_type = mParameters.getISType();
+            } else {
+                streamInfo->is_type = IS_TYPE_NONE;
+            }
+        }
+        if (mParameters.isSecureMode()) {
+            streamInfo->is_secure = SECURE;
+        }
+        break;
+    case CAM_STREAM_TYPE_PREVIEW:
+        if (mParameters.getRecordingHintValue()) {
+            if(mParameters.isDISEnabled()) {
+                streamInfo->is_type = mParameters.getPreviewISType();
+            } else {
+                streamInfo->is_type = IS_TYPE_NONE;
+            }
+        }
+        if (mParameters.isSecureMode()) {
+            streamInfo->is_secure = SECURE;
+        }
+        break;
+    case CAM_STREAM_TYPE_ANALYSIS:
+        // Analysis stream produces no frames to the application.
+        streamInfo->noFrameExpected = 1;
+        break;
+    default:
+        break;
+    }
+
+    // Update feature mask
+    mParameters.updatePpFeatureMask(stream_type);
+
+    // Get feature mask
+    mParameters.getStreamPpMask(stream_type, streamInfo->pp_config.feature_mask);
+
+    // Update pp config
+    if (streamInfo->pp_config.feature_mask & CAM_QCOM_FEATURE_FLIP) {
+        int flipMode = mParameters.getFlipMode(stream_type);
+        if (flipMode > 0) {
+            streamInfo->pp_config.flip = (uint32_t)flipMode;
+        }
+    }
+    if (streamInfo->pp_config.feature_mask & CAM_QCOM_FEATURE_SHARPNESS) {
+        streamInfo->pp_config.sharpness = mParameters.getSharpness();
+    }
+    if (streamInfo->pp_config.feature_mask & CAM_QCOM_FEATURE_EFFECT) {
+        streamInfo->pp_config.effect = mParameters.getEffectValue();
+    }
+
+    if (streamInfo->pp_config.feature_mask & CAM_QCOM_FEATURE_DENOISE2D) {
+        streamInfo->pp_config.denoise2d.denoise_enable = 1;
+        streamInfo->pp_config.denoise2d.process_plates =
+                mParameters.getDenoiseProcessPlate(CAM_INTF_PARM_WAVELET_DENOISE);
+    }
+
+    // Crop/scale are added here only when this stream will NOT be reprocessed
+    // (otherwise the reprocess stream handles them).
+    if (!((needReprocess()) && (CAM_STREAM_TYPE_SNAPSHOT == stream_type ||
+            CAM_STREAM_TYPE_RAW == stream_type))) {
+        if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
+                CAM_QCOM_FEATURE_CROP)
+            streamInfo->pp_config.feature_mask |= CAM_QCOM_FEATURE_CROP;
+        if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
+                CAM_QCOM_FEATURE_SCALE)
+            streamInfo->pp_config.feature_mask |= CAM_QCOM_FEATURE_SCALE;
+    }
+
+    // NOTE(review): feature_mask is cam_feature_mask_t which may be wider
+    // than int; %x could truncate the printed value — verify the format.
+    LOGH("type %d, fmt %d, dim %dx%d, num_bufs %d mask = 0x%x\n",
+           stream_type, streamInfo->fmt, streamInfo->dim.width,
+           streamInfo->dim.height, streamInfo->num_bufs,
+           streamInfo->pp_config.feature_mask);
+
+    return streamInfoBuf;
+}
+
+/*===========================================================================
+ * FUNCTION   : allocateStreamUserBuf
+ *
+ * DESCRIPTION: allocate the user-pointer (container) buffer for a stream in
+ *              BATCH streaming mode; the single allocation holds all batch
+ *              containers
+ *
+ * PARAMETERS :
+ *   @streamInfo  : stream info structure (must be in BATCH streaming mode)
+ *
+ * RETURN     : ptr to a memory obj that holds the user buffer.
+ *              NULL if failed
+ *==========================================================================*/
+QCameraMemory *QCamera2HardwareInterface::allocateStreamUserBuf(
+        cam_stream_info_t *streamInfo)
+{
+    int rc = NO_ERROR;
+    QCameraMemory *mem = NULL;
+    int size = 0;
+
+    // User buffers only exist for batch-mode streams.
+    if (streamInfo->streaming_mode != CAM_STREAMING_MODE_BATCH) {
+        LOGE("Stream is not in BATCH mode. Invalid Stream");
+        return NULL;
+    }
+
+    // Allocate stream user buffer memory object
+    switch (streamInfo->stream_type) {
+    case CAM_STREAM_TYPE_VIDEO: {
+        QCameraVideoMemory *video_mem = new QCameraVideoMemory(
+                mGetMemory, FALSE, QCAMERA_MEM_TYPE_BATCH);
+        if (video_mem == NULL) {
+            LOGE("Out of memory for video obj");
+            return NULL;
+        }
+        /*
+        *   numFDs = BATCH size
+        *  numInts = 5  // OFFSET, SIZE, USAGE, TIMESTAMP, FORMAT
+        */
+        rc = video_mem->allocateMeta(streamInfo->num_bufs,
+                mParameters.getBufBatchCount(), VIDEO_METADATA_NUM_INTS);
+        if (rc < 0) {
+            LOGE("allocateMeta failed");
+            delete video_mem;
+            return NULL;
+        }
+        int usage = 0;
+        cam_format_t fmt;
+        mParameters.getStreamFormat(CAM_STREAM_TYPE_VIDEO, fmt);
+        // UBWC-aligned private flag is required for UBWC NV12 video buffers.
+        if(mParameters.isUBWCEnabled() && (fmt == CAM_FORMAT_YUV_420_NV12_UBWC)) {
+            usage = private_handle_t::PRIV_FLAGS_UBWC_ALIGNED;
+        }
+        video_mem->setVideoInfo(usage, fmt);
+        mem = static_cast<QCameraMemory *>(video_mem);
+        // Keep a handle for later release; see allocateStreamBuf for the
+        // non-batch counterpart.
+        mVideoMem = video_mem;
+    }
+    break;
+
+    case CAM_STREAM_TYPE_PREVIEW:
+    case CAM_STREAM_TYPE_POSTVIEW:
+    case CAM_STREAM_TYPE_ANALYSIS:
+    case CAM_STREAM_TYPE_SNAPSHOT:
+    case CAM_STREAM_TYPE_RAW:
+    case CAM_STREAM_TYPE_METADATA:
+    case CAM_STREAM_TYPE_OFFLINE_PROC:
+    case CAM_STREAM_TYPE_CALLBACK:
+        LOGE("Stream type Not supported.for BATCH processing");
+    break;
+
+    case CAM_STREAM_TYPE_DEFAULT:
+    case CAM_STREAM_TYPE_MAX:
+    default:
+        break;
+    }
+    if (!mem) {
+        LOGE("Failed to allocate mem");
+        return NULL;
+    }
+
+    /*Size of this buffer will be number of batch buffer */
+    size = PAD_TO_SIZE((streamInfo->num_bufs * streamInfo->user_buf_info.size),
+            CAM_PAD_TO_4K);
+
+    LOGH("Allocating BATCH Buffer count = %d", streamInfo->num_bufs);
+
+    if (size > 0) {
+        // Allocating one buffer for all batch buffers
+        rc = mem->allocate(1, size, NON_SECURE);
+        if (rc < 0) {
+            delete mem;
+            return NULL;
+        }
+    }
+    return mem;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : waitForDeferredAlloc
+ *
+ * DESCRIPTION: block until the deferred allocation job for the given stream
+ *              type completes (only metadata uses deferred allocation here)
+ *
+ * PARAMETERS :
+ *   @stream_type  : type of stream to (possibly) wait for
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::waitForDeferredAlloc(cam_stream_type_t stream_type)
+{
+    // Only the metadata stream is allocated through the deferred-work queue.
+    if (stream_type != CAM_STREAM_TYPE_METADATA) {
+        return;
+    }
+    waitDeferredWork(mMetadataAllocJob);
+}
+
+
+/*===========================================================================
+ * FUNCTION   : setPreviewWindow
+ *
+ * DESCRIPTION: record the preview window ops table to be used for display
+ *
+ * PARAMETERS :
+ *   @window  : ptr to window ops table struct (may be NULL)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::setPreviewWindow(
+        struct preview_stream_ops *window)
+{
+    // Just store the handle; it is consumed when preview buffers are set up.
+    mPreviewWindow = window;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setCallBacks
+ *
+ * DESCRIPTION: cache the framework callback pointers and propagate them to
+ *              the callback notifier
+ *
+ * PARAMETERS :
+ *   @notify_cb  : notify cb
+ *   @data_cb    : data cb
+ *   @data_cb_timestamp : data cb with time stamp
+ *   @get_memory : request memory ops table
+ *   @user       : user data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::setCallBacks(camera_notify_callback notify_cb,
+                                            camera_data_callback data_cb,
+                                            camera_data_timestamp_callback data_cb_timestamp,
+                                            camera_request_memory get_memory,
+                                            void *user)
+{
+    // Keep local copies for direct use by the HAL.
+    mNotifyCb = notify_cb;
+    mDataCb = data_cb;
+    mDataCbTimestamp = data_cb_timestamp;
+    mGetMemory = get_memory;
+    mCallbackCookie = user;
+
+    // Forward the same callbacks to the callback notifier.
+    m_cbNotifier.setCallbacks(notify_cb, data_cb, data_cb_timestamp, user);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setJpegCallBacks
+ *
+ * DESCRIPTION: cache the JPEG data callback and cookie and propagate them to
+ *              the callback notifier
+ *
+ * PARAMETERS :
+ *   @jpegCb          : Jpeg callback method
+ *   @callbackCookie  : callback cookie
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::setJpegCallBacks(jpeg_data_callback jpegCb,
+                                            void *callbackCookie)
+{
+    LOGH("camera id %d", getCameraId());
+    mJpegCb = jpegCb;
+    mJpegCallbackCookie = callbackCookie;
+    // Hand the pair to the notifier which delivers jpeg data events.
+    m_cbNotifier.setJpegCallBacks(mJpegCb, mJpegCallbackCookie);
+}
+
+/*===========================================================================
+ * FUNCTION   : enableMsgType
+ *
+ * DESCRIPTION: enable msg type impl; additionally starts the dedicated
+ *              CALLBACK channel when preview callbacks are newly enabled in
+ *              the UBWC case
+ *
+ * PARAMETERS :
+ *   @msg_type  : msg type mask to be enabled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::enableMsgType(int32_t msg_type)
+{
+    int32_t rc = NO_ERROR;
+
+    if (mParameters.isUBWCEnabled()) {
+        /* UBWC preview serves CAMERA_MSG_PREVIEW_FRAME through a dedicated
+         * CALLBACK stream; start that channel when the message transitions
+         * from disabled to enabled. */
+        if (!(msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME)) &&
+                (msg_type & CAMERA_MSG_PREVIEW_FRAME)) {
+            // Start callback channel only when preview/zsl channel is active
+            QCameraChannel* previewCh = NULL;
+            // ZSL without recording hint feeds preview via the ZSL channel.
+            if (isZSLMode() && (getRecordingHintValue() != true)) {
+                previewCh = m_channels[QCAMERA_CH_TYPE_ZSL];
+            } else {
+                previewCh = m_channels[QCAMERA_CH_TYPE_PREVIEW];
+            }
+            QCameraChannel* callbackCh = m_channels[QCAMERA_CH_TYPE_CALLBACK];
+            if ((callbackCh != NULL) &&
+                    (previewCh != NULL) && previewCh->isActive()) {
+                rc = startChannel(QCAMERA_CH_TYPE_CALLBACK);
+                if (rc != NO_ERROR) {
+                    LOGE("START Callback Channel failed");
+                }
+            }
+        }
+    }
+    // The message bits are recorded even if the channel start failed.
+    mMsgEnabled |= msg_type;
+    LOGH("(0x%x) : mMsgEnabled = 0x%x rc = %d", msg_type , mMsgEnabled, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : disableMsgType
+ *
+ * DESCRIPTION: disable msg type impl. In the UBWC case, disabling preview
+ *              frame callbacks also stops the dedicated CALLBACK channel
+ *              if it is running.
+ *
+ * PARAMETERS :
+ *   @msg_type  : msg type mask to be disabled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::disableMsgType(int32_t msg_type)
+{
+    int32_t rc = NO_ERROR;
+
+    if (mParameters.isUBWCEnabled()) {
+        /*STOP CALLBACK STREAM*/
+        if ((msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME)) &&
+                (msg_type & CAMERA_MSG_PREVIEW_FRAME)) {
+            // Stop callback channel only if it is active
+            if ((m_channels[QCAMERA_CH_TYPE_CALLBACK] != NULL) &&
+                   (m_channels[QCAMERA_CH_TYPE_CALLBACK]->isActive())) {
+                rc = stopChannel(QCAMERA_CH_TYPE_CALLBACK);
+                if (rc != NO_ERROR) {
+                    LOGE("STOP Callback Channel failed");
+                }
+            }
+        }
+    }
+    // The requested bits are cleared even if stopping the callback channel
+    // failed; rc still carries that failure back to the caller.
+    mMsgEnabled &= ~msg_type;
+    LOGH("(0x%x) : mMsgEnabled = 0x%x rc = %d", msg_type , mMsgEnabled, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : msgTypeEnabled
+ *
+ * DESCRIPTION: check whether any of the given message types is currently
+ *              enabled (no locking; see msgTypeEnabledWithLock)
+ *
+ * PARAMETERS :
+ *   @msg_type  : msg type mask
+ *
+ * RETURN     : 0 -- not enabled
+ *              none 0 -- enabled
+ *==========================================================================*/
+int QCamera2HardwareInterface::msgTypeEnabled(int32_t msg_type)
+{
+    // Intersection of the requested mask with the enabled-message mask.
+    return mMsgEnabled & msg_type;
+}
+
+/*===========================================================================
+ * FUNCTION   : msgTypeEnabledWithLock
+ *
+ * DESCRIPTION: same check as msgTypeEnabled, but performed while holding
+ *              the API lock
+ *
+ * PARAMETERS :
+ *   @msg_type  : msg type mask
+ *
+ * RETURN     : 0 -- not enabled
+ *              none 0 -- enabled
+ *==========================================================================*/
+int QCamera2HardwareInterface::msgTypeEnabledWithLock(int32_t msg_type)
+{
+    lockAPI();
+    // Snapshot the enabled mask while the API lock is held.
+    int result = mMsgEnabled & msg_type;
+    unlockAPI();
+    return result;
+}
+
+/*===========================================================================
+ * FUNCTION   : startPreview
+ *
+ * DESCRIPTION: start preview impl. Starts the ZSL or preview channel
+ *              (plus the callback channel if preview callbacks are
+ *              enabled), then kicks off deferred postprocessor init.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::startPreview()
+{
+    KPI_ATRACE_CALL();
+    int32_t rc = NO_ERROR;
+
+    LOGI("E ZSL = %d Recording Hint = %d", mParameters.isZSLMode(),
+            mParameters.getRecordingHintValue());
+
+    // Hold the perf lock while channels are brought up; it is released on
+    // every exit path below.
+    m_perfLock.lock_acq();
+
+    updateThermalLevel((void *)&mThermalLevel);
+
+    setDisplayFrameSkip();
+
+    // start preview stream
+    // ZSL without a recording hint serves preview from the ZSL channel;
+    // otherwise the dedicated preview channel is started.
+    if (mParameters.isZSLMode() && mParameters.getRecordingHintValue() != true) {
+        rc = startChannel(QCAMERA_CH_TYPE_ZSL);
+    } else {
+        rc = startChannel(QCAMERA_CH_TYPE_PREVIEW);
+    }
+
+    if (rc != NO_ERROR) {
+        LOGE("failed to start channels");
+        m_perfLock.lock_rel();
+        return rc;
+    }
+
+    // A separate CALLBACK channel carries preview frame callbacks (see
+    // enableMsgType for the UBWC case); start it if the client already
+    // asked for preview frames.
+    if ((msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME))
+            && (m_channels[QCAMERA_CH_TYPE_CALLBACK] != NULL)) {
+        rc = startChannel(QCAMERA_CH_TYPE_CALLBACK);
+        if (rc != NO_ERROR) {
+            LOGE("failed to start callback stream");
+            // Roll back; presumably stopChannel is a no-op for whichever
+            // channel was not started above -- TODO confirm.
+            stopChannel(QCAMERA_CH_TYPE_ZSL);
+            stopChannel(QCAMERA_CH_TYPE_PREVIEW);
+            m_perfLock.lock_rel();
+            return rc;
+        }
+    }
+
+    updatePostPreviewParameters();
+    m_stateMachine.setPreviewCallbackNeeded(true);
+
+    // if job id is non-zero, that means the postproc init job is already
+    // pending or complete
+    if (mInitPProcJob == 0) {
+        mInitPProcJob = deferPPInit();
+        if (mInitPProcJob == 0) {
+            LOGE("Unable to initialize postprocessor, mCameraHandle = %p",
+                     mCameraHandle);
+            rc = -ENOMEM;
+            m_perfLock.lock_rel();
+            return rc;
+        }
+    }
+    m_perfLock.lock_rel();
+
+    if (rc == NO_ERROR) {
+        // Set power Hint for preview
+        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, true);
+    }
+
+    LOGI("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : updatePostPreviewParameters
+ *
+ * DESCRIPTION: update parameters after preview has started; currently
+ *              enables OIS (Camera mode and 4k2k camcorder mode).
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::updatePostPreviewParameters() {
+    // Enable OIS only in Camera mode and 4k2k camcoder mode
+    int32_t rc = mParameters.updateOisValue(1);
+    if (rc != NO_ERROR) {
+        LOGW("Failed to update OIS value");
+    }
+    // Propagate the status instead of discarding it: previously rc was
+    // computed but NO_ERROR was returned unconditionally (dead store).
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stopPreview
+ *
+ * DESCRIPTION: stop preview impl. Stops every channel that may be feeding
+ *              preview, flushes pending preview callbacks and deletes the
+ *              channels created by preparePreview.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::stopPreview()
+{
+    KPI_ATRACE_CALL();
+    LOGI("E");
+    mNumPreviewFaces = -1;
+    mActiveAF = false;
+
+    // Disable power Hint for preview
+    m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
+
+    m_perfLock.lock_acq();
+
+    // stop preview stream
+    // All preview-related channels are stopped; presumably stopping a
+    // channel that was never started is harmless -- TODO confirm.
+    stopChannel(QCAMERA_CH_TYPE_CALLBACK);
+    stopChannel(QCAMERA_CH_TYPE_ZSL);
+    stopChannel(QCAMERA_CH_TYPE_PREVIEW);
+    stopChannel(QCAMERA_CH_TYPE_RAW);
+
+    // Drop any preview callbacks still queued in the notifier.
+    m_cbNotifier.flushPreviewNotifications();
+    //add for ts makeup
+#ifdef TARGET_TS_MAKEUP
+    ts_makeup_finish();
+#endif
+    // delete all channels from preparePreview
+    unpreparePreview();
+
+    m_perfLock.lock_rel();
+
+    LOGI("X");
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : storeMetaDataInBuffers
+ *
+ * DESCRIPTION: record whether video frames should carry metadata in
+ *              buffers; consulted later when releasing recording frames
+ *
+ * PARAMETERS :
+ *   @enable  : flag if need enable
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::storeMetaDataInBuffers(int enable)
+{
+    // Simply latch the client's choice; always succeeds.
+    mStoreMetaDataInFrame = enable;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : preStartRecording
+ *
+ * DESCRIPTION: Prepare start recording impl. If the recording hint is not
+ *              yet set, preview must be restarted with the hint enabled
+ *              before recording can begin (done here in single-camera
+ *              mode; deferred to the muxer in dual-camera mode).
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::preStartRecording()
+{
+    int32_t rc = NO_ERROR;
+    LOGH("E");
+    if (mParameters.getRecordingHintValue() == false) {
+
+        // Give HWI control to restart preview only in single camera mode.
+        // In dual-cam mode, this control belongs to muxer.
+        if (getRelatedCamSyncInfo()->sync_control != CAM_SYNC_RELATED_SENSORS_ON) {
+            LOGH("start recording when hint is false, stop preview first");
+            stopPreview();
+
+            // Set recording hint to TRUE
+            mParameters.updateRecordingHintValue(TRUE);
+            rc = preparePreview();
+            if (rc == NO_ERROR) {
+                rc = startPreview();
+            }
+        }
+        else
+        {
+            // For dual cam mode, update the flag mPreviewRestartNeeded to true
+            // Restart control will be handled by muxer.
+            mPreviewRestartNeeded = true;
+        }
+    }
+
+    LOGH("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : startRecording
+ *
+ * DESCRIPTION: start recording impl. In low power mode the preview
+ *              metadata stream is linked into the video channel; when TNR
+ *              snapshot is enabled (and not in low power mode) the
+ *              snapshot channel is also linked and started.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::startRecording()
+{
+    int32_t rc = NO_ERROR;
+
+    LOGI("E");
+    mVideoMem = NULL;
+    //link meta stream with video channel if low power mode.
+    if (isLowPowerMode()) {
+        // Find and try to link a metadata stream from preview channel
+        QCameraChannel *pMetaChannel = NULL;
+        QCameraStream *pMetaStream = NULL;
+        QCameraChannel *pVideoChannel = m_channels[QCAMERA_CH_TYPE_VIDEO];
+
+        // Guard against a missing video channel before linking into it
+        // (original code dereferenced pVideoChannel without a NULL check).
+        if (NULL == pVideoChannel) {
+            LOGE("Video channel is NULL in low power mode");
+            return UNKNOWN_ERROR;
+        }
+
+        if (m_channels[QCAMERA_CH_TYPE_PREVIEW] != NULL) {
+            pMetaChannel = m_channels[QCAMERA_CH_TYPE_PREVIEW];
+            uint32_t streamNum = pMetaChannel->getNumOfStreams();
+            QCameraStream *pStream = NULL;
+            for (uint32_t i = 0 ; i < streamNum ; i++ ) {
+                pStream = pMetaChannel->getStreamByIndex(i);
+                if ((NULL != pStream) &&
+                        (CAM_STREAM_TYPE_METADATA == pStream->getMyType())) {
+                    pMetaStream = pStream;
+                    break;
+                }
+            }
+        }
+
+        if ((NULL != pMetaChannel) && (NULL != pMetaStream)) {
+            rc = pVideoChannel->linkStream(pMetaChannel, pMetaStream);
+            if (NO_ERROR != rc) {
+                // Link failure is non-fatal; recording continues unlinked.
+                LOGW("Metadata stream link failed %d", rc);
+            }
+        }
+    }
+
+    if (rc == NO_ERROR) {
+        rc = startChannel(QCAMERA_CH_TYPE_VIDEO);
+    }
+
+    if (mParameters.isTNRSnapshotEnabled() && !isLowPowerMode()) {
+        QCameraChannel *pChannel = m_channels[QCAMERA_CH_TYPE_SNAPSHOT];
+        // Guard against a missing snapshot channel before using it
+        // (original code dereferenced pChannel without a NULL check).
+        if (NULL == pChannel) {
+            LOGE("Snapshot channel is NULL for TNR processing");
+            return UNKNOWN_ERROR;
+        }
+        if (!mParameters.is4k2kVideoResolution()) {
+            // Find and try to link a metadata stream from preview channel
+            QCameraChannel *pMetaChannel = NULL;
+            QCameraStream *pMetaStream = NULL;
+
+            if (m_channels[QCAMERA_CH_TYPE_PREVIEW] != NULL) {
+                pMetaChannel = m_channels[QCAMERA_CH_TYPE_PREVIEW];
+                uint32_t streamNum = pMetaChannel->getNumOfStreams();
+                QCameraStream *pStream = NULL;
+                for (uint32_t i = 0 ; i < streamNum ; i++ ) {
+                    pStream = pMetaChannel->getStreamByIndex(i);
+                    if ((NULL != pStream) &&
+                            (CAM_STREAM_TYPE_METADATA ==
+                            pStream->getMyType())) {
+                        pMetaStream = pStream;
+                        break;
+                    }
+                }
+            }
+
+            if ((NULL != pMetaChannel) && (NULL != pMetaStream)) {
+                rc = pChannel->linkStream(pMetaChannel, pMetaStream);
+                if (NO_ERROR != rc) {
+                    LOGW("Metadata stream link failed %d", rc);
+                }
+            }
+        }
+        LOGH("START snapshot Channel for TNR processing");
+        rc = pChannel->start();
+    }
+
+    if (rc == NO_ERROR) {
+        // Set power Hint for video encoding
+        m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, true);
+    }
+
+    LOGI("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stopRecording
+ *
+ * DESCRIPTION: stop recording impl. Tears down the TNR snapshot channel
+ *              (if enabled), stops the video channel, flushes pending
+ *              video callbacks and releases the encode power hint.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::stopRecording()
+{
+    LOGI("E");
+    // The snapshot channel was kept running for TNR; stop it first.
+    if (mParameters.isTNRSnapshotEnabled()) {
+        LOGH("STOP snapshot Channel for TNR processing");
+        stopChannel(QCAMERA_CH_TYPE_SNAPSHOT);
+    }
+    int ret = stopChannel(QCAMERA_CH_TYPE_VIDEO);
+
+    // Drop queued video callbacks before releasing the power hint.
+    m_cbNotifier.flushVideoNotifications();
+    m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
+    mVideoMem = NULL;
+    LOGI("X rc = %d", ret);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseRecordingFrame
+ *
+ * DESCRIPTION: return video frame impl
+ *
+ * PARAMETERS :
+ *   @opaque  : ptr to video frame to be returned
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::releaseRecordingFrame(const void * opaque)
+{
+    int32_t status = UNKNOWN_ERROR;
+    QCameraVideoChannel *videoCh =
+            (QCameraVideoChannel *)m_channels[QCAMERA_CH_TYPE_VIDEO];
+    LOGD("opaque data = %p",opaque);
+
+    // Without a video channel there is nowhere to return the frame to.
+    if (videoCh != NULL) {
+        status = videoCh->releaseFrame(opaque, mStoreMetaDataInFrame > 0);
+    }
+    return status;
+}
+
+/*===========================================================================
+ * FUNCTION   : autoFocus
+ *
+ * DESCRIPTION: start auto focus impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::autoFocus()
+{
+    int rc = NO_ERROR;
+    cam_focus_mode_type focusMode = mParameters.getFocusMode();
+    LOGH("E");
+
+    switch (focusMode) {
+    case CAM_FOCUS_MODE_AUTO:
+    case CAM_FOCUS_MODE_MACRO:
+    case CAM_FOCUS_MODE_CONTINOUS_VIDEO:
+    case CAM_FOCUS_MODE_CONTINOUS_PICTURE:
+        // Focus modes that support a sweep: kick off AF in the backend.
+        mActiveAF = true;
+        LOGI("Send AUTO FOCUS event. focusMode=%d, m_currentFocusState=%d",
+                focusMode, m_currentFocusState);
+        rc = mCameraHandle->ops->do_auto_focus(mCameraHandle->camera_handle);
+        break;
+    case CAM_FOCUS_MODE_INFINITY:
+    case CAM_FOCUS_MODE_FIXED:
+    case CAM_FOCUS_MODE_EDOF:
+    default:
+        // Fixed-focus modes: report immediate focus success to the client.
+        LOGI("No ops in focusMode (%d)", focusMode);
+        rc = sendEvtNotify(CAMERA_MSG_FOCUS, true, 0);
+        break;
+    }
+
+    if (NO_ERROR != rc) {
+        // AF did not start; clear the flag so no AF callback is expected.
+        mActiveAF = false;
+    }
+    LOGH("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : cancelAutoFocus
+ *
+ * DESCRIPTION: cancel auto focus impl; only meaningful for focus modes
+ *              that actually run an AF sweep
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::cancelAutoFocus()
+{
+    int rc = NO_ERROR;
+    cam_focus_mode_type focusMode = mParameters.getFocusMode();
+
+    if ((focusMode == CAM_FOCUS_MODE_AUTO) ||
+            (focusMode == CAM_FOCUS_MODE_MACRO) ||
+            (focusMode == CAM_FOCUS_MODE_CONTINOUS_VIDEO) ||
+            (focusMode == CAM_FOCUS_MODE_CONTINOUS_PICTURE)) {
+        // An AF sweep may be in flight; tell the backend to cancel it.
+        mActiveAF = false;
+        rc = mCameraHandle->ops->cancel_auto_focus(mCameraHandle->camera_handle);
+    } else {
+        // Fixed-focus modes (infinity/fixed/EDOF): nothing to cancel.
+        LOGD("No ops in focusMode (%d)", focusMode);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : processUFDumps
+ *
+ * DESCRIPTION: process UF jpeg dumps for refocus support
+ *
+ * PARAMETERS :
+ *   @evt     : payload of jpeg event, including information about jpeg encoding
+ *              status, jpeg size and so on.
+ *
+ * RETURN     : true  -- final (all-focus) image reached, or no UF refocus
+ *                       dump handling was needed
+ *              false -- more refocus output images are still pending, or
+ *                       the payload was invalid
+ *
+ * NOTE       : none
+ *==========================================================================*/
+bool QCamera2HardwareInterface::processUFDumps(qcamera_jpeg_evt_payload_t *evt)
+{
+   bool ret = true;
+   if (mParameters.isUbiRefocus()) {
+       // The last image of the burst is the all-focus composite.
+       int index = (int)getOutputImageCount();
+       bool allFocusImage = (index == ((int)mParameters.getRefocusOutputCount() - 1));
+       char name[FILENAME_MAX];
+
+       camera_memory_t *jpeg_mem = NULL;
+       omx_jpeg_ouput_buf_t *jpeg_out = NULL;
+       size_t dataLen;
+       uint8_t *dataPtr;
+       if (NO_ERROR != waitDeferredWork(mInitPProcJob)) {
+           LOGE("Init PProc Deferred work failed");
+           return false;
+       }
+       // Depending on the jpeg memory mode, the payload either holds the
+       // bits directly or wraps them in an OMX output buffer.
+       if (!m_postprocessor.getJpegMemOpt()) {
+           dataLen = evt->out_data.buf_filled_len;
+           dataPtr = evt->out_data.buf_vaddr;
+       } else {
+           jpeg_out  = (omx_jpeg_ouput_buf_t*) evt->out_data.buf_vaddr;
+           if (!jpeg_out) {
+              LOGE("Null pointer detected");
+              return false;
+           }
+           jpeg_mem = (camera_memory_t *)jpeg_out->mem_hdl;
+           if (!jpeg_mem) {
+              LOGE("Null pointer detected");
+              return false;
+           }
+           dataPtr = (uint8_t *)jpeg_mem->data;
+           dataLen = jpeg_mem->size;
+       }
+
+       if (allFocusImage)  {
+           snprintf(name, sizeof(name), "AllFocusImage");
+           index = -1;
+       } else {
+           // Name is always "0" here; presumably the index argument to
+           // CAM_DUMP_TO_FILE differentiates the files -- TODO confirm.
+           snprintf(name, sizeof(name), "%d", 0);
+       }
+       CAM_DUMP_TO_FILE(QCAMERA_DUMP_FRM_LOCATION"ubifocus", name, index, "jpg",
+           dataPtr, dataLen);
+       LOGD("Dump the image %d %d allFocusImage %d",
+           getOutputImageCount(), index, allFocusImage);
+       setOutputImageCount(getOutputImageCount() + 1);
+       if (!allFocusImage) {
+           ret = false;
+       }
+   }
+   return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : unconfigureAdvancedCapture
+ *
+ * DESCRIPTION: unconfigure Advanced Capture. Undoes whatever
+ *              configureAdvancedCapture set up for the active feature
+ *              (3A lock, tone map, bracketing, zoom, StillMore settings).
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::unconfigureAdvancedCapture()
+{
+    int32_t rc = NO_ERROR;
+
+    if (mAdvancedCaptureConfigured) {
+
+        mAdvancedCaptureConfigured = false;
+
+        if(mIs3ALocked) {
+            // Release the 3A lock taken during configure.
+            mParameters.set3ALock(false);
+            mIs3ALocked = false;
+        }
+        if (mParameters.isHDREnabled() || mParameters.isAEBracketEnabled()) {
+            // Tone map was disabled for HDR/AE bracketing; re-enable it.
+            rc = mParameters.setToneMapMode(true, true);
+            if (rc != NO_ERROR) {
+                LOGW("Failed to enable tone map during HDR/AEBracketing");
+            }
+            mHDRBracketingEnabled = false;
+            rc = mParameters.stopAEBracket();
+        } else if ((mParameters.isChromaFlashEnabled())
+                || (mFlashNeeded && !mLongshotEnabled)
+                || (mParameters.getLowLightLevel() != CAM_LOW_LIGHT_OFF)
+                || (mParameters.getManualCaptureMode() >= CAM_MANUAL_CAPTURE_TYPE_2)) {
+            rc = mParameters.resetFrameCapture(TRUE);
+        } else if (mParameters.isUbiFocusEnabled() || mParameters.isUbiRefocus()) {
+            rc = configureAFBracketing(false);
+        } else if (mParameters.isOptiZoomEnabled()) {
+            // Restore the zoom level saved in configureOptiZoom.
+            rc = mParameters.setAndCommitZoom(mZoomLevel);
+            setDisplaySkip(FALSE, CAMERA_MAX_PARAM_APPLY_DELAY);
+        } else if (mParameters.isStillMoreEnabled()) {
+            cam_still_more_t stillmore_config = mParameters.getStillMoreSettings();
+            stillmore_config.burst_count = 0;
+            mParameters.setStillMoreSettings(stillmore_config);
+
+            /* If SeeMore is running, it will handle re-enabling tone map */
+            if (!mParameters.isSeeMoreEnabled() && !mParameters.isLTMForSeeMoreEnabled()) {
+                rc = mParameters.setToneMapMode(true, true);
+                if (rc != NO_ERROR) {
+                    LOGW("Failed to enable tone map during StillMore");
+                }
+            }
+
+            /* Re-enable Tintless */
+            mParameters.setTintless(true);
+        } else {
+            LOGW("No Advanced Capture feature enabled!!");
+            rc = BAD_VALUE;
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : configureAdvancedCapture
+ *
+ * DESCRIPTION: configure Advanced Capture. Dispatches to the feature-
+ *              specific configure helper (UbiFocus/Refocus, OptiZoom, HDR,
+ *              AE bracketing, StillMore, frame capture) and decides
+ *              whether display frames should be skipped meanwhile.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::configureAdvancedCapture()
+{
+    LOGH("E");
+    int32_t rc = NO_ERROR;
+
+    rc = mParameters.checkFeatureConcurrency();
+    if (rc != NO_ERROR) {
+        LOGE("Cannot support Advanced capture modes");
+        return rc;
+    }
+
+    setOutputImageCount(0);
+    mInputCount = 0;
+    mAdvancedCaptureConfigured = true;
+    /* Display should be disabled for advanced modes */
+    bool bSkipDisplay = true;
+
+    if (getRelatedCamSyncInfo()->mode == CAM_MODE_SECONDARY) {
+        // no Advance capture settings for Aux camera
+        LOGH("X Secondary Camera, no need to process!! ");
+        return rc;
+    }
+
+    /* Do not stop display if in stillmore livesnapshot */
+    if (mParameters.isStillMoreEnabled() &&
+            mParameters.isSeeMoreEnabled()) {
+        bSkipDisplay = false;
+    }
+    if (mParameters.isUbiFocusEnabled() || mParameters.isUbiRefocus()) {
+        rc = configureAFBracketing();
+    } else if (mParameters.isOptiZoomEnabled()) {
+        rc = configureOptiZoom();
+    } else if(mParameters.isHDREnabled()) {
+        rc = configureHDRBracketing();
+        if (mHDRBracketingEnabled) {
+            // Tone map interferes with HDR exposure bracketing.
+            rc = mParameters.setToneMapMode(false, true);
+            if (rc != NO_ERROR) {
+                LOGW("Failed to disable tone map during HDR");
+            }
+        }
+    } else if (mParameters.isAEBracketEnabled()) {
+        rc = mParameters.setToneMapMode(false, true);
+        if (rc != NO_ERROR) {
+            LOGW("Failed to disable tone map during AEBracketing");
+        }
+        rc = configureAEBracketing();
+    } else if (mParameters.isStillMoreEnabled()) {
+        bSkipDisplay = false;
+        rc = configureStillMore();
+    } else if ((mParameters.isChromaFlashEnabled())
+            || (mParameters.getLowLightLevel() != CAM_LOW_LIGHT_OFF)
+            || (mParameters.getManualCaptureMode() >= CAM_MANUAL_CAPTURE_TYPE_2)) {
+        rc = mParameters.configFrameCapture(TRUE);
+    } else if (mFlashNeeded && !mLongshotEnabled) {
+        rc = mParameters.configFrameCapture(TRUE);
+        bSkipDisplay = false;
+    } else {
+        LOGH("Advanced Capture feature not enabled!! ");
+        mAdvancedCaptureConfigured = false;
+        bSkipDisplay = false;
+    }
+
+    LOGH("Stop preview temporarily for advanced captures");
+    setDisplaySkip(bSkipDisplay);
+
+    LOGH("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : configureAFBracketing
+ *
+ * DESCRIPTION: configure AF Bracketing for UbiFocus / UbiRefocus.
+ *
+ * PARAMETERS :
+ *   @enable  : true to enable AF bracketing, false to tear it down;
+ *              declared with a default argument (this file calls it both
+ *              with and without an argument)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::configureAFBracketing(bool enable)
+{
+    LOGH("E");
+    int32_t rc = NO_ERROR;
+    cam_af_bracketing_t *af_bracketing_need;
+
+    // Refocus and UbiFocus use different bracketing tables from the
+    // camera capability.
+    if (mParameters.isUbiRefocus()) {
+        af_bracketing_need =
+                &gCamCapability[mCameraId]->refocus_af_bracketing_need;
+    } else {
+        af_bracketing_need =
+                &gCamCapability[mCameraId]->ubifocus_af_bracketing_need;
+    }
+
+    //Enable AF Bracketing.
+    cam_af_bracketing_t afBracket;
+    memset(&afBracket, 0, sizeof(cam_af_bracketing_t));
+    afBracket.enable = enable;
+    afBracket.burst_count = af_bracketing_need->burst_count;
+
+    for(int8_t i = 0; i < MAX_AF_BRACKETING_VALUES; i++) {
+        afBracket.focus_steps[i] = af_bracketing_need->focus_steps[i];
+        LOGH("focus_step[%d] = %d", i, afBracket.focus_steps[i]);
+    }
+    //Send cmd to backend to set AF Bracketing for Ubi Focus.
+    rc = mParameters.commitAFBracket(afBracket);
+    if ( NO_ERROR != rc ) {
+        LOGE("cannot configure AF bracketing");
+        return rc;
+    }
+    // Lock 3A only when enabling; the unconfigure path releases the lock.
+    if (enable) {
+        mParameters.set3ALock(true);
+        mIs3ALocked = true;
+    }
+    LOGH("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : configureHDRBracketing
+ *
+ * DESCRIPTION: configure HDR Bracketing. Builds the exposure-value list
+ *              from the capability table and commits it as an AE bracket.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::configureHDRBracketing()
+{
+    LOGH("E");
+    int32_t rc = NO_ERROR;
+
+    cam_hdr_bracketing_info_t& hdrBracketingSetting =
+            gCamCapability[mCameraId]->hdr_bracketing_setting;
+
+    // 'values' should be in "idx1,idx2,idx3,..." format
+    uint32_t hdrFrameCount =
+            hdrBracketingSetting.num_frames;
+    LOGH("HDR values %d, %d frame count: %u",
+          (int8_t) hdrBracketingSetting.exp_val.values[0],
+          (int8_t) hdrBracketingSetting.exp_val.values[1],
+          hdrFrameCount);
+
+    // Enable AE Bracketing for HDR
+    cam_exp_bracketing_t aeBracket;
+    memset(&aeBracket, 0, sizeof(cam_exp_bracketing_t));
+    aeBracket.mode =
+        hdrBracketingSetting.exp_val.mode;
+
+    if (aeBracket.mode == CAM_EXP_BRACKETING_ON) {
+        mHDRBracketingEnabled = true;
+    }
+
+    String8 tmp;
+    // NOTE(review): assumes num_frames never exceeds the capacity of
+    // exp_val.values -- TODO confirm the capability population guarantees
+    // this bound.
+    for (uint32_t i = 0; i < hdrFrameCount; i++) {
+        tmp.appendFormat("%d",
+            (int8_t) hdrBracketingSetting.exp_val.values[i]);
+        tmp.append(",");
+    }
+    if (mParameters.isHDR1xFrameEnabled()
+        && mParameters.isHDR1xExtraBufferNeeded()) {
+            // Append a 0EV entry for the extra 1x frame.
+            tmp.appendFormat("%d", 0);
+            tmp.append(",");
+    }
+
+    if( !tmp.isEmpty() &&
+        ( MAX_EXP_BRACKETING_LENGTH > tmp.length() ) ) {
+        //Trim last comma
+        memset(aeBracket.values, '\0', MAX_EXP_BRACKETING_LENGTH);
+        memcpy(aeBracket.values, tmp.string(), tmp.length() - 1);
+    }
+
+    LOGH("HDR config values %s",
+          aeBracket.values);
+    rc = mParameters.setHDRAEBracket(aeBracket);
+    if ( NO_ERROR != rc ) {
+        LOGE("cannot configure HDR bracketing");
+        return rc;
+    }
+    LOGH("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : configureAEBracketing
+ *
+ * DESCRIPTION: configure AE Bracketing by committing the bracketing
+ *              settings already held in mParameters
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::configureAEBracketing()
+{
+    LOGH("E");
+
+    int32_t rc = mParameters.setAEBracketing();
+    if (rc != NO_ERROR) {
+        LOGE("cannot configure AE bracketing");
+        return rc;
+    }
+
+    LOGH("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : configureOptiZoom
+ *
+ * DESCRIPTION: configure Opti Zoom: save the user zoom, force 1x capture
+ *              and lock 3A for the burst
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::configureOptiZoom()
+{
+    // Remember the current zoom so unconfigureAdvancedCapture can
+    // restore it afterwards.
+    mZoomLevel = mParameters.getParmZoomLevel();
+
+    // OptiZoom captures at 1x; the saved zoom is re-applied later.
+    mParameters.setAndCommitZoom(0);
+
+    // Keep 3A stable across the burst.
+    mParameters.set3ALock(true);
+    mIs3ALocked = true;
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : configureStillMore
+ *
+ * DESCRIPTION: configure StillMore. Disables tone map and tintless, locks
+ *              3A, and derives the burst count from capability, dynamic
+ *              scene data, liveshot state and an optional setprop
+ *              override, in that order.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::configureStillMore()
+{
+    int32_t rc = NO_ERROR;
+    uint8_t burst_cnt = 0;
+    cam_still_more_t stillmore_config;
+    cam_still_more_t stillmore_cap;
+
+    /* Disable Tone Map. If seemore is enabled, it will handle disabling it. */
+    if (!mParameters.isSeeMoreEnabled() && !mParameters.isLTMForSeeMoreEnabled()) {
+        rc = mParameters.setToneMapMode(false, true);
+        if (rc != NO_ERROR) {
+            LOGW("Failed to disable tone map during StillMore");
+        }
+    }
+
+    /* Lock 3A */
+    mParameters.set3ALock(true);
+    mIs3ALocked = true;
+
+    /* Disable Tintless */
+    mParameters.setTintless(false);
+
+    /* Initialize burst count from capability */
+    stillmore_cap = mParameters.getStillMoreCapability();
+    burst_cnt = stillmore_cap.max_burst_count;
+
+    /* Reconfigure burst count from dynamic scene data */
+    cam_dyn_img_data_t dynamic_img_data = mParameters.getDynamicImgData();
+    if (dynamic_img_data.input_count >= stillmore_cap.min_burst_count &&
+            dynamic_img_data.input_count <= stillmore_cap.max_burst_count) {
+        burst_cnt = dynamic_img_data.input_count;
+    }
+
+    /* Reconfigure burst count in the case of liveshot */
+    if (mParameters.isSeeMoreEnabled()) {
+        burst_cnt = 1;
+    }
+
+    /* Reconfigure burst count from user input */
+    char prop[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.imglib.stillmore", prop, "0");
+    // atoi result is narrowed to uint8_t; values outside the capability
+    // range are clamped to max_burst_count below.
+    uint8_t burst_setprop = (uint32_t)atoi(prop);
+    if (burst_setprop != 0)  {
+       if ((burst_setprop < stillmore_cap.min_burst_count) ||
+               (burst_setprop > stillmore_cap.max_burst_count)) {
+           burst_cnt = stillmore_cap.max_burst_count;
+       } else {
+           burst_cnt = burst_setprop;
+       }
+    }
+
+    memset(&stillmore_config, 0, sizeof(cam_still_more_t));
+    stillmore_config.burst_count = burst_cnt;
+    mParameters.setStillMoreSettings(stillmore_config);
+
+    LOGH("Stillmore burst %d", burst_cnt);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stopAdvancedCapture
+ *
+ * DESCRIPTION: stops advanced capture based on capture type. Branch order
+ *              mirrors startAdvancedCapture/configureAdvancedCapture.
+ *
+ * PARAMETERS :
+ *   @pChannel : channel.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::stopAdvancedCapture(
+        QCameraPicChannel *pChannel)
+{
+    // Fixed log message typo: "bracketig" -> "bracketing".
+    LOGH("stop bracketing");
+    int32_t rc = NO_ERROR;
+
+    if(mParameters.isUbiFocusEnabled() || mParameters.isUbiRefocus()) {
+        rc = pChannel->stopAdvancedCapture(MM_CAMERA_AF_BRACKETING);
+    } else if (mParameters.isChromaFlashEnabled()
+            || (mFlashNeeded && !mLongshotEnabled)
+            || (mParameters.getLowLightLevel() != CAM_LOW_LIGHT_OFF)
+            || (mParameters.getManualCaptureMode() >= CAM_MANUAL_CAPTURE_TYPE_2)) {
+        rc = pChannel->stopAdvancedCapture(MM_CAMERA_FRAME_CAPTURE);
+    } else if(mParameters.isHDREnabled()
+            || mParameters.isAEBracketEnabled()) {
+        rc = pChannel->stopAdvancedCapture(MM_CAMERA_AE_BRACKETING);
+    } else if (mParameters.isOptiZoomEnabled()) {
+        rc = pChannel->stopAdvancedCapture(MM_CAMERA_ZOOM_1X);
+    } else if (mParameters.isStillMoreEnabled()) {
+        // StillMore has no backend stop action.
+        LOGH("stopAdvancedCapture not needed for StillMore");
+    } else {
+        LOGH("No Advanced Capture feature enabled!");
+        rc = BAD_VALUE;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : startAdvancedCapture
+ *
+ * DESCRIPTION: starts advanced capture based on capture type
+ *
+ * PARAMETERS :
+ *   @pChannel : picture channel to start advanced capture on.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::startAdvancedCapture(
+        QCameraPicChannel *pChannel)
+{
+    LOGH("Start bracketing");
+
+    // Dispatch to the backend capture mode of the active advanced feature.
+    // Conditions are evaluated in the same priority order as before.
+    if (mParameters.isUbiFocusEnabled() || mParameters.isUbiRefocus()) {
+        return pChannel->startAdvancedCapture(MM_CAMERA_AF_BRACKETING);
+    }
+
+    if (mParameters.isOptiZoomEnabled()) {
+        return pChannel->startAdvancedCapture(MM_CAMERA_ZOOM_1X);
+    }
+
+    if (mParameters.isStillMoreEnabled()) {
+        // StillMore needs no explicit start command on the channel.
+        LOGH("startAdvancedCapture not needed for StillMore");
+        return NO_ERROR;
+    }
+
+    if (mParameters.isHDREnabled() || mParameters.isAEBracketEnabled()) {
+        return pChannel->startAdvancedCapture(MM_CAMERA_AE_BRACKETING);
+    }
+
+    if (mParameters.isChromaFlashEnabled()
+            || (mFlashNeeded && !mLongshotEnabled)
+            || (mParameters.getLowLightLevel() != CAM_LOW_LIGHT_OFF)
+            || (mParameters.getManualCaptureMode() >= CAM_MANUAL_CAPTURE_TYPE_2)) {
+        cam_capture_frame_config_t frameConfig =
+                mParameters.getCaptureFrameConfig();
+        return pChannel->startAdvancedCapture(MM_CAMERA_FRAME_CAPTURE,
+                &frameConfig);
+    }
+
+    LOGE("No Advanced Capture feature enabled!");
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : preTakePicture
+ *
+ * DESCRIPTION: Prepare take picture impl, Restarts preview if necessary
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::preTakePicture()
+{
+    int32_t rc = NO_ERROR;
+    LOGH("E");
+
+    // A preview restart is only required when the recording hint is set.
+    if (mParameters.getRecordingHintValue() == true) {
+        if (getRelatedCamSyncInfo()->sync_control ==
+                CAM_SYNC_RELATED_SENSORS_ON) {
+            // Dual-cam mode: the muxer owns restart control, so only flag
+            // that a preview restart is needed.
+            mPreviewRestartNeeded = true;
+        } else {
+            // Single camera mode: HWI restarts preview itself with the
+            // recording hint cleared.
+            LOGH("restart preview if rec hint is true and preview is running");
+            stopPreview();
+            mParameters.updateRecordingHintValue(FALSE);
+            // start preview again
+            rc = preparePreview();
+            if (rc == NO_ERROR) {
+                rc = startPreview();
+                if (rc != NO_ERROR) {
+                    unpreparePreview();
+                }
+            }
+        }
+    }
+
+    LOGH("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : takePicture
+ *
+ * DESCRIPTION: take picture impl. Routes the request to the ZSL channel,
+ *              the non-ZSL capture channel, or the RAW channel depending on
+ *              current parameters, and queues the deferred postprocessor /
+ *              JPEG-session jobs needed for encoding.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::takePicture()
+{
+    int rc = NO_ERROR;
+
+    // Get total number for snapshots (retro + regular)
+    uint8_t numSnapshots = mParameters.getNumOfSnapshots();
+    // Get number of retro-active snapshots
+    uint8_t numRetroSnapshots = mParameters.getNumOfRetroSnapshots();
+    LOGH("E");
+
+    //Set rotation value from user settings as Jpeg rotation
+    //to configure back-end modules.
+    mParameters.setJpegRotation(mParameters.getRotation());
+
+    // Check if retro-active snapshots are not enabled
+    // (retro snapshots are only meaningful in ZSL mode)
+    if (!isRetroPicture() || !mParameters.isZSLMode()) {
+      numRetroSnapshots = 0;
+      LOGH("Reset retro snaphot count to zero");
+    }
+
+    //Do special configure for advanced capture modes.
+    rc = configureAdvancedCapture();
+    if (rc != NO_ERROR) {
+        LOGE("Unsupported capture call");
+        return rc;
+    }
+
+    // Advanced capture modes override the requested snapshot count with
+    // their own burst count.
+    if (mAdvancedCaptureConfigured) {
+        numSnapshots = mParameters.getBurstCountForAdvancedCapture();
+    }
+    LOGI("snap count = %d zsl = %d advanced = %d",
+            numSnapshots, mParameters.isZSLMode(), mAdvancedCaptureConfigured);
+
+    if (mParameters.isZSLMode()) {
+        // ZSL path: capture from the already-running ZSL channel.
+        QCameraChannel *pChannel = m_channels[QCAMERA_CH_TYPE_ZSL];
+        QCameraPicChannel *pPicChannel = (QCameraPicChannel *)pChannel;
+        if (NULL != pPicChannel) {
+
+            // Offline RAW reprocess captures from the RAW channel instead.
+            if (mParameters.getofflineRAW()) {
+                startRAWChannel(pPicChannel);
+                pPicChannel = (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_RAW];
+                if (pPicChannel == NULL) {
+                    LOGE("RAW Channel is NULL in Manual capture mode");
+                    stopRAWChannel();
+                    return UNKNOWN_ERROR;
+                }
+            }
+
+            rc = configureOnlineRotation(*pPicChannel);
+            if (rc != NO_ERROR) {
+                LOGE("online rotation failed");
+                return rc;
+            }
+
+            // start postprocessor
+            DeferWorkArgs args;
+            memset(&args, 0, sizeof(DeferWorkArgs));
+
+            args.pprocArgs = pPicChannel;
+
+            // No need to wait for mInitPProcJob here, because it was
+            // queued in startPreview, and will definitely be processed before
+            // mReprocJob can begin.
+            mReprocJob = queueDeferredWork(CMD_DEF_PPROC_START,
+                    args);
+            if (mReprocJob == 0) {
+                LOGE("Failure: Unable to start pproc");
+                return -ENOMEM;
+            }
+
+            // Check if all preview buffers are mapped before creating
+            // a jpeg session as preview stream buffers are queried during the same
+            uint8_t numStreams = pChannel->getNumOfStreams();
+            QCameraStream *pStream = NULL;
+            QCameraStream *pPreviewStream = NULL;
+            for (uint8_t i = 0 ; i < numStreams ; i++ ) {
+                pStream = pChannel->getStreamByIndex(i);
+                if (!pStream)
+                    continue;
+                if (CAM_STREAM_TYPE_PREVIEW == pStream->getMyType()) {
+                    pPreviewStream = pStream;
+                    break;
+                }
+            }
+            if (pPreviewStream != NULL) {
+                Mutex::Autolock l(mMapLock);
+                QCameraMemory *pMemory = pStream->getStreamBufs();
+                if (!pMemory) {
+                    LOGE("Error!! pMemory is NULL");
+                    return -ENOMEM;
+                }
+
+                // Bounded wait (2 tries) for the deferred buffer mapping to
+                // complete; mMapCond is signalled as buffers get mapped.
+                uint8_t waitCnt = 2;
+                while (!pMemory->checkIfAllBuffersMapped() && (waitCnt > 0)) {
+                    LOGL(" Waiting for preview buffers to be mapped");
+                    mMapCond.waitRelative(
+                            mMapLock, CAMERA_DEFERRED_MAP_BUF_TIMEOUT);
+                    LOGL("Wait completed!!");
+                    waitCnt--;
+                }
+                // If all buffers are not mapped after retries, assert
+                assert(pMemory->checkIfAllBuffersMapped());
+            } else {
+                assert(pPreviewStream);
+            }
+
+            // Create JPEG session
+            mJpegJob = queueDeferredWork(CMD_DEF_CREATE_JPEG_SESSION,
+                    args);
+            if (mJpegJob == 0) {
+                LOGE("Failed to queue CREATE_JPEG_SESSION");
+                // Roll back: wait for the already-queued reprocess job and
+                // stop the postprocessor before bailing out.
+                if (NO_ERROR != waitDeferredWork(mReprocJob)) {
+                        LOGE("Reprocess Deferred work was failed");
+                }
+                m_postprocessor.stop();
+                return -ENOMEM;
+            }
+
+            if (mAdvancedCaptureConfigured) {
+                rc = startAdvancedCapture(pPicChannel);
+                if (rc != NO_ERROR) {
+                    LOGE("cannot start zsl advanced capture");
+                    return rc;
+                }
+            }
+            if (mLongshotEnabled && mPrepSnapRun) {
+                mCameraHandle->ops->start_zsl_snapshot(
+                        mCameraHandle->camera_handle,
+                        pPicChannel->getMyHandle());
+            }
+            // If frame sync is ON and it is a SECONDARY camera,
+            // we do not need to send the take picture command to interface
+            // It will be handled along with PRIMARY camera takePicture request
+            mm_camera_req_buf_t buf;
+            memset(&buf, 0x0, sizeof(buf));
+            if ((!mParameters.isAdvCamFeaturesEnabled() &&
+                    !mFlashNeeded &&
+                    !isLongshotEnabled() &&
+                    isFrameSyncEnabled()) &&
+                    (getRelatedCamSyncInfo()->sync_control ==
+                    CAM_SYNC_RELATED_SENSORS_ON)) {
+                if (getRelatedCamSyncInfo()->mode == CAM_MODE_PRIMARY) {
+                    buf.type = MM_CAMERA_REQ_FRAME_SYNC_BUF;
+                    buf.num_buf_requested = numSnapshots;
+                    rc = pPicChannel->takePicture(&buf);
+                    if (rc != NO_ERROR) {
+                        LOGE("FS_DBG cannot take ZSL picture, stop pproc");
+                        if (NO_ERROR != waitDeferredWork(mReprocJob)) {
+                            LOGE("Reprocess Deferred work failed");
+                            return UNKNOWN_ERROR;
+                        }
+                        if (NO_ERROR != waitDeferredWork(mJpegJob)) {
+                            LOGE("Jpeg Deferred work failed");
+                            return UNKNOWN_ERROR;
+                        }
+                        m_postprocessor.stop();
+                        return rc;
+                    }
+                    LOGI("PRIMARY camera: send frame sync takePicture!!");
+                }
+            } else {
+                buf.type = MM_CAMERA_REQ_SUPER_BUF;
+                buf.num_buf_requested = numSnapshots;
+                buf.num_retro_buf_requested = numRetroSnapshots;
+                rc = pPicChannel->takePicture(&buf);
+                if (rc != NO_ERROR) {
+                    LOGE("cannot take ZSL picture, stop pproc");
+                        if (NO_ERROR != waitDeferredWork(mReprocJob)) {
+                            LOGE("Reprocess Deferred work failed");
+                            return UNKNOWN_ERROR;
+                        }
+                        if (NO_ERROR != waitDeferredWork(mJpegJob)) {
+                            LOGE("Jpeg Deferred work failed");
+                            return UNKNOWN_ERROR;
+                        }
+                    m_postprocessor.stop();
+                    return rc;
+                }
+            }
+        } else {
+            LOGE("ZSL channel is NULL");
+            return UNKNOWN_ERROR;
+        }
+    } else {
+
+        // Non-ZSL path: either a processed (JPEG/NV16/NV21) capture channel
+        // or a RAW-only capture.
+        // start snapshot
+        if (mParameters.isJpegPictureFormat() ||
+                mParameters.isNV16PictureFormat() ||
+                mParameters.isNV21PictureFormat()) {
+
+            //STOP Preview for Non ZSL use case
+            stopPreview();
+
+            //Config CAPTURE channels
+            rc = declareSnapshotStreams();
+            if (NO_ERROR != rc) {
+                return rc;
+            }
+
+            rc = addCaptureChannel();
+            if ((rc == NO_ERROR) &&
+                    (NULL != m_channels[QCAMERA_CH_TYPE_CAPTURE])) {
+
+                if (!mParameters.getofflineRAW()) {
+                    rc = configureOnlineRotation(
+                        *m_channels[QCAMERA_CH_TYPE_CAPTURE]);
+                    if (rc != NO_ERROR) {
+                        LOGE("online rotation failed");
+                        delChannel(QCAMERA_CH_TYPE_CAPTURE);
+                        return rc;
+                    }
+                }
+
+                DeferWorkArgs args;
+                memset(&args, 0, sizeof(DeferWorkArgs));
+
+                args.pprocArgs = m_channels[QCAMERA_CH_TYPE_CAPTURE];
+
+                // No need to wait for mInitPProcJob here, because it was
+                // queued in startPreview, and will definitely be processed before
+                // mReprocJob can begin.
+                mReprocJob = queueDeferredWork(CMD_DEF_PPROC_START,
+                        args);
+                if (mReprocJob == 0) {
+                    LOGE("Failure: Unable to start pproc");
+                    return -ENOMEM;
+                }
+
+                // Create JPEG session
+                mJpegJob = queueDeferredWork(CMD_DEF_CREATE_JPEG_SESSION,
+                        args);
+                if (mJpegJob == 0) {
+                    LOGE("Failed to queue CREATE_JPEG_SESSION");
+                    if (NO_ERROR != waitDeferredWork(mReprocJob)) {
+                        LOGE("Reprocess Deferred work was failed");
+                    }
+                    m_postprocessor.stop();
+                    return -ENOMEM;
+                }
+
+                // start catpure channel
+                rc =  m_channels[QCAMERA_CH_TYPE_CAPTURE]->start();
+                if (rc != NO_ERROR) {
+                    LOGE("cannot start capture channel");
+                    if (NO_ERROR != waitDeferredWork(mReprocJob)) {
+                        LOGE("Reprocess Deferred work failed");
+                        return UNKNOWN_ERROR;
+                    }
+                    if (NO_ERROR != waitDeferredWork(mJpegJob)) {
+                        LOGE("Jpeg Deferred work failed");
+                        return UNKNOWN_ERROR;
+                    }
+                    delChannel(QCAMERA_CH_TYPE_CAPTURE);
+                    return rc;
+                }
+
+                QCameraPicChannel *pCapChannel =
+                    (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_CAPTURE];
+                if (NULL != pCapChannel) {
+                    if (mParameters.isUbiFocusEnabled() ||
+                            mParameters.isUbiRefocus() ||
+                            mParameters.isChromaFlashEnabled()) {
+                        rc = startAdvancedCapture(pCapChannel);
+                        if (rc != NO_ERROR) {
+                            LOGE("cannot start advanced capture");
+                            return rc;
+                        }
+                    }
+                }
+                if ( mLongshotEnabled ) {
+                    rc = longShot();
+                    if (NO_ERROR != rc) {
+                        if (NO_ERROR != waitDeferredWork(mReprocJob)) {
+                            LOGE("Reprocess Deferred work failed");
+                            return UNKNOWN_ERROR;
+                        }
+                        if (NO_ERROR != waitDeferredWork(mJpegJob)) {
+                            LOGE("Jpeg Deferred work failed");
+                            return UNKNOWN_ERROR;
+                        }
+                        delChannel(QCAMERA_CH_TYPE_CAPTURE);
+                        return rc;
+                    }
+                }
+            } else {
+                LOGE("cannot add capture channel");
+                delChannel(QCAMERA_CH_TYPE_CAPTURE);
+                return rc;
+            }
+        } else {
+            // RAW-only (non-processed) snapshot path.
+            // Stop Preview before taking NZSL snapshot
+            stopPreview();
+
+            rc = mParameters.updateRAW(gCamCapability[mCameraId]->raw_dim[0]);
+            if (NO_ERROR != rc) {
+                LOGE("Raw dimension update failed %d", rc);
+                return rc;
+            }
+
+            rc = declareSnapshotStreams();
+            if (NO_ERROR != rc) {
+                LOGE("RAW stream info configuration failed %d", rc);
+                return rc;
+            }
+
+            rc = addChannel(QCAMERA_CH_TYPE_RAW);
+            if (rc == NO_ERROR) {
+                // start postprocessor
+                if (NO_ERROR != waitDeferredWork(mInitPProcJob)) {
+                    LOGE("Reprocess Deferred work failed");
+                    return UNKNOWN_ERROR;
+                }
+
+                rc = m_postprocessor.start(m_channels[QCAMERA_CH_TYPE_RAW]);
+                if (rc != NO_ERROR) {
+                    LOGE("cannot start postprocessor");
+                    delChannel(QCAMERA_CH_TYPE_RAW);
+                    return rc;
+                }
+
+                rc = startChannel(QCAMERA_CH_TYPE_RAW);
+                if (rc != NO_ERROR) {
+                    LOGE("cannot start raw channel");
+                    m_postprocessor.stop();
+                    delChannel(QCAMERA_CH_TYPE_RAW);
+                    return rc;
+                }
+            } else {
+                LOGE("cannot add raw channel");
+                return rc;
+            }
+        }
+    }
+
+    //When take picture, stop sending preview callbacks to APP
+    m_stateMachine.setPreviewCallbackNeeded(false);
+    LOGI("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : configureOnlineRotation
+ *
+ * DESCRIPTION: Configure backend with expected rotation for snapshot stream
+ *
+ * PARAMETERS :
+ *    @ch     : Channel containing a snapshot stream
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::configureOnlineRotation(QCameraChannel &ch)
+{
+    QCameraStream *pSnapStream = NULL;
+
+    // Locate the snapshot (or RAW) stream inside this channel.
+    for (uint8_t idx = 0; idx < ch.getNumOfStreams(); idx++) {
+        QCameraStream *pCur = ch.getStreamByIndex(idx);
+        if (NULL == pCur) {
+            continue;
+        }
+        if ((CAM_STREAM_TYPE_SNAPSHOT == pCur->getMyType())
+                || (CAM_STREAM_TYPE_RAW == pCur->getMyType())) {
+            pSnapStream = pCur;
+            break;
+        }
+    }
+
+    if (NULL == pSnapStream) {
+        LOGE("No snapshot stream found!");
+        return BAD_VALUE;
+    }
+
+    // Update online rotation configuration
+    uint32_t streamId = pSnapStream->getMyServerID();
+    int rc = mParameters.addOnlineRotation(mParameters.getJpegRotation(),
+            streamId, mParameters.getDeviceRotation());
+    if (rc != NO_ERROR) {
+        LOGE("addOnlineRotation failed %d", rc);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : declareSnapshotStreams
+ *
+ * DESCRIPTION: Configure backend with expected snapshot streams
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::declareSnapshotStreams()
+{
+    // Push the snapshot stream configuration down to the backend,
+    // accounting for longshot mode.
+    int32_t rc = mParameters.setStreamConfigure(true, mLongshotEnabled, false);
+    if (NO_ERROR != rc) {
+        LOGE("setStreamConfigure failed %d", rc);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : longShot
+ *
+ * DESCRIPTION: Queue one more ZSL frame
+ *              in the longshot pipe.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::longShot()
+{
+    int32_t rc = NO_ERROR;
+    uint8_t numSnapshots = mParameters.getNumOfSnapshots();
+    QCameraPicChannel *pChannel = NULL;
+
+    // Longshot captures from the ZSL channel in ZSL mode, otherwise from
+    // the regular capture channel.
+    if (mParameters.isZSLMode()) {
+        pChannel = (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_ZSL];
+    } else {
+        pChannel = (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_CAPTURE];
+    }
+
+    if (NULL == pChannel) {
+        LOGE("Capture channel not initialized!");
+        return NO_INIT;
+    }
+
+    // Request one more batch of frames on the longshot pipe.
+    mm_camera_req_buf_t buf;
+    memset(&buf, 0x0, sizeof(buf));
+    buf.type = MM_CAMERA_REQ_SUPER_BUF;
+    buf.num_buf_requested = numSnapshots;
+    rc = pChannel->takePicture(&buf);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stopCaptureChannel
+ *
+ * DESCRIPTION: Stops capture channel
+ *
+ * PARAMETERS :
+ *   @destroy : Set to true to stop and delete camera channel.
+ *              Set to false to only stop capture channel.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::stopCaptureChannel(bool destroy)
+{
+    // Only the processed picture formats use the capture channel;
+    // nothing to stop otherwise.
+    if (!(mParameters.isJpegPictureFormat() ||
+            mParameters.isNV16PictureFormat() ||
+            mParameters.isNV21PictureFormat())) {
+        return NO_ERROR;
+    }
+
+    int rc = stopChannel(QCAMERA_CH_TYPE_CAPTURE);
+    if (destroy && (NO_ERROR == rc)) {
+        // Destroy camera channel but dont release context
+        waitDeferredWork(mJpegJob);
+        rc = delChannel(QCAMERA_CH_TYPE_CAPTURE, false);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : cancelPicture
+ *
+ * DESCRIPTION: cancel picture impl. Waits for in-flight deferred jobs,
+ *              stops postprocessing, and tears down the channel used by
+ *              the current capture mode.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::cancelPicture()
+{
+    // Ensure deferred reprocess/JPEG-session jobs finish before stopping
+    // the postprocessor they feed.
+    waitDeferredWork(mReprocJob);
+    waitDeferredWork(mJpegJob);
+
+    //stop post processor
+    m_postprocessor.stop();
+
+    unconfigureAdvancedCapture();
+    LOGH("Enable display frames again");
+    setDisplaySkip(FALSE);
+
+    // Longshot keeps the perf lock across shots; release it otherwise.
+    if (!mLongshotEnabled) {
+        m_perfLock.lock_rel();
+    }
+
+    if (mParameters.isZSLMode()) {
+        // ZSL: cancel on the channel that was capturing (RAW channel when
+        // offline RAW reprocess is enabled, ZSL channel otherwise).
+        QCameraPicChannel *pPicChannel = NULL;
+        if (mParameters.getofflineRAW()) {
+            pPicChannel = (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_RAW];
+        } else {
+            pPicChannel = (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_ZSL];
+        }
+        if (NULL != pPicChannel) {
+            pPicChannel->cancelPicture();
+            stopRAWChannel();
+            stopAdvancedCapture(pPicChannel);
+        }
+    } else {
+
+        // normal capture case: stop and delete whichever non-ZSL channel
+        // the picture format selected.
+        if (mParameters.isJpegPictureFormat() ||
+            mParameters.isNV16PictureFormat() ||
+            mParameters.isNV21PictureFormat()) {
+            stopChannel(QCAMERA_CH_TYPE_CAPTURE);
+            delChannel(QCAMERA_CH_TYPE_CAPTURE);
+        } else {
+            stopChannel(QCAMERA_CH_TYPE_RAW);
+            delChannel(QCAMERA_CH_TYPE_RAW);
+        }
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : captureDone
+ *
+ * DESCRIPTION: Function called when the capture is completed before encoding
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::captureDone()
+{
+    // Post a ZSL-capture-done internal event to the state machine.
+    size_t payloadSize = sizeof(qcamera_sm_internal_evt_payload_t);
+    qcamera_sm_internal_evt_payload_t *payload =
+            (qcamera_sm_internal_evt_payload_t *)malloc(payloadSize);
+    if (NULL == payload) {
+        LOGE("No memory for ZSL capture done event");
+        return;
+    }
+
+    memset(payload, 0, payloadSize);
+    payload->evt_type = QCAMERA_INTERNAL_EVT_ZSL_CAPTURE_DONE;
+    // On success the state machine takes ownership of the payload;
+    // free it only when dispatch fails.
+    if (processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload) != NO_ERROR) {
+        LOGE("processEvt ZSL capture done failed");
+        free(payload);
+        payload = NULL;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : Live_Snapshot_thread
+ *
+ * DESCRIPTION: Seperate thread for taking live snapshot during recording
+ *
+ * PARAMETERS : @data - pointer to QCamera2HardwareInterface class object
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void* Live_Snapshot_thread (void* data)
+{
+    QCamera2HardwareInterface *hw =
+            reinterpret_cast<QCamera2HardwareInterface *>(data);
+    if (hw == NULL) {
+        LOGE("take_picture_thread: NULL camera device");
+        return (void *)BAD_VALUE;
+    }
+
+    // bLiveSnapshot selects between a take and a cancel request.
+    if (hw->bLiveSnapshot) {
+        hw->takeLiveSnapshot_internal();
+    } else {
+        hw->cancelLiveSnapshot_internal();
+    }
+
+    return (void *)NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : Int_Pic_thread
+ *
+ * DESCRIPTION: Seperate thread for taking snapshot triggered by camera backend
+ *
+ * PARAMETERS : @data - pointer to QCamera2HardwareInterface class object
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void* Int_Pic_thread (void* data)
+{
+    QCamera2HardwareInterface *hw =
+            reinterpret_cast<QCamera2HardwareInterface *>(data);
+    if (hw == NULL) {
+        LOGE("take_picture_thread: NULL camera device");
+        return (void *)BAD_VALUE;
+    }
+
+    bool jpegMemOpt = false;
+    char rawFormat[PROPERTY_VALUE_MAX];
+    memset(rawFormat, 0, sizeof(rawFormat));
+
+    int rc = hw->takeBackendPic_internal(&jpegMemOpt, &rawFormat[0]);
+    if (NO_ERROR == rc) {
+        hw->checkIntPicPending(jpegMemOpt, &rawFormat[0]);
+    } else {
+        // Snapshot attempt not successful; clean up pending backend events.
+        hw->clearIntPendingEvents();
+    }
+
+    return (void *)NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : takeLiveSnapshot
+ *
+ * DESCRIPTION: take live snapshot during recording
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::takeLiveSnapshot()
+{
+    // Reap any previous live snapshot thread before spawning a new one.
+    if (mLiveSnapshotThread != 0) {
+        pthread_join(mLiveSnapshotThread, NULL);
+        mLiveSnapshotThread = 0;
+    }
+
+    bLiveSnapshot = true;
+    int rc = pthread_create(&mLiveSnapshotThread, NULL,
+            Live_Snapshot_thread, (void *)this);
+    if (rc == 0) {
+        pthread_setname_np(mLiveSnapshotThread, "CAM_liveSnap");
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : takePictureInternal
+ *
+ * DESCRIPTION: take snapshot triggered by backend
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::takePictureInternal()
+{
+    // Run the backend-triggered snapshot on its own thread so this call
+    // returns immediately.
+    int rc = pthread_create(&mIntPicThread, NULL, Int_Pic_thread, (void *)this);
+    if (rc == 0) {
+        pthread_setname_np(mIntPicThread, "CAM_IntPic");
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : checkIntPicPending
+ *
+ * DESCRIPTION: timed wait for jpeg completion event, and send
+ *                        back completion event to backend
+ *
+ * PARAMETERS :
+ *   @JpegMemOpt  : previous JPEG memory-optimization setting to restore
+ *                  after a JPEG internal snapshot
+ *   @raw_format  : saved value of persist.camera.raw.format to restore
+ *                  after a RAW internal snapshot
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::checkIntPicPending(bool JpegMemOpt, char *raw_format)
+{
+    bool bSendToBackend = true;
+    cam_int_evt_params_t params;
+    int rc = NO_ERROR;
+
+    // Absolute deadline: now + 5 seconds for the snapshot-done signal.
+    struct timespec   ts;
+    struct timeval    tp;
+    gettimeofday(&tp, NULL);
+    ts.tv_sec  = tp.tv_sec + 5;
+    ts.tv_nsec = tp.tv_usec * 1000;
+
+    if (true == m_bIntJpegEvtPending ||
+        (true == m_bIntRawEvtPending)) {
+        //Waiting in HAL for snapshot taken notification
+        pthread_mutex_lock(&m_int_lock);
+        rc = pthread_cond_timedwait(&m_int_cond, &m_int_lock, &ts);
+        if (ETIMEDOUT == rc || 0x0 == m_BackendFileName[0]) {
+            //Hit a timeout, or some spurious activity
+            bSendToBackend = false;
+        }
+
+        // Fill event type and stream format according to which internal
+        // snapshot (JPEG vs RAW) was pending.
+        if (true == m_bIntJpegEvtPending) {
+            params.event_type = 0;
+            mParameters.getStreamFormat(CAM_STREAM_TYPE_SNAPSHOT, params.picture_format);
+        } else if (true == m_bIntRawEvtPending) {
+            params.event_type = 1;
+            mParameters.getStreamFormat(CAM_STREAM_TYPE_RAW, params.picture_format);
+        }
+        pthread_mutex_unlock(&m_int_lock);
+
+        if (true == m_bIntJpegEvtPending) {
+            //Attempting to restart preview after taking JPEG snapshot
+            lockAPI();
+            rc = processAPI(QCAMERA_SM_EVT_SNAPSHOT_DONE, NULL);
+            unlockAPI();
+            m_postprocessor.setJpegMemOpt(JpegMemOpt);
+        } else if (true == m_bIntRawEvtPending) {
+            //Attempting to restart preview after taking RAW snapshot
+            stopChannel(QCAMERA_CH_TYPE_RAW);
+            delChannel(QCAMERA_CH_TYPE_RAW);
+            //restoring the old raw format
+            property_set("persist.camera.raw.format", raw_format);
+        }
+
+        if (true == bSendToBackend) {
+            //send event back to server with the file path
+            params.dim = m_postprocessor.m_dst_dim;
+            memcpy(&params.path[0], &m_BackendFileName[0], QCAMERA_MAX_FILEPATH_LENGTH);
+            memset(&m_BackendFileName[0], 0x0, QCAMERA_MAX_FILEPATH_LENGTH);
+            params.size = mBackendFileSize;
+            rc = mParameters.setIntEvent(params);
+        }
+
+        clearIntPendingEvents();
+    }
+
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : takeBackendPic_internal
+ *
+ * DESCRIPTION: take snapshot triggered by backend (internal eztune JPEG or
+ *              RAW capture request, flagged via m_bIntJpegEvtPending /
+ *              m_bIntRawEvtPending)
+ *
+ * PARAMETERS :
+ *   @JpegMemOpt : [OUT] previous postprocessor JPEG memory-optimization
+ *                 setting, saved here so the caller can restore it after
+ *                 the internal JPEG capture completes
+ *   @raw_format : [OUT] buffer receiving the prior value of the
+ *                 "persist.camera.raw.format" property; the caller restores
+ *                 it once the RAW capture finishes
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::takeBackendPic_internal(bool *JpegMemOpt, char *raw_format)
+{
+    int rc = NO_ERROR;
+    qcamera_api_result_t apiResult;
+
+    lockAPI();
+    //Set rotation value from user settings as Jpeg rotation
+    //to configure back-end modules.
+    mParameters.setJpegRotation(mParameters.getRotation());
+
+    setRetroPicture(0);
+    /* Prepare snapshot in case LED needs to be flashed */
+    if (mFlashNeeded == 1 || mParameters.isChromaFlashEnabled()) {
+        // Start Preparing for normal Frames
+        LOGH("Start Prepare Snapshot");
+        /* Prepare snapshot in case LED needs to be flashed */
+        rc = processAPI(QCAMERA_SM_EVT_PREPARE_SNAPSHOT, NULL);
+        if (rc == NO_ERROR) {
+            // Block until the state machine reports the prepare result
+            waitAPIResult(QCAMERA_SM_EVT_PREPARE_SNAPSHOT, &apiResult);
+            rc = apiResult.status;
+        }
+        LOGH("Prep Snapshot done rc = %d", rc);
+        mPrepSnapRun = true;
+    }
+    unlockAPI();
+
+    if (true == m_bIntJpegEvtPending) {
+        //Attempting to take JPEG snapshot
+        if (NO_ERROR != waitDeferredWork(mInitPProcJob)) {
+            LOGE("Init PProc Deferred work failed");
+            return UNKNOWN_ERROR;
+        }
+        // Save the current JPEG memory optimization and disable it for
+        // this internal capture; caller restores it from *JpegMemOpt.
+        *JpegMemOpt = m_postprocessor.getJpegMemOpt();
+        m_postprocessor.setJpegMemOpt(false);
+
+        /* capture */
+        lockAPI();
+        LOGH("Capturing internal snapshot");
+        rc = processAPI(QCAMERA_SM_EVT_TAKE_PICTURE, NULL);
+        if (rc == NO_ERROR) {
+            waitAPIResult(QCAMERA_SM_EVT_TAKE_PICTURE, &apiResult);
+            rc = apiResult.status;
+        }
+        unlockAPI();
+    } else if (true == m_bIntRawEvtPending) {
+        //Attempting to take RAW snapshot
+        (void)JpegMemOpt;
+        stopPreview();
+
+        //getting the existing raw format type
+        property_get("persist.camera.raw.format", raw_format, "17");
+        //setting it to a default know value for this task
+        property_set("persist.camera.raw.format", "18");
+
+        rc = addChannel(QCAMERA_CH_TYPE_RAW);
+        if (rc == NO_ERROR) {
+            // start postprocessor
+            if (NO_ERROR != waitDeferredWork(mInitPProcJob)) {
+                LOGE("Init PProc Deferred work failed");
+                return UNKNOWN_ERROR;
+            }
+            rc = m_postprocessor.start(m_channels[QCAMERA_CH_TYPE_RAW]);
+            if (rc != NO_ERROR) {
+                LOGE("cannot start postprocessor");
+                delChannel(QCAMERA_CH_TYPE_RAW);
+                return rc;
+            }
+
+            rc = startChannel(QCAMERA_CH_TYPE_RAW);
+            if (rc != NO_ERROR) {
+                LOGE("cannot start raw channel");
+                m_postprocessor.stop();
+                delChannel(QCAMERA_CH_TYPE_RAW);
+                return rc;
+            }
+        } else {
+            LOGE("cannot add raw channel");
+            return rc;
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : clearIntPendingEvents
+ *
+ * DESCRIPTION: clear internal pending events pertaining to backend
+ *                        snapshot requests, restarting preview if an
+ *                        internal JPEG/RAW capture had stopped it
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::clearIntPendingEvents()
+{
+    int rc = NO_ERROR;
+
+    if (true == m_bIntRawEvtPending) {
+        // RAW capture tore preview down completely; bring it back up
+        preparePreview();
+        startPreview();
+    }
+    if (true == m_bIntJpegEvtPending) {
+        if (false == mParameters.isZSLMode()) {
+            // Non-ZSL JPEG capture stopped preview; restart via state machine
+            lockAPI();
+            rc = processAPI(QCAMERA_SM_EVT_START_PREVIEW, NULL);
+            unlockAPI();
+            if (rc != NO_ERROR) {
+                // Previously ignored silently; surface the failure in the log
+                LOGE("START_PREVIEW failed after internal JPEG, rc = %d", rc);
+            }
+        }
+    }
+
+    // Clear whichever internal-event flag was pending, under m_int_lock
+    pthread_mutex_lock(&m_int_lock);
+    if (true == m_bIntJpegEvtPending) {
+        m_bIntJpegEvtPending = false;
+    } else if (true == m_bIntRawEvtPending) {
+        m_bIntRawEvtPending = false;
+    }
+    pthread_mutex_unlock(&m_int_lock);
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : takeLiveSnapshot_internal
+ *
+ * DESCRIPTION: take live snapshot during recording
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::takeLiveSnapshot_internal()
+{
+    int rc = NO_ERROR;
+
+    QCameraChannel *pChannel = NULL;
+
+    //Set rotation value from user settings as Jpeg rotation
+    //to configure back-end modules.
+    mParameters.setJpegRotation(mParameters.getRotation());
+
+    // Configure advanced capture
+    rc = configureAdvancedCapture();
+    if (rc != NO_ERROR) {
+        LOGE("Unsupported capture call");
+        goto end;
+    }
+
+    // In low-power mode the liveshot is served off the video channel;
+    // otherwise the dedicated snapshot channel is used.
+    if (isLowPowerMode()) {
+        pChannel = m_channels[QCAMERA_CH_TYPE_VIDEO];
+    } else {
+        pChannel = m_channels[QCAMERA_CH_TYPE_SNAPSHOT];
+    }
+
+    if (NULL == pChannel) {
+        LOGE("Snapshot/Video channel not initialized");
+        rc = NO_INIT;
+        goto end;
+    }
+
+    DeferWorkArgs args;
+    memset(&args, 0, sizeof(DeferWorkArgs));
+
+    args.pprocArgs = pChannel;
+
+    // No need to wait for mInitPProcJob here, because it was
+    // queued in startPreview, and will definitely be processed before
+    // mReprocJob can begin.
+    mReprocJob = queueDeferredWork(CMD_DEF_PPROC_START,
+            args);
+    if (mReprocJob == 0) {
+        LOGE("Failed to queue CMD_DEF_PPROC_START");
+        rc = -ENOMEM;
+        goto end;
+    }
+
+    // Create JPEG session
+    mJpegJob = queueDeferredWork(CMD_DEF_CREATE_JPEG_SESSION,
+            args);
+    if (mJpegJob == 0) {
+        LOGE("Failed to queue CREATE_JPEG_SESSION");
+        // Must drain the already-queued reprocess job before stopping pproc
+        if (NO_ERROR != waitDeferredWork(mReprocJob)) {
+            LOGE("Reprocess Deferred work was failed");
+        }
+        m_postprocessor.stop();
+        rc = -ENOMEM;
+        goto end;
+    }
+
+    if (isLowPowerMode()) {
+        // Low-power path: request a single super buffer from the video
+        // channel and skip the snapshot-channel setup below.
+        mm_camera_req_buf_t buf;
+        memset(&buf, 0x0, sizeof(buf));
+        buf.type = MM_CAMERA_REQ_SUPER_BUF;
+        buf.num_buf_requested = 1;
+        rc = ((QCameraVideoChannel*)pChannel)->takePicture(&buf);
+        goto end;
+    }
+
+    //Disable reprocess for 4K liveshot case
+    if (!mParameters.is4k2kVideoResolution()) {
+        rc = configureOnlineRotation(*m_channels[QCAMERA_CH_TYPE_SNAPSHOT]);
+        if (rc != NO_ERROR) {
+            LOGE("online rotation failed");
+            if (NO_ERROR != waitDeferredWork(mReprocJob)) {
+                LOGE("Reprocess Deferred work was failed");
+            }
+            if (NO_ERROR != waitDeferredWork(mJpegJob)) {
+                LOGE("Jpeg Deferred work was failed");
+            }
+            m_postprocessor.stop();
+            return rc;
+        }
+    }
+
+    // TNR snapshot: instead of (re)starting the channel, ask the existing
+    // snapshot stream to start delivering frames via REQUEST_FRAMES.
+    if ((NULL != pChannel) && (mParameters.isTNRSnapshotEnabled())) {
+        QCameraStream *pStream = NULL;
+        for (uint32_t i = 0 ; i < pChannel->getNumOfStreams(); i++ ) {
+            pStream = pChannel->getStreamByIndex(i);
+            if ((NULL != pStream) &&
+                    (CAM_STREAM_TYPE_SNAPSHOT == pStream->getMyType())) {
+                break;
+            }
+        }
+        if (pStream != NULL) {
+            LOGD("REQUEST_FRAMES event for TNR snapshot");
+            cam_stream_parm_buffer_t param;
+            memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+            param.type = CAM_STREAM_PARAM_TYPE_REQUEST_FRAMES;
+            param.frameRequest.enableStream = 1;
+            rc = pStream->setParameter(param);
+            if (rc != NO_ERROR) {
+                LOGE("Stream Event REQUEST_FRAMES failed");
+            }
+            goto end;
+        }
+    }
+
+    // start snapshot channel
+    if ((rc == NO_ERROR) && (NULL != pChannel)) {
+        // Do not link metadata stream for 4K2k resolution
+        // as CPP processing would be done on snapshot stream and not
+        // reprocess stream
+        if (!mParameters.is4k2kVideoResolution()) {
+            // Find and try to link a metadata stream from preview channel
+            QCameraChannel *pMetaChannel = NULL;
+            QCameraStream *pMetaStream = NULL;
+            QCameraStream *pPreviewStream = NULL;
+
+            if (m_channels[QCAMERA_CH_TYPE_PREVIEW] != NULL) {
+                pMetaChannel = m_channels[QCAMERA_CH_TYPE_PREVIEW];
+                uint32_t streamNum = pMetaChannel->getNumOfStreams();
+                QCameraStream *pStream = NULL;
+                for (uint32_t i = 0 ; i < streamNum ; i++ ) {
+                    pStream = pMetaChannel->getStreamByIndex(i);
+                    if (NULL != pStream) {
+                        if (CAM_STREAM_TYPE_METADATA == pStream->getMyType()) {
+                            pMetaStream = pStream;
+                        } else if ((CAM_STREAM_TYPE_PREVIEW == pStream->getMyType())
+                                && (!mParameters.isHfrMode())) {
+                            // Do not link preview stream for HFR live snapshot.
+                            // Thumbnail will not be derived from preview for HFR live snapshot.
+                            pPreviewStream = pStream;
+                        }
+                    }
+                }
+            }
+
+            // Link failures are logged but non-fatal: the snapshot channel
+            // is still started below.
+            if ((NULL != pMetaChannel) && (NULL != pMetaStream)) {
+                rc = pChannel->linkStream(pMetaChannel, pMetaStream);
+                if (NO_ERROR != rc) {
+                    LOGE("Metadata stream link failed %d", rc);
+                }
+            }
+            if ((NULL != pMetaChannel) && (NULL != pPreviewStream)) {
+                rc = pChannel->linkStream(pMetaChannel, pPreviewStream);
+                if (NO_ERROR != rc) {
+                    LOGE("Preview stream link failed %d", rc);
+                }
+            }
+        }
+        rc = pChannel->start();
+    }
+
+end:
+    // On any failure, cancel the picture through the state machine and
+    // notify the client of the error.
+    if (rc != NO_ERROR) {
+        rc = processAPI(QCAMERA_SM_EVT_CANCEL_PICTURE, NULL);
+        rc = sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : cancelLiveSnapshot
+ *
+ * DESCRIPTION: cancel current live snapshot request
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::cancelLiveSnapshot()
+{
+    int ret = NO_ERROR;
+
+    // Reap any live snapshot worker still running before reusing the handle
+    if (0 != mLiveSnapshotThread) {
+        pthread_join(mLiveSnapshotThread, NULL);
+        mLiveSnapshotThread = 0;
+    }
+
+    bLiveSnapshot = false;
+    // Spawn a worker thread to carry out the cancellation asynchronously
+    ret = pthread_create(&mLiveSnapshotThread, NULL,
+            Live_Snapshot_thread, (void *) this);
+    if (ret == 0) {
+        pthread_setname_np(mLiveSnapshotThread, "CAM_cancel_liveSnap");
+    }
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : cancelLiveSnapshot_internal
+ *
+ * DESCRIPTION: cancel live snapshot during recording
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::cancelLiveSnapshot_internal() {
+    int rc = NO_ERROR;
+
+    unconfigureAdvancedCapture();
+    LOGH("Enable display frames again");
+    setDisplaySkip(FALSE);
+
+    // Release the perf lock unless longshot still needs it
+    if (!mLongshotEnabled) {
+        m_perfLock.lock_rel();
+    }
+
+    //stop post processor
+    m_postprocessor.stop();
+
+    // stop snapshot channel
+    if (!mParameters.isTNRSnapshotEnabled()) {
+        rc = stopChannel(QCAMERA_CH_TYPE_SNAPSHOT);
+    } else {
+        // TNR snapshot keeps the channel alive; instead, ask the snapshot
+        // stream to stop delivering frames (mirrors the REQUEST_FRAMES
+        // enable done in takeLiveSnapshot_internal).
+        QCameraChannel *pChannel = m_channels[QCAMERA_CH_TYPE_SNAPSHOT];
+        if (NULL != pChannel) {
+            QCameraStream *pStream = NULL;
+            for (uint32_t i = 0 ; i < pChannel->getNumOfStreams(); i++ ) {
+                pStream = pChannel->getStreamByIndex(i);
+                if ((NULL != pStream) &&
+                        (CAM_STREAM_TYPE_SNAPSHOT ==
+                        pStream->getMyType())) {
+                    break;
+                }
+            }
+            if (pStream != NULL) {
+                LOGD("REQUEST_FRAMES event for TNR snapshot");
+                cam_stream_parm_buffer_t param;
+                memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+                param.type = CAM_STREAM_PARAM_TYPE_REQUEST_FRAMES;
+                param.frameRequest.enableStream = 0;
+                rc = pStream->setParameter(param);
+                if (rc != NO_ERROR) {
+                    LOGE("Stream Event REQUEST_FRAMES failed");
+                }
+            }
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : putParameters
+ *
+ * DESCRIPTION: put parameters string impl; releases a parameter string
+ *              previously handed out by getParameters
+ *
+ * PARAMETERS :
+ *   @parms   : parameters string to be released (may be NULL; free(NULL)
+ *              is a no-op)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::putParameters(char *parms)
+{
+    free(parms);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : sendCommand
+ *
+ * DESCRIPTION: send command impl
+ *
+ * PARAMETERS :
+ *   @command : command to be executed
+ *   @arg1    : optional argument 1; for LONGSHOT_ON may be set to a
+ *              restart event the caller should forward to the state machine
+ *   @arg2    : optional argument 2; may request a delayed restart for
+ *              dual-camera sync
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::sendCommand(int32_t command,
+        __unused int32_t &arg1, __unused int32_t &arg2)
+{
+    int rc = NO_ERROR;
+
+    switch (command) {
+#ifndef VANILLA_HAL
+    case CAMERA_CMD_LONGSHOT_ON:
+        m_perfLock.lock_acq();
+        arg1 = arg2 = 0;
+        // Longshot can only be enabled when image capture
+        // is not active.
+        if ( !m_stateMachine.isCaptureRunning() ) {
+            LOGI("Longshot Enabled");
+            mLongshotEnabled = true;
+            rc = mParameters.setLongshotEnable(mLongshotEnabled);
+
+            // Due to recent buffer count optimizations
+            // ZSL might run with considerably less buffers
+            // when not in longshot mode. Preview needs to
+            // restart in this case.
+            if (isZSLMode() && m_stateMachine.isPreviewRunning()) {
+                QCameraChannel *pChannel = NULL;
+                QCameraStream *pSnapStream = NULL;
+                pChannel = m_channels[QCAMERA_CH_TYPE_ZSL];
+                if (NULL != pChannel) {
+                    // Locate the snapshot stream in the ZSL channel
+                    QCameraStream *pStream = NULL;
+                    for (uint32_t i = 0; i < pChannel->getNumOfStreams(); i++) {
+                        pStream = pChannel->getStreamByIndex(i);
+                        if (pStream != NULL) {
+                            if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
+                                pSnapStream = pStream;
+                                break;
+                            }
+                        }
+                    }
+                    if (NULL != pSnapStream) {
+                        uint8_t required = 0;
+                        required = getBufNumRequired(CAM_STREAM_TYPE_SNAPSHOT);
+                        if (pSnapStream->getBufferCount() < required) {
+                            // We restart here, to reset the FPS and no
+                            // of buffers as per the requirement of longshot usecase.
+                            arg1 = QCAMERA_SM_EVT_RESTART_PERVIEW;
+                            if (getRelatedCamSyncInfo()->sync_control ==
+                                    CAM_SYNC_RELATED_SENSORS_ON) {
+                                arg2 = QCAMERA_SM_EVT_DELAYED_RESTART;
+                            }
+                        }
+                    }
+                }
+            }
+            // Reset per-capture state for the new longshot session
+            mPrepSnapRun = false;
+            mCACDoneReceived = FALSE;
+        } else {
+            rc = NO_INIT;
+        }
+        break;
+    case CAMERA_CMD_LONGSHOT_OFF:
+        m_perfLock.lock_rel();
+        // If a longshot capture is still running, cancel it and, for ZSL
+        // with a completed prepare, stop the ZSL snapshot at the backend.
+        if ( mLongshotEnabled && m_stateMachine.isCaptureRunning() ) {
+            cancelPicture();
+            processEvt(QCAMERA_SM_EVT_SNAPSHOT_DONE, NULL);
+            QCameraChannel *pZSLChannel = m_channels[QCAMERA_CH_TYPE_ZSL];
+            if (isZSLMode() && (NULL != pZSLChannel) && mPrepSnapRun) {
+                mCameraHandle->ops->stop_zsl_snapshot(
+                        mCameraHandle->camera_handle,
+                        pZSLChannel->getMyHandle());
+            }
+        }
+        mPrepSnapRun = false;
+        LOGI("Longshot Disabled");
+        mLongshotEnabled = false;
+        rc = mParameters.setLongshotEnable(mLongshotEnabled);
+        mCACDoneReceived = FALSE;
+        break;
+    case CAMERA_CMD_HISTOGRAM_ON:
+    case CAMERA_CMD_HISTOGRAM_OFF:
+        rc = setHistogram(command == CAMERA_CMD_HISTOGRAM_ON? true : false);
+        LOGH("Histogram -> %s",
+              mParameters.isHistogramEnabled() ? "Enabled" : "Disabled");
+        break;
+#endif
+    case CAMERA_CMD_START_FACE_DETECTION:
+    case CAMERA_CMD_STOP_FACE_DETECTION:
+        mParameters.setFaceDetectionOption(command == CAMERA_CMD_START_FACE_DETECTION? true : false);
+        rc = setFaceDetection(command == CAMERA_CMD_START_FACE_DETECTION? true : false);
+        LOGH("FaceDetection -> %s",
+              mParameters.isFaceDetectionEnabled() ? "Enabled" : "Disabled");
+        break;
+#ifndef VANILLA_HAL
+    case CAMERA_CMD_HISTOGRAM_SEND_DATA:
+        /* fallthrough: treated as a no-op like unknown commands */
+#endif
+    default:
+        rc = NO_ERROR;
+        break;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : registerFaceImage
+ *
+ * DESCRIPTION: register face image impl; copies the caller's image into an
+ *              ion buffer and runs it through an offline reprocess channel
+ *              with the REGISTER_FACE feature
+ *
+ * PARAMETERS :
+ *   @img_ptr : ptr to image buffer (frame_len bytes per config)
+ *   @config  : ptr to config struct about input image info
+ *   @faceID  : [OUT] face ID to uniquely identifiy the registered face image
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::registerFaceImage(void *img_ptr,
+                                                 cam_pp_offline_src_config_t *config,
+                                                 int32_t &faceID)
+{
+    int rc = NO_ERROR;
+    faceID = -1;
+
+    if (img_ptr == NULL || config == NULL) {
+        LOGE("img_ptr or config is NULL");
+        return BAD_VALUE;
+    }
+
+    // allocate ion memory for source image
+    QCameraHeapMemory *imgBuf = new QCameraHeapMemory(QCAMERA_ION_USE_CACHE);
+    if (imgBuf == NULL) {
+        LOGE("Unable to new heap memory obj for image buf");
+        return NO_MEMORY;
+    }
+
+    rc = imgBuf->allocate(1, config->input_buf_planes.plane_info.frame_len, NON_SECURE);
+    if (rc < 0) {
+        LOGE("Unable to allocate heap memory for image buf");
+        delete imgBuf;
+        return NO_MEMORY;
+    }
+
+    void *pBufPtr = imgBuf->getPtr(0);
+    if (pBufPtr == NULL) {
+        LOGE("image buf is NULL");
+        imgBuf->deallocate();
+        delete imgBuf;
+        return NO_MEMORY;
+    }
+    memcpy(pBufPtr, img_ptr, config->input_buf_planes.plane_info.frame_len);
+
+    cam_pp_feature_config_t pp_feature;
+    memset(&pp_feature, 0, sizeof(cam_pp_feature_config_t));
+    pp_feature.feature_mask = CAM_QCOM_FEATURE_REGISTER_FACE;
+    QCameraReprocessChannel *pChannel =
+        addOfflineReprocChannel(*config, pp_feature, NULL, NULL);
+
+    if (pChannel == NULL) {
+        LOGE("fail to add offline reprocess channel");
+        imgBuf->deallocate();
+        delete imgBuf;
+        return UNKNOWN_ERROR;
+    }
+
+    rc = pChannel->start();
+    if (rc != NO_ERROR) {
+        LOGE("Cannot start reprocess channel");
+        imgBuf->deallocate();
+        delete imgBuf;
+        delete pChannel;
+        return rc;
+    }
+
+    ssize_t bufSize = imgBuf->getSize(0);
+    if (BAD_INDEX != bufSize) {
+        rc = pChannel->doReprocess(imgBuf->getFd(0), imgBuf->getPtr(0),
+                (size_t)bufSize, faceID);
+    } else {
+        LOGE("Failed to retrieve buffer size (bad index)");
+        // Bug fix: this path previously returned without releasing imgBuf
+        // or stopping/deleting the started reprocess channel (leak)
+        imgBuf->deallocate();
+        delete imgBuf;
+        pChannel->stop();
+        delete pChannel;
+        return UNKNOWN_ERROR;
+    }
+
+    // done with register face image, free imgbuf and delete reprocess channel
+    imgBuf->deallocate();
+    delete imgBuf;
+    imgBuf = NULL;
+    pChannel->stop();
+    delete pChannel;
+    pChannel = NULL;
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : release
+ *
+ * DESCRIPTION: release camera resource impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::release()
+{
+    // Walk every channel slot and tear down whatever is still allocated
+    int ch = 0;
+    while (ch < QCAMERA_CH_TYPE_MAX) {
+        if (NULL != m_channels[ch]) {
+            stopChannel((qcamera_ch_type_enum_t)ch);
+            delChannel((qcamera_ch_type_enum_t)ch);
+        }
+        ch++;
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : dump
+ *
+ * DESCRIPTION: camera status dump impl (for dumpsys-style debugging)
+ *
+ * PARAMETERS :
+ *   @fd      : fd for the buffer to be dumped with camera status
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::dump(int fd)
+{
+    dprintf(fd, "\n Camera HAL information Begin \n");
+    dprintf(fd, "Camera ID: %d \n", mCameraId);
+    dprintf(fd, "StoreMetaDataInFrame: %d \n", mStoreMetaDataInFrame);
+    dprintf(fd, "\n Configuration: %s", mParameters.dump().string());
+    dprintf(fd, "\n State Information: %s", m_stateMachine.dump().string());
+    dprintf(fd, "\n Camera HAL information End \n");
+
+    /* send UPDATE_DEBUG_LEVEL to the backend so that they can read the
+       debug level property */
+    mParameters.updateDebugLevel();
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : processAPI
+ *
+ * DESCRIPTION: process API calls from upper layer
+ *
+ * PARAMETERS :
+ *   @api         : API to be processed
+ *   @api_payload : ptr to API payload if any
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::processAPI(qcamera_sm_evt_enum_t api, void *api_payload)
+{
+    // Refuse new API calls once the state machine thread has shut down
+    if (!m_smThreadActive) {
+        return DEAD_OBJECT;
+    }
+    return m_stateMachine.procAPI(api, api_payload);
+}
+
+/*===========================================================================
+ * FUNCTION   : processEvt
+ *
+ * DESCRIPTION: process Evt from backend via mm-camera-interface;
+ *              thin delegate to the state machine's event queue
+ *
+ * PARAMETERS :
+ *   @evt         : event type to be processed
+ *   @evt_payload : ptr to event payload if any
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::processEvt(qcamera_sm_evt_enum_t evt, void *evt_payload)
+{
+    return m_stateMachine.procEvt(evt, evt_payload);
+}
+
+/*===========================================================================
+ * FUNCTION   : processSyncEvt
+ *
+ * DESCRIPTION: process synchronous Evt from backend; posts the event and
+ *              blocks on m_evtCond until the matching result arrives
+ *
+ * PARAMETERS :
+ *   @evt         : event type to be processed
+ *   @evt_payload : ptr to event payload if any
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::processSyncEvt(qcamera_sm_evt_enum_t evt, void *evt_payload)
+{
+    int rc = NO_ERROR;
+
+    pthread_mutex_lock(&m_evtLock);
+    rc =  processEvt(evt, evt_payload);
+    if (rc == NO_ERROR) {
+        // Clear the shared result and wait until the event-result signaler
+        // fills it in for this specific event (guards spurious wakeups).
+        // NOTE(review): assumes the signaler also holds m_evtLock when
+        // writing m_evtResult — confirm against the signaling path.
+        memset(&m_evtResult, 0, sizeof(qcamera_api_result_t));
+        while (m_evtResult.request_api != evt) {
+            pthread_cond_wait(&m_evtCond, &m_evtLock);
+        }
+        rc =  m_evtResult.status;
+    }
+    pthread_mutex_unlock(&m_evtLock);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : evtHandle
+ *
+ * DESCRIPTION: Function registerd to mm-camera-interface to handle backend events
+ *
+ * PARAMETERS :
+ *   @camera_handle : event type to be processed
+ *   @evt           : ptr to event
+ *   @user_data     : user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
+                                          mm_camera_event_t *evt,
+                                          void *user_data)
+{
+    QCamera2HardwareInterface *obj = (QCamera2HardwareInterface *)user_data;
+    if (obj && evt) {
+        // Copy the event: the callback's evt is only valid for this call
+        mm_camera_event_t *payload =
+            (mm_camera_event_t *)malloc(sizeof(mm_camera_event_t));
+        if (NULL != payload) {
+            *payload = *evt;
+            //peek into the event, if this is an eztune event from server,
+            //then we don't need to post it to the SM Qs, we shud directly
+            //spawn a thread and get the job done (jpeg or raw snapshot)
+            switch (payload->server_event_type) {
+                case CAM_EVENT_TYPE_INT_TAKE_JPEG:
+                    //Received JPEG trigger from eztune
+                    if (false == obj->m_bIntJpegEvtPending) {
+                        pthread_mutex_lock(&obj->m_int_lock);
+                        obj->m_bIntJpegEvtPending = true;
+                        pthread_mutex_unlock(&obj->m_int_lock);
+                        obj->takePictureInternal();
+                    }
+                    // Handled locally; payload not queued to the SM
+                    free(payload);
+                    break;
+                case CAM_EVENT_TYPE_INT_TAKE_RAW:
+                    //Received RAW trigger from eztune
+                    if (false == obj->m_bIntRawEvtPending) {
+                        pthread_mutex_lock(&obj->m_int_lock);
+                        obj->m_bIntRawEvtPending = true;
+                        pthread_mutex_unlock(&obj->m_int_lock);
+                        obj->takePictureInternal();
+                    }
+                    free(payload);
+                    break;
+                case CAM_EVENT_TYPE_DAEMON_DIED:
+                    {
+                        // Wake any thread blocked on deferred work so it can
+                        // notice the daemon death
+                        Mutex::Autolock l(obj->mDefLock);
+                        obj->mDefCond.broadcast();
+                        LOGH("broadcast mDefCond signal\n");
+                    }
+                    /* fallthrough: also post the event to the state machine */
+                default:
+                    // SM consumes payload; ownership transfers here
+                    obj->processEvt(QCAMERA_SM_EVT_EVT_NOTIFY, payload);
+                    break;
+            }
+        }
+    } else {
+        LOGE("NULL user_data");
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : jpegEvtHandle
+ *
+ * DESCRIPTION: Function registerd to mm-jpeg-interface to handle jpeg events
+ *
+ * PARAMETERS :
+ *   @status    : status of jpeg job
+ *   @client_hdl: jpeg client handle
+ *   @jobId     : jpeg job Id
+ *   @p_ouput   : ptr to jpeg output result struct
+ *   @userdata  : user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::jpegEvtHandle(jpeg_job_status_t status,
+                                              uint32_t /*client_hdl*/,
+                                              uint32_t jobId,
+                                              mm_jpeg_output_t *p_output,
+                                              void *userdata)
+{
+    QCamera2HardwareInterface *obj = (QCamera2HardwareInterface *)userdata;
+    if (NULL == obj) {
+        LOGE("NULL user_data");
+        return;
+    }
+
+    // Package the jpeg result into a heap payload; the state machine
+    // takes ownership when the event is posted.
+    qcamera_jpeg_evt_payload_t *payload = (qcamera_jpeg_evt_payload_t *)
+            malloc(sizeof(qcamera_jpeg_evt_payload_t));
+    if (NULL == payload) {
+        return;
+    }
+    memset(payload, 0, sizeof(*payload));
+    payload->status = status;
+    payload->jobId = jobId;
+    if (NULL != p_output) {
+        payload->out_data = *p_output;
+    }
+    obj->processEvt(QCAMERA_SM_EVT_JPEG_EVT_NOTIFY, payload);
+}
+
+/*===========================================================================
+ * FUNCTION   : thermalEvtHandle
+ *
+ * DESCRIPTION: routine to handle thermal event notification
+ *
+ * PARAMETERS :
+ *   @level      : thermal level
+ *   @userdata   : userdata passed in during registration
+ *   @data       : opaque data from thermal client
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::thermalEvtHandle(
+        qcamera_thermal_level_enum_t *level, void *userdata, void *data)
+{
+    // Nothing to do if the camera is not up
+    if (!mCameraOpened) {
+        LOGH("Camera is not opened, no need to handle thermal evt");
+        return NO_ERROR;
+    }
+
+    // Make sure thermal events are logged
+    LOGH("level = %d, userdata = %p, data = %p",
+         *level, userdata, data);
+    // QCAMERA_SM_EVT_THERMAL_NOTIFY is asynchronous (no lockAPI/waitAPI),
+    // so the payload must be passed by value, not by address.
+    int rc = processAPI(QCAMERA_SM_EVT_THERMAL_NOTIFY, (void *)level);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : sendEvtNotify
+ *
+ * DESCRIPTION: send event notify to notify thread
+ *
+ * PARAMETERS :
+ *   @msg_type: msg type to be sent
+ *   @ext1    : optional extension1
+ *   @ext2    : optional extension2
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::sendEvtNotify(int32_t msg_type,
+                                                 int32_t ext1,
+                                                 int32_t ext2)
+{
+    // Build a notify-callback argument and hand it to the notifier thread
+    qcamera_callback_argm_t cbArg;
+    memset(&cbArg, 0, sizeof(cbArg));
+    cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
+    cbArg.msg_type = msg_type;
+    cbArg.ext1 = ext1;
+    cbArg.ext2 = ext2;
+    return m_cbNotifier.notifyCallback(cbArg);
+}
+
+/*===========================================================================
+ * FUNCTION   : processAEInfo
+ *
+ * DESCRIPTION: process AE updates
+ *
+ * PARAMETERS :
+ *   @ae_params: current AE parameters
+ *
+ * RETURN     : None
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processAEInfo(cam_3a_params_t &ae_params)
+{
+    mParameters.updateAEInfo(ae_params);
+
+    if (!mParameters.isInstantAECEnabled()) {
+        return NO_ERROR;
+    }
+
+    // Instant AEC is active: decide whether it should be turned off now.
+    // It resets once AEC settles, or once the frame counter reaches the
+    // relevant bound — the capture bound when instant capture is enabled,
+    // otherwise the skip-display bound.
+    bool resetNow = ae_params.settled;
+    if (!resetNow && mParameters.isInstantCaptureEnabled()) {
+        resetNow = (mInstantAecFrameCount >= mParameters.getAecFrameBoundValue());
+    }
+    if (!resetNow && mParameters.isInstantAECEnabled()) {
+        resetNow = (mInstantAecFrameCount >= mParameters.getAecSkipDisplayFrameBound());
+    }
+
+    if (resetNow) {
+        LOGD("setting instant AEC to false");
+        mParameters.setInstantAEC(false, true);
+        mInstantAecFrameCount = 0;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : processFocusPositionInfo
+ *
+ * DESCRIPTION: process AF lens position updates by caching them in the
+ *              parameter module for later queries
+ *
+ * PARAMETERS :
+ *   @cur_pos_info: current lens position
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR -- always succeeds
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processFocusPositionInfo(cam_focus_pos_info_t &cur_pos_info)
+{
+    mParameters.updateCurrentFocusPosition(cur_pos_info);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : processAutoFocusEvent
+ *
+ * DESCRIPTION: process auto focus event
+ *
+ * PARAMETERS :
+ *   @focus_data: struct containing auto focus result info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *
+ * NOTE       : behavior is keyed off the current HAL focus mode; in CAF
+ *              modes both CAMERA_MSG_FOCUS and CAMERA_MSG_FOCUS_MOVE
+ *              notifications may be emitted from a single event.
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processAutoFocusEvent(cam_auto_focus_data_t &focus_data)
+{
+    int32_t ret = NO_ERROR;
+    LOGH("E");
+
+    // In dual-camera use cases only the main camera drives AF notifications.
+    if (getRelatedCamSyncInfo()->mode == CAM_MODE_SECONDARY) {
+        // Ignore focus updates
+        LOGH("X Secondary Camera, no need to process!! ");
+        return ret;
+    }
+    cam_focus_mode_type focusMode = mParameters.getFocusMode();
+    LOGH("[AF_DBG]  focusMode=%d, focusState=%d",
+             focusMode, focus_data.focus_state);
+
+    switch (focusMode) {
+    case CAM_FOCUS_MODE_AUTO:
+    case CAM_FOCUS_MODE_MACRO:
+        // ignore AF event if AF was already cancelled meanwhile
+        if (!mActiveAF) {
+            break;
+        }
+        // If the HAL focus mode is different from AF INFINITY focus mode, send event to app
+        if ((focus_data.focus_mode == CAM_FOCUS_MODE_INFINITY) &&
+                (focus_data.focus_state == CAM_AF_STATE_INACTIVE)) {
+            ret = sendEvtNotify(CAMERA_MSG_FOCUS, true, 0);
+            mActiveAF = false; // reset the mActiveAF in this special case
+            break;
+        }
+
+        //while transitioning from CAF->Auto/Macro, we might receive CAF related
+        //events (PASSIVE_*) due to timing. Ignore such events if any.
+        if ((focus_data.focus_state == CAM_AF_STATE_PASSIVE_SCAN) ||
+                (focus_data.focus_state == CAM_AF_STATE_PASSIVE_FOCUSED) ||
+                (focus_data.focus_state == CAM_AF_STATE_PASSIVE_UNFOCUSED)) {
+            break;
+        }
+
+        //This is just an intermediate update to HAL indicating focus is in progress. No need
+        //to send this event to app. Same applies to INACTIVE state as well.
+        if ((focus_data.focus_state == CAM_AF_STATE_ACTIVE_SCAN) ||
+                (focus_data.focus_state == CAM_AF_STATE_INACTIVE)) {
+            break;
+        }
+        // update focus distance
+        mParameters.updateFocusDistances(&focus_data.focus_dist);
+
+        //flush any old snapshot frames in ZSL Q which are not focused.
+        if (mParameters.isZSLMode() && focus_data.flush_info.needFlush ) {
+            QCameraPicChannel *pZSLChannel =
+                    (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_ZSL];
+            if (NULL != pZSLChannel) {
+                //flush the zsl-buffer
+                uint32_t flush_frame_idx = focus_data.flush_info.focused_frame_idx;
+                LOGD("flush the zsl-buffer before frame = %u.", flush_frame_idx);
+                pZSLChannel->flushSuperbuffer(flush_frame_idx);
+            }
+        }
+
+        //send event to app finally
+        LOGI("Send AF DOne event to app");
+        ret = sendEvtNotify(CAMERA_MSG_FOCUS,
+                            (focus_data.focus_state == CAM_AF_STATE_FOCUSED_LOCKED), 0);
+        break;
+    case CAM_FOCUS_MODE_CONTINOUS_VIDEO:
+    case CAM_FOCUS_MODE_CONTINOUS_PICTURE:
+
+        // If the HAL focus mode is different from AF INFINITY focus mode, send event to app
+        if ((focus_data.focus_mode == CAM_FOCUS_MODE_INFINITY) &&
+                (focus_data.focus_state == CAM_AF_STATE_INACTIVE)) {
+            ret = sendEvtNotify(CAMERA_MSG_FOCUS, false, 0);
+            mActiveAF = false; // reset the mActiveAF in this special case
+            break;
+        }
+
+        //If AutoFocus() is triggered while in CAF mode, ignore all CAF events (PASSIVE_*) and
+        //process/wait for only ACTIVE_* events.
+        if (((focus_data.focus_state == CAM_AF_STATE_PASSIVE_FOCUSED) ||
+                (focus_data.focus_state == CAM_AF_STATE_PASSIVE_UNFOCUSED) ||
+                (focus_data.focus_state == CAM_AF_STATE_PASSIVE_SCAN)) && mActiveAF) {
+            break;
+        }
+
+        //These are the AF states for which we need to send notification to app in CAF mode.
+        //This includes both regular CAF (PASSIVE) events as well as ACTIVE events ( in case
+        //AF is triggered while in CAF mode)
+        if ((focus_data.focus_state == CAM_AF_STATE_PASSIVE_FOCUSED) ||
+                (focus_data.focus_state == CAM_AF_STATE_PASSIVE_UNFOCUSED) ||
+                (focus_data.focus_state == CAM_AF_STATE_FOCUSED_LOCKED) ||
+                (focus_data.focus_state == CAM_AF_STATE_NOT_FOCUSED_LOCKED)) {
+
+            // update focus distance
+            mParameters.updateFocusDistances(&focus_data.focus_dist);
+
+            if (mParameters.isZSLMode() && focus_data.flush_info.needFlush ) {
+                QCameraPicChannel *pZSLChannel =
+                        (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_ZSL];
+                if (NULL != pZSLChannel) {
+                    //flush the zsl-buffer
+                    uint32_t flush_frame_idx = focus_data.flush_info.focused_frame_idx;
+                    LOGD("flush the zsl-buffer before frame = %u.", flush_frame_idx);
+                    pZSLChannel->flushSuperbuffer(flush_frame_idx);
+                }
+            }
+
+            if (mActiveAF) {
+                LOGI("Send AF Done event to app");
+            }
+            ret = sendEvtNotify(CAMERA_MSG_FOCUS,
+                    ((focus_data.focus_state == CAM_AF_STATE_PASSIVE_FOCUSED) ||
+                    (focus_data.focus_state == CAM_AF_STATE_FOCUSED_LOCKED)), 0);
+        }
+        // NOTE(review): the return status of the CAMERA_MSG_FOCUS notify
+        // above is overwritten here; only the FOCUS_MOVE notify status is
+        // propagated to the caller -- confirm this is intentional.
+        ret = sendEvtNotify(CAMERA_MSG_FOCUS_MOVE,
+                (focus_data.focus_state == CAM_AF_STATE_PASSIVE_SCAN), 0);
+        break;
+    case CAM_FOCUS_MODE_INFINITY:
+    case CAM_FOCUS_MODE_FIXED:
+    case CAM_FOCUS_MODE_EDOF:
+    default:
+        LOGH("no ops for autofocus event in focusmode %d", focusMode);
+        break;
+    }
+
+    //Reset mActiveAF once we receive focus done event
+    if ((focus_data.focus_state == CAM_AF_STATE_FOCUSED_LOCKED) ||
+            (focus_data.focus_state == CAM_AF_STATE_NOT_FOCUSED_LOCKED)) {
+        mActiveAF = false;
+    }
+
+    LOGH("X");
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : processZoomEvent
+ *
+ * DESCRIPTION: fan a zoom-induced crop update out to every active channel
+ *
+ * PARAMETERS :
+ *   @crop_info : crop info as a result of zoom operation
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *
+ * NOTE       : only the status of the last channel processed is returned,
+ *              matching legacy behavior.
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processZoomEvent(cam_crop_data_t &crop_info)
+{
+    int32_t rc = NO_ERROR;
+
+    for (int ch = 0; ch < QCAMERA_CH_TYPE_MAX; ch++) {
+        QCameraChannel *pChannel = m_channels[ch];
+        if (NULL == pChannel) {
+            continue;
+        }
+        rc = pChannel->processZoomDone(mPreviewWindow, crop_info);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : processZSLCaptureDone
+ *
+ * DESCRIPTION: process ZSL capture done events; once the advanced-capture
+ *              burst count is reached, tear the advanced capture
+ *              configuration back down
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processZSLCaptureDone()
+{
+    int rc = NO_ERROR;
+
+    // Count this capture toward the advanced-capture burst.
+    mInputCount++;
+    if (mInputCount >= mParameters.getBurstCountForAdvancedCapture()) {
+        rc = unconfigureAdvancedCapture();
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : processRetroAECUnlock
+ *
+ * DESCRIPTION: process retro burst AEC unlock events: drop the AEC lock
+ *              taken for LED-assisted AF and commit the change
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processRetroAECUnlock()
+{
+    LOGH("LED assisted AF Release AEC Lock");
+
+    int32_t rc = mParameters.setAecLock("false");
+    if (rc != NO_ERROR) {
+        LOGE("Error setting AEC lock");
+        return rc;
+    }
+
+    rc = mParameters.commitParameters();
+    if (rc != NO_ERROR) {
+        LOGE("Error during camera parameter commit");
+        return rc;
+    }
+
+    // Lock released and committed; clear the bookkeeping flag.
+    m_bLedAfAecLock = FALSE;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : processHDRData
+ *
+ * DESCRIPTION: process HDR scene events: latch whether auto HDR should be
+ *              active and, if meta-data callbacks are enabled, notify the
+ *              application of the HDR scene decision
+ *
+ * PARAMETERS :
+ *   @hdr_scene : HDR scene event data
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processHDRData(
+        __unused cam_asd_hdr_scene_data_t hdr_scene)
+{
+    int rc = NO_ERROR;
+
+#ifndef VANILLA_HAL
+    // Auto HDR turns on only when ASD reports an HDR scene with enough
+    // confidence AND the user has auto HDR enabled.
+    if (hdr_scene.is_hdr_scene &&
+      (hdr_scene.hdr_confidence > HDR_CONFIDENCE_THRESHOLD) &&
+      mParameters.isAutoHDREnabled()) {
+        m_HDRSceneEnabled = true;
+    } else {
+        m_HDRSceneEnabled = false;
+    }
+    mParameters.setHDRSceneEnable(m_HDRSceneEnabled);
+
+    if ( msgTypeEnabled(CAMERA_MSG_META_DATA) ) {
+
+        // Payload layout: [meta type][data length][data]
+        size_t data_len = sizeof(int);
+        size_t buffer_len = 1 *sizeof(int)       //meta type
+                          + 1 *sizeof(int)       //data len
+                          + 1 *sizeof(int);      //data
+        camera_memory_t *hdrBuffer = mGetMemory(-1,
+                                                 buffer_len,
+                                                 1,
+                                                 mCallbackCookie);
+        if ( NULL == hdrBuffer ) {
+            LOGE("Not enough memory for auto HDR data");
+            return NO_MEMORY;
+        }
+
+        int *pHDRData = (int *)hdrBuffer->data;
+        if (pHDRData == NULL) {
+            LOGE("memory data ptr is NULL");
+            // Release the callback buffer before bailing out; the original
+            // code leaked it on this path.
+            hdrBuffer->release(hdrBuffer);
+            return UNKNOWN_ERROR;
+        }
+
+        pHDRData[0] = CAMERA_META_DATA_HDR;
+        pHDRData[1] = (int)data_len;
+        pHDRData[2] = m_HDRSceneEnabled;
+
+        qcamera_callback_argm_t cbArg;
+        memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+        cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+        cbArg.msg_type = CAMERA_MSG_META_DATA;
+        cbArg.data = hdrBuffer;
+        cbArg.user_data = hdrBuffer;
+        cbArg.cookie = this;
+        cbArg.release_cb = releaseCameraMemory;
+        rc = m_cbNotifier.notifyCallback(cbArg);
+        if (rc != NO_ERROR) {
+            LOGE("fail sending auto HDR notification");
+            hdrBuffer->release(hdrBuffer);
+        }
+    }
+
+    LOGH("hdr_scene_data: processHDRData: %d %f",
+          hdr_scene.is_hdr_scene,
+          hdr_scene.hdr_confidence);
+
+#endif
+  return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : transAwbMetaToParams
+ *
+ * DESCRIPTION: translate awb params from metadata callback to QCameraParametersIntf
+ *
+ * PARAMETERS :
+ *   @awb_params : awb params from metadata callback
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR -- always succeeds
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::transAwbMetaToParams(cam_awb_params_t &awb_params)
+{
+    // Forward the AWB snapshot to the parameter module; it caches the values
+    // for later parameter queries.
+    mParameters.updateAWBParams(awb_params);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : processPrepSnapshotDone
+ *
+ * DESCRIPTION: process prep snapshot done event
+ *
+ * PARAMETERS :
+ *   @prep_snapshot_state  : state of prepare snapshot done. In other words,
+ *                           i.e. whether need future frames for capture.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processPrepSnapshotDoneEvent(
+                        cam_prep_snapshot_state_t prep_snapshot_state)
+{
+    int32_t ret = NO_ERROR;
+    // NOTE(review): "SANSPHOT" is a typo for "SNAPSHOT", but KPI tooling may
+    // grep for this exact string -- confirm before correcting it.
+    LOGI("[KPI Perf]: Received PREPARE SANSPHOT Done event state = %d",
+            prep_snapshot_state);
+    if (m_channels[QCAMERA_CH_TYPE_ZSL] &&
+        prep_snapshot_state == NEED_FUTURE_FRAME) {
+        LOGH("already handled in mm-camera-intf, no ops here");
+        // Retro (look-back) capture: hold AEC steady across the burst by
+        // locking it; released later via processRetroAECUnlock().
+        if (isRetroPicture()) {
+            mParameters.setAecLock("true");
+            mParameters.commitParameters();
+            m_bLedAfAecLock = TRUE;
+        }
+    }
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : processASDUpdate
+ *
+ * DESCRIPTION: process ASD update event; notifies the application of the
+ *              detected scene via a CAMERA_MSG_META_DATA callback
+ *
+ * PARAMETERS :
+ *   @asd_decision: selected scene mode
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processASDUpdate(
+        __unused cam_asd_decision_t asd_decision)
+{
+#ifndef VANILLA_HAL
+    // The whole body lives under !VANILLA_HAL: the original code allocated
+    // the callback buffer unconditionally and leaked it on VANILLA_HAL
+    // builds (and on the NULL-data error path below).
+    if ( msgTypeEnabled(CAMERA_MSG_META_DATA) ) {
+        // Payload layout: [meta type][data length][data]
+        size_t data_len = sizeof(cam_auto_scene_t);
+        size_t buffer_len = 1 *sizeof(int)       //meta type
+                + 1 *sizeof(int)       //data len
+                + data_len;            //data
+        camera_memory_t *asdBuffer = mGetMemory(-1,
+                buffer_len, 1, mCallbackCookie);
+        if ( NULL == asdBuffer ) {
+            LOGE("Not enough memory for ASD data");
+            return NO_MEMORY;
+        }
+
+        int *pASDData = (int *)asdBuffer->data;
+        if (pASDData == NULL) {
+            LOGE("memory data ptr is NULL");
+            asdBuffer->release(asdBuffer);
+            return UNKNOWN_ERROR;
+        }
+
+        pASDData[0] = CAMERA_META_DATA_ASD;
+        pASDData[1] = (int)data_len;
+        pASDData[2] = asd_decision.detected_scene;
+
+        qcamera_callback_argm_t cbArg;
+        memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+        cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+        cbArg.msg_type = CAMERA_MSG_META_DATA;
+        cbArg.data = asdBuffer;
+        cbArg.user_data = asdBuffer;
+        cbArg.cookie = this;
+        cbArg.release_cb = releaseCameraMemory;
+        int32_t rc = m_cbNotifier.notifyCallback(cbArg);
+        if (rc != NO_ERROR) {
+            LOGE("fail sending notification");
+            asdBuffer->release(asdBuffer);
+        }
+    }
+#endif
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : processJpegNotify
+ *
+ * DESCRIPTION: process jpeg event by handing it straight to the
+ *              postprocessor module
+ *
+ * PARAMETERS :
+ *   @jpeg_evt: ptr to jpeg event payload
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processJpegNotify(qcamera_jpeg_evt_payload_t *jpeg_evt)
+{
+    return m_postprocessor.processJpegEvt(jpeg_evt);
+}
+
+/*===========================================================================
+ * FUNCTION   : lockAPI
+ *
+ * DESCRIPTION: acquire m_lock before processing an API call; paired with
+ *              unlockAPI(), and required before calling waitAPIResult()
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::lockAPI()
+{
+    pthread_mutex_lock(&m_lock);
+}
+
+/*===========================================================================
+ * FUNCTION   : waitAPIResult
+ *
+ * DESCRIPTION: wait for API result coming back. This is a blocking call, it will
+ *              return only cerntain API event type arrives
+ *
+ * PARAMETERS :
+ *   @api_evt : API event type
+ *   @apiResult : [out] receives the matching result entry
+ *
+ * RETURN     : none
+ *
+ * NOTE       : caller must hold m_lock (via lockAPI()); pthread_cond_wait
+ *              releases and re-acquires it around each wakeup.
+ *==========================================================================*/
+void QCamera2HardwareInterface::waitAPIResult(qcamera_sm_evt_enum_t api_evt,
+        qcamera_api_result_t *apiResult)
+{
+    LOGD("wait for API result of evt (%d)", api_evt);
+    int resultReceived = 0;
+    while  (!resultReceived) {
+        pthread_cond_wait(&m_cond, &m_lock);
+        // Scan the singly-linked result list for the first entry matching
+        // this event; unlink and free it once copied out. Spurious wakeups
+        // (or results meant for other waiters) just loop again.
+        if (m_apiResultList != NULL) {
+            api_result_list *apiResultList = m_apiResultList;
+            api_result_list *apiResultListPrevious = m_apiResultList;
+            while (apiResultList != NULL) {
+                if (apiResultList->result.request_api == api_evt) {
+                    resultReceived = 1;
+                    *apiResult = apiResultList->result;
+                    // Unlink: fix the predecessor, and fix the head pointer
+                    // when the match is the first node.
+                    apiResultListPrevious->next = apiResultList->next;
+                    if (apiResultList == m_apiResultList) {
+                        m_apiResultList = apiResultList->next;
+                    }
+                    free(apiResultList);
+                    break;
+                }
+                else {
+                    apiResultListPrevious = apiResultList;
+                    apiResultList = apiResultList->next;
+                }
+            }
+        }
+    }
+    LOGD("return (%d) from API result wait for evt (%d)",
+           apiResult->status, api_evt);
+}
+
+
+/*===========================================================================
+ * FUNCTION   : unlockAPI
+ *
+ * DESCRIPTION: API processing is done, unlock
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::unlockAPI()
+{
+    pthread_mutex_unlock(&m_lock);
+}
+
+/*===========================================================================
+ * FUNCTION   : signalAPIResult
+ *
+ * DESCRIPTION: append an API result to the pending-result list and wake all
+ *              threads blocked in waitAPIResult()
+ *
+ * PARAMETERS :
+ *   @result  : API result (copied into the list entry)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::signalAPIResult(qcamera_api_result_t *result)
+{
+
+    pthread_mutex_lock(&m_lock);
+    api_result_list *apiResult = (api_result_list *)malloc(sizeof(api_result_list));
+    if (apiResult == NULL) {
+        LOGE("ERROR: malloc for api result failed, Result will not be sent");
+        goto malloc_failed;
+    }
+    apiResult->result = *result;
+    apiResult->next = NULL;
+    // Append at the tail so results are consumed in arrival order.
+    if (m_apiResultList == NULL) m_apiResultList = apiResult;
+    else {
+        api_result_list *apiResultList = m_apiResultList;
+        while(apiResultList->next != NULL) apiResultList = apiResultList->next;
+        apiResultList->next = apiResult;
+    }
+malloc_failed:
+    // Broadcast even on malloc failure: waiters re-scan the list and keep
+    // waiting, so no stale result is delivered.
+    pthread_cond_broadcast(&m_cond);
+    pthread_mutex_unlock(&m_lock);
+}
+
+/*===========================================================================
+ * FUNCTION   : signalEvtResult
+ *
+ * DESCRIPTION: signal condition variable that certain event was processed
+ *
+ * PARAMETERS :
+ *   @result  : Event result (copied into m_evtResult; single-slot, unlike
+ *              the list used for API results)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::signalEvtResult(qcamera_api_result_t *result)
+{
+    pthread_mutex_lock(&m_evtLock);
+    m_evtResult = *result;
+    pthread_cond_signal(&m_evtCond);
+    pthread_mutex_unlock(&m_evtLock);
+}
+
+/*===========================================================================
+ * FUNCTION   : growMaxStreamDimension
+ *
+ * DESCRIPTION: scan all streams of a channel (skipping metadata and
+ *              postview streams) and grow max_dim to cover the largest
+ *              width/height seen
+ *
+ * PARAMETERS :
+ *   @pChannel : channel whose streams are scanned
+ *   @max_dim  : [in/out] running maximum dimensions
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void growMaxStreamDimension(QCameraChannel *pChannel,
+        cam_dimension_t &max_dim)
+{
+    cam_dimension_t str_dim;
+    for (uint8_t i = 0; i < pChannel->getNumOfStreams(); i++) {
+        QCameraStream *pStream = pChannel->getStreamByIndex(i);
+        if (pStream == NULL) {
+            continue;
+        }
+        if ((pStream->isTypeOf(CAM_STREAM_TYPE_METADATA))
+                || (pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW))) {
+            continue;
+        }
+        pStream->getFrameDimension(str_dim);
+        if (str_dim.width > max_dim.width) {
+            max_dim.width = str_dim.width;
+        }
+        if (str_dim.height > max_dim.height) {
+            max_dim.height = str_dim.height;
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : prepareRawStream
+ *
+ * DESCRIPTION: compute the maximum dimension across all existing channels
+ *              plus the channel being configured, and update the RAW
+ *              dimension in parameters accordingly
+ *
+ * PARAMETERS :
+ *   @curChannel : channel currently being configured (may not yet be in
+ *                 m_channels)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::prepareRawStream(QCameraChannel *curChannel)
+{
+    cam_dimension_t max_dim;
+
+    max_dim.width = 0;
+    max_dim.height = 0;
+
+    // The duplicated scan loops of the original are now a single helper.
+    for (int j = 0; j < QCAMERA_CH_TYPE_MAX; j++) {
+        if (m_channels[j] != NULL) {
+            growMaxStreamDimension(m_channels[j], max_dim);
+        }
+    }
+    growMaxStreamDimension(curChannel, max_dim);
+
+    return mParameters.updateRAW(max_dim);
+}
+/*===========================================================================
+ * FUNCTION   : addStreamToChannel
+ *
+ * DESCRIPTION: add a stream into a channel
+ *
+ * PARAMETERS :
+ *   @pChannel   : ptr to channel obj
+ *   @streamType : type of stream to be added
+ *   @streamCB   : callback of stream
+ *   @userData   : user data ptr to callback
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addStreamToChannel(QCameraChannel *pChannel,
+                                                      cam_stream_type_t streamType,
+                                                      stream_cb_routine streamCB,
+                                                      void *userData)
+{
+    int32_t rc = NO_ERROR;
+
+    if (streamType == CAM_STREAM_TYPE_RAW) {
+        // RAW dimension must cover the largest active stream; refresh it first.
+        prepareRawStream(pChannel);
+    }
+
+    // Compute padding requirements BEFORE allocating the stream info buffer,
+    // so the getAnalysisInfo() failure path cannot leak the allocation
+    // (the original allocated first and returned without freeing).
+    cam_padding_info_t padding_info;
+
+    if (streamType == CAM_STREAM_TYPE_ANALYSIS) {
+        cam_analysis_info_t analysisInfo;
+        cam_feature_mask_t featureMask;
+
+        featureMask = 0;
+        mParameters.getStreamPpMask(CAM_STREAM_TYPE_ANALYSIS, featureMask);
+        rc = mParameters.getAnalysisInfo(
+                ((mParameters.getRecordingHintValue() == true) &&
+                 mParameters.fdModeInVideo()),
+                FALSE,
+                featureMask,
+                &analysisInfo);
+        if (rc != NO_ERROR) {
+            LOGE("getAnalysisInfo failed, ret = %d", rc);
+            return rc;
+        }
+
+        padding_info = analysisInfo.analysis_padding_info;
+    } else {
+        padding_info =
+                gCamCapability[mCameraId]->padding_info;
+        if (streamType == CAM_STREAM_TYPE_PREVIEW) {
+            padding_info.width_padding = mSurfaceStridePadding;
+            padding_info.height_padding = CAM_PAD_TO_2;
+        }
+        // Offsets only apply to reprocessed snapshots with low-light noise
+        // reduction; clear them otherwise.
+        if((!needReprocess())
+                || (streamType != CAM_STREAM_TYPE_SNAPSHOT)
+                || (!mParameters.isLLNoiseEnabled())) {
+            padding_info.offset_info.offset_x = 0;
+            padding_info.offset_info.offset_y = 0;
+        }
+    }
+
+    QCameraHeapMemory *pStreamInfo = allocateStreamInfoBuf(streamType);
+    if (pStreamInfo == NULL) {
+        LOGE("no mem for stream info buf");
+        return NO_MEMORY;
+    }
+
+    uint8_t minStreamBufNum = getBufNumRequired(streamType);
+    // ZSL snapshot buffers are allocated on demand to save memory.
+    bool bDynAllocBuf = false;
+    if (isZSLMode() && streamType == CAM_STREAM_TYPE_SNAPSHOT) {
+        bDynAllocBuf = true;
+    }
+
+    bool deferAllocation = needDeferred(streamType);
+    LOGD("deferAllocation = %d bDynAllocBuf = %d, stream type = %d",
+            deferAllocation, bDynAllocBuf, streamType);
+    // NOTE(review): ownership of pStreamInfo passes to addStream(); the
+    // channel/stream is assumed to release it even on failure -- confirm.
+    rc = pChannel->addStream(*this,
+            pStreamInfo,
+            NULL,
+            minStreamBufNum,
+            &padding_info,
+            streamCB, userData,
+            bDynAllocBuf,
+            deferAllocation);
+
+    if (rc != NO_ERROR) {
+        LOGE("add stream type (%d) failed, ret = %d",
+               streamType, rc);
+        return rc;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addPreviewChannel
+ *
+ * DESCRIPTION: add a preview channel that contains a preview stream, the
+ *              metadata stream, and optionally analysis / RAW streams
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addPreviewChannel()
+{
+    int32_t rc = NO_ERROR;
+    QCameraChannel *pChannel = NULL;
+    char value[PROPERTY_VALUE_MAX];
+    bool raw_yuv = false;
+
+
+    if (m_channels[QCAMERA_CH_TYPE_PREVIEW] != NULL) {
+        // if we had preview channel before, delete it first
+        delete m_channels[QCAMERA_CH_TYPE_PREVIEW];
+        m_channels[QCAMERA_CH_TYPE_PREVIEW] = NULL;
+    }
+
+    pChannel = new QCameraChannel(mCameraHandle->camera_handle,
+                                  mCameraHandle->ops);
+    if (NULL == pChannel) {
+        LOGE("no mem for preview channel");
+        return NO_MEMORY;
+    }
+
+    // preview only channel, don't need bundle attr and cb
+    rc = pChannel->init(NULL, NULL, NULL);
+    if (rc != NO_ERROR) {
+        LOGE("init preview channel failed, ret = %d", rc);
+        // fix: channel was leaked on this path
+        delete pChannel;
+        return rc;
+    }
+
+    // meta data stream always coexists with preview if applicable
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA,
+                            metadata_stream_cb_routine, this);
+    if (rc != NO_ERROR) {
+        LOGE("add metadata stream failed, ret = %d", rc);
+        // fix: channel was leaked on this path
+        delete pChannel;
+        return rc;
+    }
+
+    if (isRdiMode()) {
+        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_RAW,
+                                rdi_mode_stream_cb_routine, this);
+    } else {
+        if (isNoDisplayMode()) {
+            rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
+                                    nodisplay_preview_stream_cb_routine, this);
+        } else {
+            rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
+                                    preview_stream_cb_routine, this);
+            pChannel->setStreamSyncCB(CAM_STREAM_TYPE_PREVIEW,
+                    synchronous_stream_cb_routine);
+        }
+    }
+
+    // Analysis stream for FD / DCRF, except in secure mode.
+    if (((mParameters.fdModeInVideo())
+            || (mParameters.getDcrf() == true)
+            || (mParameters.getRecordingHintValue() != true))
+            && (!mParameters.isSecureMode())) {
+        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_ANALYSIS,
+                NULL, this);
+        if (rc != NO_ERROR) {
+            LOGE("add Analysis stream failed, ret = %d", rc);
+            // fix: channel was leaked on this path
+            delete pChannel;
+            return rc;
+        }
+    }
+
+    // Optional RAW dump stream, controlled by a debug property.
+    property_get("persist.camera.raw_yuv", value, "0");
+    raw_yuv = atoi(value) > 0 ? true : false;
+    if ( raw_yuv ) {
+        rc = addStreamToChannel(pChannel,CAM_STREAM_TYPE_RAW,
+                preview_raw_stream_cb_routine,this);
+        if ( rc != NO_ERROR ) {
+            // fix: format string had a stray __FUNCTION__ argument that did
+            // not match "%d" (undefined behavior in the varargs call)
+            LOGE("add raw stream failed, ret = %d", rc);
+            delete pChannel;
+            return rc;
+        }
+    }
+
+    // Covers the preview/RDI stream added above (rc carried through).
+    if (rc != NO_ERROR) {
+        LOGE("add preview stream failed, ret = %d", rc);
+        delete pChannel;
+        return rc;
+    }
+
+    m_channels[QCAMERA_CH_TYPE_PREVIEW] = pChannel;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addVideoChannel
+ *
+ * DESCRIPTION: add a video channel that contains a video stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addVideoChannel()
+{
+    int32_t rc = NO_ERROR;
+
+    // Recreate the channel from scratch if one already exists.
+    if (m_channels[QCAMERA_CH_TYPE_VIDEO] != NULL) {
+        delete m_channels[QCAMERA_CH_TYPE_VIDEO];
+        m_channels[QCAMERA_CH_TYPE_VIDEO] = NULL;
+    }
+
+    QCameraVideoChannel *pVideoChannel = new QCameraVideoChannel(
+            mCameraHandle->camera_handle, mCameraHandle->ops);
+    if (NULL == pVideoChannel) {
+        LOGE("no mem for video channel");
+        return NO_MEMORY;
+    }
+
+    if (isLowPowerMode()) {
+        // Low power mode bundles liveshot with video: burst notify mode,
+        // minimum buffers held in the queue.
+        mm_camera_channel_attr_t attr;
+        memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+        attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
+        attr.look_back = 0; //wait for future frame for liveshot
+        attr.post_frame_skip = mParameters.getZSLBurstInterval();
+        attr.water_mark = 1; //hold min buffers possible in Q
+        attr.max_unmatched_frames = mParameters.getMaxUnmatchedFramesInQueue();
+        rc = pVideoChannel->init(&attr, snapshot_channel_cb_routine, this);
+    } else {
+        // video only channel, don't need bundle attr and cb
+        rc = pVideoChannel->init(NULL, NULL, NULL);
+    }
+    if (rc != 0) {
+        LOGE("init video channel failed, ret = %d", rc);
+        delete pVideoChannel;
+        return rc;
+    }
+
+    rc = addStreamToChannel(pVideoChannel, CAM_STREAM_TYPE_VIDEO,
+                            video_stream_cb_routine, this);
+    if (rc != NO_ERROR) {
+        LOGE("add video stream failed, ret = %d", rc);
+        delete pVideoChannel;
+        return rc;
+    }
+
+    m_channels[QCAMERA_CH_TYPE_VIDEO] = pVideoChannel;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addSnapshotChannel
+ *
+ * DESCRIPTION: add a snapshot channel that contains a snapshot stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ * NOTE       : Add this channel for live snapshot usecase. Regular capture will
+ *              use addCaptureChannel.
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addSnapshotChannel()
+{
+    int32_t rc = NO_ERROR;
+
+    // Recreate the channel from scratch if one already exists.
+    if (m_channels[QCAMERA_CH_TYPE_SNAPSHOT] != NULL) {
+        delete m_channels[QCAMERA_CH_TYPE_SNAPSHOT];
+        m_channels[QCAMERA_CH_TYPE_SNAPSHOT] = NULL;
+    }
+
+    QCameraChannel *pSnapChannel = new QCameraChannel(
+            mCameraHandle->camera_handle, mCameraHandle->ops);
+    if (NULL == pSnapChannel) {
+        LOGE("no mem for snapshot channel");
+        return NO_MEMORY;
+    }
+
+    // Continuous notify with low priority: liveshot must not starve video.
+    mm_camera_channel_attr_t attr;
+    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
+    attr.look_back = 0; //wait for future frame for liveshot
+    attr.post_frame_skip = mParameters.getZSLBurstInterval();
+    attr.water_mark = 1; //hold min buffers possible in Q
+    attr.max_unmatched_frames = mParameters.getMaxUnmatchedFramesInQueue();
+    attr.priority = MM_CAMERA_SUPER_BUF_PRIORITY_LOW;
+
+    rc = pSnapChannel->init(&attr, snapshot_channel_cb_routine, this);
+    if (rc != NO_ERROR) {
+        LOGE("init snapshot channel failed, ret = %d", rc);
+        delete pSnapChannel;
+        return rc;
+    }
+
+    rc = addStreamToChannel(pSnapChannel, CAM_STREAM_TYPE_SNAPSHOT,
+            NULL, NULL);
+    if (rc != NO_ERROR) {
+        LOGE("add snapshot stream failed, ret = %d", rc);
+        delete pSnapChannel;
+        return rc;
+    }
+
+    m_channels[QCAMERA_CH_TYPE_SNAPSHOT] = pSnapChannel;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addRawChannel
+ *
+ * DESCRIPTION: add a raw channel that contains a raw image stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addRawChannel()
+{
+    int32_t rc = NO_ERROR;
+    QCameraChannel *pChannel = NULL;
+
+    if (m_channels[QCAMERA_CH_TYPE_RAW] != NULL) {
+        // if we had raw channel before, delete it first
+        delete m_channels[QCAMERA_CH_TYPE_RAW];
+        m_channels[QCAMERA_CH_TYPE_RAW] = NULL;
+    }
+
+    pChannel = new QCameraChannel(mCameraHandle->camera_handle,
+                                  mCameraHandle->ops);
+    if (NULL == pChannel) {
+        LOGE("no mem for raw channel");
+        return NO_MEMORY;
+    }
+
+    if (mParameters.getofflineRAW()) {
+        // Offline RAW needs bundle attributes and a channel callback so the
+        // captured super-buffers can be handed to offline reprocessing.
+        mm_camera_channel_attr_t attr;
+        memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+        attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
+        attr.look_back = mParameters.getZSLBackLookCount();
+        attr.post_frame_skip = mParameters.getZSLBurstInterval();
+        attr.water_mark = 1;
+        attr.max_unmatched_frames = mParameters.getMaxUnmatchedFramesInQueue();
+        rc = pChannel->init(&attr, raw_channel_cb_routine, this);
+        if (rc != NO_ERROR) {
+            LOGE("init RAW channel failed, ret = %d", rc);
+            delete pChannel;
+            return rc;
+        }
+    } else {
+        rc = pChannel->init(NULL, NULL, NULL);
+        if (rc != NO_ERROR) {
+            LOGE("init raw channel failed, ret = %d", rc);
+            delete pChannel;
+            return rc;
+        }
+    }
+
+    if (!mParameters.isZSLMode()) {
+        // meta data stream always coexists with snapshot in regular RAW capture case
+        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA,
+                metadata_stream_cb_routine, this);
+        if (rc != NO_ERROR) {
+            LOGE("add metadata stream failed, ret = %d", rc);
+            delete pChannel;
+            return rc;
+        }
+    }
+
+    if (mParameters.getofflineRAW()) {
+        // Offline RAW frames go through the channel callback, so no
+        // per-stream callback is needed here.
+        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_RAW,
+                NULL, this);
+    } else {
+        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_RAW,
+                raw_stream_cb_routine, this);
+    }
+    if (rc != NO_ERROR) {
+        // Fix: this used to log "add snapshot stream failed" (copy/paste
+        // error) even though the RAW stream add is what failed.
+        LOGE("add raw stream failed, ret = %d", rc);
+        delete pChannel;
+        return rc;
+    }
+    // Ownership of the channel transfers to m_channels on success.
+    m_channels[QCAMERA_CH_TYPE_RAW] = pChannel;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addZSLChannel
+ *
+ * DESCRIPTION: add a ZSL channel that contains a preview stream and
+ *              a snapshot stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addZSLChannel()
+{
+    int32_t rc = NO_ERROR;
+    QCameraPicChannel *pChannel = NULL;
+    char value[PROPERTY_VALUE_MAX];
+    bool raw_yuv = false;
+
+    if (m_channels[QCAMERA_CH_TYPE_ZSL] != NULL) {
+        // if we had ZSL channel before, delete it first
+        delete m_channels[QCAMERA_CH_TYPE_ZSL];
+        m_channels[QCAMERA_CH_TYPE_ZSL] = NULL;
+    }
+
+    pChannel = new QCameraPicChannel(mCameraHandle->camera_handle,
+                                     mCameraHandle->ops);
+    if (NULL == pChannel) {
+        LOGE("no mem for ZSL channel");
+        return NO_MEMORY;
+    }
+
+    // ZSL channel, init with bundle attr and cb
+    mm_camera_channel_attr_t attr;
+    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+    // Scene selection needs frames delivered continuously; normal ZSL only
+    // needs bursts of matched super-buffers on capture requests.
+    if (mParameters.isSceneSelectionEnabled()) {
+        attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
+    } else {
+        attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
+    }
+    attr.look_back = mParameters.getZSLBackLookCount();
+    attr.post_frame_skip = mParameters.getZSLBurstInterval();
+    if (mParameters.isOEMFeatEnabled()) {
+        attr.post_frame_skip++;
+    }
+    attr.water_mark = mParameters.getZSLQueueDepth();
+    attr.max_unmatched_frames = mParameters.getMaxUnmatchedFramesInQueue();
+    // For instant capture, request a specific frame id from the backend
+    // (the AEC frame bound); 0 means no expectation.
+    attr.user_expected_frame_id =
+        mParameters.isInstantCaptureEnabled() ? (uint8_t)mParameters.getAecFrameBoundValue() : 0;
+
+    //Enabled matched queue
+    if (isFrameSyncEnabled()) {
+        LOGH("Enabling frame sync for dual camera, camera Id: %d",
+                 mCameraId);
+        attr.enable_frame_sync = 1;
+    }
+    rc = pChannel->init(&attr,
+                        zsl_channel_cb,
+                        this);
+    if (rc != 0) {
+        LOGE("init ZSL channel failed, ret = %d", rc);
+        delete pChannel;
+        return rc;
+    }
+
+    // meta data stream always coexists with preview if applicable
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA,
+                            metadata_stream_cb_routine, this);
+    if (rc != NO_ERROR) {
+        LOGE("add metadata stream failed, ret = %d", rc);
+        delete pChannel;
+        return rc;
+    }
+
+    if (isNoDisplayMode()) {
+        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
+                                nodisplay_preview_stream_cb_routine, this);
+    } else {
+        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
+                                preview_stream_cb_routine, this);
+        // Synchronous callback only applies when preview is displayed.
+        pChannel->setStreamSyncCB(CAM_STREAM_TYPE_PREVIEW,
+                synchronous_stream_cb_routine);
+    }
+    if (rc != NO_ERROR) {
+        LOGE("add preview stream failed, ret = %d", rc);
+        delete pChannel;
+        return rc;
+    }
+
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_SNAPSHOT,
+                            NULL, this);
+    if (rc != NO_ERROR) {
+        LOGE("add snapshot stream failed, ret = %d", rc);
+        delete pChannel;
+        return rc;
+    }
+
+    // Analysis stream is not added in secure mode.
+    if (!mParameters.isSecureMode()) {
+        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_ANALYSIS,
+                NULL, this);
+        if (rc != NO_ERROR) {
+            LOGE("add Analysis stream failed, ret = %d", rc);
+            delete pChannel;
+            return rc;
+        }
+    }
+
+    // Optional raw stream, enabled via the persist.camera.raw_yuv property.
+    property_get("persist.camera.raw_yuv", value, "0");
+    raw_yuv = atoi(value) > 0 ? true : false;
+    if (raw_yuv) {
+        rc = addStreamToChannel(pChannel,
+                                CAM_STREAM_TYPE_RAW,
+                                NULL,
+                                this);
+        if (rc != NO_ERROR) {
+            LOGE("add raw stream failed, ret = %d", rc);
+            delete pChannel;
+            return rc;
+        }
+    }
+
+    // Ownership of the channel transfers to m_channels on success.
+    m_channels[QCAMERA_CH_TYPE_ZSL] = pChannel;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addCaptureChannel
+ *
+ * DESCRIPTION: add a capture channel that contains a snapshot stream
+ *              and a postview stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ * NOTE       : Add this channel for regular capture usecase.
+ *              For Live snapshot usecase, use addSnapshotChannel.
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addCaptureChannel()
+{
+    int32_t rc = NO_ERROR;
+    QCameraPicChannel *pChannel = NULL;
+    char value[PROPERTY_VALUE_MAX];
+    bool raw_yuv = false;
+
+    if (m_channels[QCAMERA_CH_TYPE_CAPTURE] != NULL) {
+        // if we had capture channel before, delete it first
+        delete m_channels[QCAMERA_CH_TYPE_CAPTURE];
+        m_channels[QCAMERA_CH_TYPE_CAPTURE] = NULL;
+    }
+
+    pChannel = new QCameraPicChannel(mCameraHandle->camera_handle,
+                                  mCameraHandle->ops);
+    if (NULL == pChannel) {
+        LOGE("no mem for capture channel");
+        return NO_MEMORY;
+    }
+
+    // Capture channel, only need snapshot and postview streams start together
+    mm_camera_channel_attr_t attr;
+    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+    if ( mLongshotEnabled ) {
+        attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
+        attr.look_back = mParameters.getZSLBackLookCount();
+        attr.water_mark = mParameters.getZSLQueueDepth();
+    } else {
+        attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
+    }
+    attr.max_unmatched_frames = mParameters.getMaxUnmatchedFramesInQueue();
+
+    rc = pChannel->init(&attr,
+                        capture_channel_cb_routine,
+                        this);
+    if (rc != NO_ERROR) {
+        LOGE("init capture channel failed, ret = %d", rc);
+        // Fix: free the channel on failure (it was leaked before), matching
+        // the cleanup done by the other add*Channel() methods.
+        delete pChannel;
+        return rc;
+    }
+
+    // meta data stream always coexists with snapshot in regular capture case
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA,
+                            metadata_stream_cb_routine, this);
+    if (rc != NO_ERROR) {
+        LOGE("add metadata stream failed, ret = %d", rc);
+        delete pChannel;
+        return rc;
+    }
+
+    if (!mLongshotEnabled) {
+        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_POSTVIEW,
+                                NULL, this);
+
+        if (rc != NO_ERROR) {
+            LOGE("add postview stream failed, ret = %d", rc);
+            delete pChannel;
+            return rc;
+        }
+    } else {
+        // In longshot mode preview replaces postview and gets a synchronous
+        // callback for display updates.
+        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
+                                preview_stream_cb_routine, this);
+
+        if (rc != NO_ERROR) {
+            LOGE("add preview stream failed, ret = %d", rc);
+            delete pChannel;
+            return rc;
+        }
+        pChannel->setStreamSyncCB(CAM_STREAM_TYPE_PREVIEW,
+                synchronous_stream_cb_routine);
+    }
+
+    if (!mParameters.getofflineRAW()) {
+        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_SNAPSHOT,
+                NULL, this);
+        if (rc != NO_ERROR) {
+            LOGE("add snapshot stream failed, ret = %d", rc);
+            delete pChannel;
+            return rc;
+        }
+    }
+
+    // Optional raw stream: enabled by the persist.camera.raw_yuv debug
+    // property, or required when offline RAW processing is active.
+    stream_cb_routine stream_cb = NULL;
+    property_get("persist.camera.raw_yuv", value, "0");
+    raw_yuv = atoi(value) > 0 ? true : false;
+
+    if (raw_yuv) {
+        stream_cb = snapshot_raw_stream_cb_routine;
+    }
+
+    if ((raw_yuv) || (mParameters.getofflineRAW())) {
+        rc = addStreamToChannel(pChannel,
+                CAM_STREAM_TYPE_RAW, stream_cb, this);
+        if (rc != NO_ERROR) {
+            LOGE("add raw stream failed, ret = %d", rc);
+            delete pChannel;
+            return rc;
+        }
+    }
+
+    // Ownership of the channel transfers to m_channels on success.
+    m_channels[QCAMERA_CH_TYPE_CAPTURE] = pChannel;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addMetaDataChannel
+ *
+ * DESCRIPTION: add a meta data channel that contains a metadata stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addMetaDataChannel()
+{
+    // Tear down any previously created metadata channel first.
+    if (m_channels[QCAMERA_CH_TYPE_METADATA] != NULL) {
+        delete m_channels[QCAMERA_CH_TYPE_METADATA];
+        m_channels[QCAMERA_CH_TYPE_METADATA] = NULL;
+    }
+
+    QCameraChannel *metaChannel =
+            new QCameraChannel(mCameraHandle->camera_handle,
+                               mCameraHandle->ops);
+    if (metaChannel == NULL) {
+        LOGE("no mem for metadata channel");
+        return NO_MEMORY;
+    }
+
+    // Standalone channel: no bundle attributes or channel callback needed.
+    int32_t rc = metaChannel->init(NULL,
+                        NULL,
+                        NULL);
+    if (rc != NO_ERROR) {
+        LOGE("init metadata channel failed, ret = %d", rc);
+        delete metaChannel;
+        return rc;
+    }
+
+    rc = addStreamToChannel(metaChannel, CAM_STREAM_TYPE_METADATA,
+                            metadata_stream_cb_routine, this);
+    if (rc != NO_ERROR) {
+        LOGE("add metadata stream failed, ret = %d", rc);
+        delete metaChannel;
+        return rc;
+    }
+
+    // Success: hand ownership of the channel to the channel table.
+    m_channels[QCAMERA_CH_TYPE_METADATA] = metaChannel;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addCallbackChannel
+ *
+ * DESCRIPTION: add a callback channel that contains a callback stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addCallbackChannel()
+{
+    // Replace any stale callback channel from a previous configuration.
+    if (m_channels[QCAMERA_CH_TYPE_CALLBACK] != NULL) {
+        delete m_channels[QCAMERA_CH_TYPE_CALLBACK];
+        m_channels[QCAMERA_CH_TYPE_CALLBACK] = NULL;
+    }
+
+    QCameraChannel *cbChannel =
+            new QCameraChannel(mCameraHandle->camera_handle,
+                               mCameraHandle->ops);
+    if (cbChannel == NULL) {
+        LOGE("no mem for callback channel");
+        return NO_MEMORY;
+    }
+
+    // No bundle attributes or channel callback; only user data is passed.
+    int32_t rc = cbChannel->init(NULL, NULL, this);
+    if (rc != NO_ERROR) {
+        LOGE("init callback channel failed, ret = %d",
+                 rc);
+        delete cbChannel;
+        return rc;
+    }
+
+    rc = addStreamToChannel(cbChannel, CAM_STREAM_TYPE_CALLBACK,
+            callback_stream_cb_routine, this);
+    if (rc != NO_ERROR) {
+        LOGE("add callback stream failed, ret = %d", rc);
+        delete cbChannel;
+        return rc;
+    }
+
+    // Success: hand ownership of the channel to the channel table.
+    m_channels[QCAMERA_CH_TYPE_CALLBACK] = cbChannel;
+    return rc;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : addAnalysisChannel
+ *
+ * DESCRIPTION: add a analysis channel that contains a analysis stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addAnalysisChannel()
+{
+    int32_t rc = NO_ERROR;
+    QCameraChannel *pChannel = NULL;
+
+    if (m_channels[QCAMERA_CH_TYPE_ANALYSIS] != NULL) {
+        // if we had analysis channel before, delete it first
+        delete m_channels[QCAMERA_CH_TYPE_ANALYSIS];
+        m_channels[QCAMERA_CH_TYPE_ANALYSIS] = NULL;
+    }
+
+    pChannel = new QCameraChannel(mCameraHandle->camera_handle,
+                                  mCameraHandle->ops);
+    if (NULL == pChannel) {
+        // Fix: message previously said "metadata channel" (copy/paste error).
+        LOGE("no mem for analysis channel");
+        return NO_MEMORY;
+    }
+
+    // Standalone channel: no bundle attributes or channel callback needed.
+    rc = pChannel->init(NULL, NULL, this);
+    if (rc != NO_ERROR) {
+        LOGE("init Analysis channel failed, ret = %d", rc);
+        delete pChannel;
+        return rc;
+    }
+
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_ANALYSIS,
+                            NULL, this);
+    if (rc != NO_ERROR) {
+        LOGE("add Analysis stream failed, ret = %d", rc);
+        delete pChannel;
+        return rc;
+    }
+
+    // Ownership of the channel transfers to m_channels on success.
+    m_channels[QCAMERA_CH_TYPE_ANALYSIS] = pChannel;
+    return rc;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : getPPConfig
+ *
+ * DESCRIPTION: get Post processing configuration data
+ *
+ * PARAMETERS :
+ * @pp config:  pp config structure pointer,
+ * @curIndex:  current pp channel index
+ * @multipass: Flag if multipass processing enabled.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::getPPConfig(cam_pp_feature_config_t &pp_config,
+        int8_t curIndex, bool multipass)
+{
+    int32_t rc = NO_ERROR;
+
+    if (multipass) {
+        LOGW("Multi pass enabled. Total Pass = %d, cur index = %d",
+                mParameters.getReprocCount(), curIndex);
+    }
+
+    LOGH("Supported pproc feature mask = %llx",
+            gCamCapability[mCameraId]->qcom_supported_feature_mask);
+    cam_feature_mask_t feature_mask = gCamCapability[mCameraId]->qcom_supported_feature_mask;
+    int32_t zoomLevel = mParameters.getParmZoomLevel();
+    uint32_t rotation = mParameters.getJpegRotation();
+    int32_t effect = mParameters.getEffectValue();
+
+    pp_config.cur_reproc_count = curIndex + 1;
+    pp_config.total_reproc_count = mParameters.getReprocCount();
+
+    // Feature selection depends on which reprocess pass is being configured.
+    switch(curIndex) {
+        case 0:
+            //Configure feature mask for first pass of reprocessing
+            //check if any effects are enabled
+            if ((CAM_EFFECT_MODE_OFF != effect) &&
+                (feature_mask & CAM_QCOM_FEATURE_EFFECT)) {
+                pp_config.feature_mask |= CAM_QCOM_FEATURE_EFFECT;
+                pp_config.effect = effect;
+            }
+
+            //check for features that need to be enabled by default like sharpness
+            //(if supported by hw).
+            if ((feature_mask & CAM_QCOM_FEATURE_SHARPNESS) &&
+                !mParameters.isOptiZoomEnabled()) {
+                pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
+                pp_config.sharpness = mParameters.getSharpness();
+            }
+
+            //check if zoom is enabled
+            if ((zoomLevel > 0) && (feature_mask & CAM_QCOM_FEATURE_CROP)) {
+                pp_config.feature_mask |= CAM_QCOM_FEATURE_CROP;
+            }
+
+            if (mParameters.isWNREnabled() &&
+                (feature_mask & CAM_QCOM_FEATURE_DENOISE2D)) {
+                pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
+                pp_config.denoise2d.denoise_enable = 1;
+                pp_config.denoise2d.process_plates =
+                        mParameters.getDenoiseProcessPlate(CAM_INTF_PARM_WAVELET_DENOISE);
+            }
+
+            if (isCACEnabled()) {
+                pp_config.feature_mask |= CAM_QCOM_FEATURE_CAC;
+            }
+
+            //check if rotation is required
+            if ((feature_mask & CAM_QCOM_FEATURE_ROTATION) && (rotation > 0)) {
+                pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
+                // NOTE(review): the rotation == 0 arm below is unreachable
+                // here because this branch requires rotation > 0.
+                if (rotation == 0) {
+                    pp_config.rotation = ROTATE_0;
+                } else if (rotation == 90) {
+                    pp_config.rotation = ROTATE_90;
+                } else if (rotation == 180) {
+                    pp_config.rotation = ROTATE_180;
+                } else if (rotation == 270) {
+                    pp_config.rotation = ROTATE_270;
+                }
+            }
+
+            if (mParameters.isHDREnabled()){
+                pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
+                pp_config.hdr_param.hdr_enable = 1;
+                pp_config.hdr_param.hdr_need_1x = mParameters.isHDR1xFrameEnabled();
+                pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
+            } else {
+                pp_config.feature_mask &= ~CAM_QCOM_FEATURE_HDR;
+                pp_config.hdr_param.hdr_enable = 0;
+            }
+
+            //check if scaling is enabled
+            if ((feature_mask & CAM_QCOM_FEATURE_SCALE) &&
+                mParameters.isReprocScaleEnabled() &&
+                mParameters.isUnderReprocScaling()){
+                pp_config.feature_mask |= CAM_QCOM_FEATURE_SCALE;
+                mParameters.getPicSizeFromAPK(
+                        pp_config.scale_param.output_width,
+                        pp_config.scale_param.output_height);
+            }
+
+            if(mParameters.isUbiFocusEnabled()) {
+                pp_config.feature_mask |= CAM_QCOM_FEATURE_UBIFOCUS;
+            } else {
+                pp_config.feature_mask &= ~CAM_QCOM_FEATURE_UBIFOCUS;
+            }
+
+            if(mParameters.isUbiRefocus()) {
+                pp_config.feature_mask |= CAM_QCOM_FEATURE_REFOCUS;
+                pp_config.misc_buf_param.misc_buffer_index = 0;
+            } else {
+                pp_config.feature_mask &= ~CAM_QCOM_FEATURE_REFOCUS;
+            }
+
+            if(mParameters.isChromaFlashEnabled()) {
+                pp_config.feature_mask |= CAM_QCOM_FEATURE_CHROMA_FLASH;
+                pp_config.flash_value = CAM_FLASH_ON;
+            } else {
+                pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CHROMA_FLASH;
+            }
+
+            if(mParameters.isOptiZoomEnabled() && (0 <= zoomLevel)) {
+                pp_config.feature_mask |= CAM_QCOM_FEATURE_OPTIZOOM;
+                pp_config.zoom_level = (uint8_t) zoomLevel;
+            } else {
+                pp_config.feature_mask &= ~CAM_QCOM_FEATURE_OPTIZOOM;
+            }
+
+            if (mParameters.getofflineRAW()) {
+                pp_config.feature_mask |= CAM_QCOM_FEATURE_RAW_PROCESSING;
+            }
+
+            if (mParameters.isTruePortraitEnabled()) {
+                pp_config.feature_mask |= CAM_QCOM_FEATURE_TRUEPORTRAIT;
+                pp_config.misc_buf_param.misc_buffer_index = 0;
+            } else {
+                pp_config.feature_mask &= ~CAM_QCOM_FEATURE_TRUEPORTRAIT;
+            }
+
+            if(mParameters.isStillMoreEnabled()) {
+                pp_config.feature_mask |= CAM_QCOM_FEATURE_STILLMORE;
+            } else {
+                pp_config.feature_mask &= ~CAM_QCOM_FEATURE_STILLMORE;
+            }
+
+            if (mParameters.isOEMFeatEnabled()) {
+                pp_config.feature_mask |= CAM_OEM_FEATURE_1;
+            }
+
+            // CDS: prefer DSDN when the hardware supports it.
+            if (mParameters.getCDSMode() != CAM_CDS_MODE_OFF) {
+                if (feature_mask & CAM_QCOM_FEATURE_DSDN) {
+                    pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
+                } else {
+                    pp_config.feature_mask |= CAM_QCOM_FEATURE_CDS;
+                }
+            }
+
+            // With multiple PP channels, defer rotation/CDS/DSDN to the
+            // second pass and only crop here; otherwise scale in this pass.
+            if ((multipass) &&
+                    (m_postprocessor.getPPChannelCount() > 1)) {
+                pp_config.feature_mask &= ~CAM_QCOM_FEATURE_PP_PASS_2;
+                pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
+                pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
+                pp_config.feature_mask &= ~CAM_QCOM_FEATURE_DSDN;
+                pp_config.feature_mask |= CAM_QCOM_FEATURE_CROP;
+            } else {
+                pp_config.feature_mask |= CAM_QCOM_FEATURE_SCALE;
+            }
+
+            // Crop is also needed when the thumbnail must be resized from
+            // the postview dimensions.
+            cam_dimension_t thumb_src_dim;
+            cam_dimension_t thumb_dst_dim;
+            mParameters.getThumbnailSize(&(thumb_dst_dim.width), &(thumb_dst_dim.height));
+            mParameters.getStreamDimension(CAM_STREAM_TYPE_POSTVIEW,thumb_src_dim);
+            if ((thumb_dst_dim.width != thumb_src_dim.width) ||
+                    (thumb_dst_dim.height != thumb_src_dim.height)) {
+                if (thumb_dst_dim.width != 0 && thumb_dst_dim.height != 0) {
+                    pp_config.feature_mask |= CAM_QCOM_FEATURE_CROP;
+                }
+            }
+
+            break;
+
+        case 1:
+            //Configure feature mask for second pass of reprocessing
+            pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_PASS_2;
+            if ((feature_mask & CAM_QCOM_FEATURE_ROTATION) && (rotation > 0)) {
+                pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
+                // NOTE(review): rotation == 0 arm unreachable, as in case 0.
+                if (rotation == 0) {
+                    pp_config.rotation = ROTATE_0;
+                } else if (rotation == 90) {
+                    pp_config.rotation = ROTATE_90;
+                } else if (rotation == 180) {
+                    pp_config.rotation = ROTATE_180;
+                } else if (rotation == 270) {
+                    pp_config.rotation = ROTATE_270;
+                }
+            }
+            if (mParameters.getCDSMode() != CAM_CDS_MODE_OFF) {
+                if (feature_mask & CAM_QCOM_FEATURE_DSDN) {
+                    pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
+                } else {
+                    pp_config.feature_mask |= CAM_QCOM_FEATURE_CDS;
+                }
+            }
+            pp_config.feature_mask &= ~CAM_QCOM_FEATURE_RAW_PROCESSING;
+            pp_config.feature_mask &= ~CAM_QCOM_FEATURE_METADATA_PROCESSING;
+            break;
+
+        // NOTE(review): no default case — other indices leave pp_config
+        // unchanged apart from the reproc counts set above.
+    }
+    LOGH("pproc feature mask set = %llx pass count = %d",
+             pp_config.feature_mask, curIndex);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addReprocChannel
+ *
+ * DESCRIPTION: add a reprocess channel that will do reprocess on frames
+ *              coming from input channel
+ *
+ * PARAMETERS :
+ *   @pInputChannel : ptr to input channel whose frames will be post-processed
+ *   @cur_channel_index : Current channel index in multipass
+ *
+ * RETURN     : Ptr to the newly created channel obj. NULL if failed.
+ *              Caller takes ownership of the returned channel.
+ *==========================================================================*/
+QCameraReprocessChannel *QCamera2HardwareInterface::addReprocChannel(
+        QCameraChannel *pInputChannel, int8_t cur_channel_index)
+{
+    int32_t rc = NO_ERROR;
+    QCameraReprocessChannel *pChannel = NULL;
+    uint32_t burst_cnt = mParameters.getNumOfSnapshots();
+
+    if (pInputChannel == NULL) {
+        LOGE("input channel obj is NULL");
+        return NULL;
+    }
+
+    pChannel = new QCameraReprocessChannel(mCameraHandle->camera_handle,
+                                           mCameraHandle->ops);
+    if (NULL == pChannel) {
+        LOGE("no mem for reprocess channel");
+        return NULL;
+    }
+
+    // Reprocess output is delivered continuously as frames are produced.
+    mm_camera_channel_attr_t attr;
+    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
+    attr.max_unmatched_frames = mParameters.getMaxUnmatchedFramesInQueue();
+    rc = pChannel->init(&attr,
+                        postproc_channel_cb_routine,
+                        this);
+    if (rc != NO_ERROR) {
+        LOGE("init reprocess channel failed, ret = %d", rc);
+        delete pChannel;
+        return NULL;
+    }
+
+    // pp feature config
+    cam_pp_feature_config_t pp_config;
+    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
+
+    rc = getPPConfig(pp_config, cur_channel_index,
+            ((mParameters.getReprocCount() > 1) ? TRUE : FALSE));
+    if (rc != NO_ERROR){
+        LOGE("Error while creating PP config");
+        delete pChannel;
+        return NULL;
+    }
+
+    uint8_t minStreamBufNum = getBufNumRequired(CAM_STREAM_TYPE_OFFLINE_PROC);
+
+    //WNR and HDR happen inline. No extra buffers needed.
+    cam_feature_mask_t temp_feature_mask = pp_config.feature_mask;
+    temp_feature_mask &= ~CAM_QCOM_FEATURE_HDR;
+    if (temp_feature_mask && mParameters.isHDREnabled()) {
+        // Extra input buffers for the HDR bracketing sequence.
+        minStreamBufNum = (uint8_t)(1 + mParameters.getNumOfExtraHDRInBufsIfNeeded());
+    }
+
+    if (mParameters.isStillMoreEnabled()) {
+        cam_still_more_t stillmore_config = mParameters.getStillMoreSettings();
+        pp_config.burst_cnt = stillmore_config.burst_count;
+        LOGH("Stillmore burst %d", pp_config.burst_cnt);
+
+        // getNumOfExtraBuffersForImageProc returns 1 less buffer assuming
+        // number of capture is already added. In the case of liveshot,
+        // stillmore burst is 1. This is to account for the premature decrement
+        if (mParameters.getNumOfExtraBuffersForImageProc() == 0) {
+            minStreamBufNum += 1;
+        }
+    }
+
+    // Manual capture type 3 and above reprocesses once per pass; the first
+    // channel in the chain advertises two reprocess iterations.
+    if (mParameters.getManualCaptureMode() >= CAM_MANUAL_CAPTURE_TYPE_3) {
+        minStreamBufNum += mParameters.getReprocCount() - 1;
+        burst_cnt = mParameters.getReprocCount();
+        if (cur_channel_index == 0) {
+            pChannel->setReprocCount(2);
+        } else {
+            pChannel->setReprocCount(1);
+        }
+    } else {
+        pChannel->setReprocCount(1);
+    }
+
+    // Add non inplace image lib buffers only when ppproc is present,
+    // because pproc is non inplace and input buffers for img lib
+    // are output for pproc and this number of extra buffers is required
+    // If pproc is not there, input buffers for imglib are from snapshot stream
+    uint8_t imglib_extra_bufs = mParameters.getNumOfExtraBuffersForImageProc();
+    if (temp_feature_mask && imglib_extra_bufs) {
+        // 1 is added because getNumOfExtraBuffersForImageProc returns extra
+        // buffers assuming number of capture is already added
+        minStreamBufNum = (uint8_t)(minStreamBufNum + imglib_extra_bufs + 1);
+    }
+
+    //Mask out features that are already processed in snapshot stream.
+    cam_feature_mask_t snapshot_feature_mask = 0;
+    mParameters.getStreamPpMask(CAM_STREAM_TYPE_SNAPSHOT, snapshot_feature_mask);
+
+    pp_config.feature_mask &= ~snapshot_feature_mask;
+    LOGH("Snapshot feature mask: 0x%llx, reproc feature mask: 0x%llx",
+            snapshot_feature_mask, pp_config.feature_mask);
+
+    // Offline reprocess is used for regular capture, or whenever the
+    // postprocessor has offline data buffers queued.
+    bool offlineReproc = isRegularCapture();
+    if (m_postprocessor.mOfflineDataBufs != NULL) {
+        offlineReproc = TRUE;
+    }
+
+    cam_padding_info_t paddingInfo = gCamCapability[mCameraId]->padding_info;
+    paddingInfo.offset_info.offset_x = 0;
+    paddingInfo.offset_info.offset_y = 0;
+    rc = pChannel->addReprocStreamsFromSource(*this,
+                                              pp_config,
+                                              pInputChannel,
+                                              minStreamBufNum,
+                                              burst_cnt,
+                                              &paddingInfo,
+                                              mParameters,
+                                              mLongshotEnabled,
+                                              offlineReproc);
+    if (rc != NO_ERROR) {
+        delete pChannel;
+        return NULL;
+    }
+
+    return pChannel;
+}
+
+/*===========================================================================
+ * FUNCTION   : addOfflineReprocChannel
+ *
+ * DESCRIPTION: add an offline reprocess channel containing one reproc stream,
+ *              that will do reprocess on frames coming from external images
+ *
+ * PARAMETERS :
+ *   @img_config  : offline reprocess image info (input format/dim/planes and
+ *                  number of buffers)
+ *   @pp_feature  : pp feature config applied to the offline stream
+ *   @stream_cb   : callback routine invoked per reprocessed frame
+ *   @userdata    : opaque cookie handed back through stream_cb
+ *
+ * RETURN     : ptr to the new channel obj on success, NULL on failure
+ *==========================================================================*/
+QCameraReprocessChannel *QCamera2HardwareInterface::addOfflineReprocChannel(
+                                            cam_pp_offline_src_config_t &img_config,
+                                            cam_pp_feature_config_t &pp_feature,
+                                            stream_cb_routine stream_cb,
+                                            void *userdata)
+{
+    int32_t rc = NO_ERROR;
+    QCameraReprocessChannel *pChannel = NULL;
+
+    pChannel = new QCameraReprocessChannel(mCameraHandle->camera_handle,
+                                           mCameraHandle->ops);
+    if (NULL == pChannel) {
+        LOGE("no mem for reprocess channel");
+        return NULL;
+    }
+
+    // No channel-level callbacks needed; frames are delivered per-stream.
+    rc = pChannel->init(NULL, NULL, NULL);
+    if (rc != NO_ERROR) {
+        LOGE("init reprocess channel failed, ret = %d", rc);
+        delete pChannel;
+        return NULL;
+    }
+
+    QCameraHeapMemory *pStreamInfo = allocateStreamInfoBuf(CAM_STREAM_TYPE_OFFLINE_PROC);
+    if (pStreamInfo == NULL) {
+        LOGE("no mem for stream info buf");
+        delete pChannel;
+        return NULL;
+    }
+
+    // Describe the offline stream: source geometry/format come from the
+    // external image config; burst count equals the number of input buffers.
+    cam_stream_info_t *streamInfoBuf = (cam_stream_info_t *)pStreamInfo->getPtr(0);
+    memset(streamInfoBuf, 0, sizeof(cam_stream_info_t));
+    streamInfoBuf->stream_type = CAM_STREAM_TYPE_OFFLINE_PROC;
+    streamInfoBuf->fmt = img_config.input_fmt;
+    streamInfoBuf->dim = img_config.input_dim;
+    streamInfoBuf->buf_planes = img_config.input_buf_planes;
+    streamInfoBuf->streaming_mode = CAM_STREAMING_MODE_BURST;
+    streamInfoBuf->num_of_burst = img_config.num_of_bufs;
+
+    streamInfoBuf->reprocess_config.pp_type = CAM_OFFLINE_REPROCESS_TYPE;
+    streamInfoBuf->reprocess_config.offline = img_config;
+    streamInfoBuf->reprocess_config.pp_feature_config = pp_feature;
+
+    // NOTE(review): addStream presumably takes ownership of pStreamInfo
+    // (including on its own failure path) — confirm pStreamInfo is released
+    // inside addStream when it fails, since only pChannel is deleted below.
+    rc = pChannel->addStream(*this,
+            pStreamInfo, NULL, img_config.num_of_bufs,
+            &gCamCapability[mCameraId]->padding_info,
+            stream_cb, userdata, false);
+
+    if (rc != NO_ERROR) {
+        LOGE("add reprocess stream failed, ret = %d", rc);
+        delete pChannel;
+        return NULL;
+    }
+
+    return pChannel;
+}
+
+/*===========================================================================
+ * FUNCTION   : addChannel
+ *
+ * DESCRIPTION: create and register the channel object for the given type
+ *
+ * PARAMETERS :
+ *   @ch_type : channel type
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addChannel(qcamera_ch_type_enum_t ch_type)
+{
+    // Route the request to the dedicated creator for this channel type.
+    // Types without a creator report UNKNOWN_ERROR.
+    if (QCAMERA_CH_TYPE_ZSL == ch_type) {
+        return addZSLChannel();
+    }
+    if (QCAMERA_CH_TYPE_CAPTURE == ch_type) {
+        return addCaptureChannel();
+    }
+    if (QCAMERA_CH_TYPE_PREVIEW == ch_type) {
+        return addPreviewChannel();
+    }
+    if (QCAMERA_CH_TYPE_VIDEO == ch_type) {
+        return addVideoChannel();
+    }
+    if (QCAMERA_CH_TYPE_SNAPSHOT == ch_type) {
+        return addSnapshotChannel();
+    }
+    if (QCAMERA_CH_TYPE_RAW == ch_type) {
+        return addRawChannel();
+    }
+    if (QCAMERA_CH_TYPE_METADATA == ch_type) {
+        return addMetaDataChannel();
+    }
+    if (QCAMERA_CH_TYPE_CALLBACK == ch_type) {
+        return addCallbackChannel();
+    }
+    if (QCAMERA_CH_TYPE_ANALYSIS == ch_type) {
+        return addAnalysisChannel();
+    }
+    return UNKNOWN_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : delChannel
+ *
+ * DESCRIPTION: delete a channel by its type
+ *
+ * PARAMETERS :
+ *   @ch_type : channel type
+ *   @destroy : delete context as well
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::delChannel(qcamera_ch_type_enum_t ch_type,
+                                              bool destroy)
+{
+    QCameraChannel *pChannel = m_channels[ch_type];
+    if (NULL == pChannel) {
+        // Nothing registered for this type; deleting is a no-op.
+        return NO_ERROR;
+    }
+
+    if (destroy) {
+        // Tear down the channel object entirely and clear the slot.
+        delete pChannel;
+        m_channels[ch_type] = NULL;
+    } else {
+        // Release backend resources but keep the wrapper object alive.
+        pChannel->deleteChannel();
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : startChannel
+ *
+ * DESCRIPTION: start a channel by its type
+ *
+ * PARAMETERS :
+ *   @ch_type : channel type
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::startChannel(qcamera_ch_type_enum_t ch_type)
+{
+    QCameraChannel *pChannel = m_channels[ch_type];
+    if (NULL == pChannel) {
+        // Cannot start a channel that was never added.
+        return UNKNOWN_ERROR;
+    }
+    return pChannel->start();
+}
+
+/*===========================================================================
+ * FUNCTION   : stopChannel
+ *
+ * DESCRIPTION: stop a channel by its type
+ *
+ * PARAMETERS :
+ *   @ch_type : channel type
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::stopChannel(qcamera_ch_type_enum_t ch_type)
+{
+    QCameraChannel *pChannel = m_channels[ch_type];
+    if (NULL == pChannel) {
+        // Cannot stop a channel that was never added.
+        return UNKNOWN_ERROR;
+    }
+    return pChannel->stop();
+}
+
+/*===========================================================================
+ * FUNCTION   : preparePreview
+ *
+ * DESCRIPTION: add channels needed for preview. In ZSL mode (without a
+ *              recording hint) a ZSL channel is created; otherwise a preview
+ *              channel plus, when recording is hinted, video and snapshot
+ *              (liveshot) channels. A callback channel is added when UBWC
+ *              preview output must be converted for app callbacks.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::preparePreview()
+{
+    ATRACE_CALL();
+    int32_t rc = NO_ERROR;
+
+    LOGI("E");
+    // Push the current stream configuration to the backend before any
+    // channel is created.
+    rc = mParameters.setStreamConfigure(false, false, false);
+    if (rc != NO_ERROR) {
+        LOGE("setStreamConfigure failed %d", rc);
+        return rc;
+    }
+
+    if (mParameters.isZSLMode() && mParameters.getRecordingHintValue() != true) {
+        // ZSL path: one ZSL channel serves preview + snapshot.
+        rc = addChannel(QCAMERA_CH_TYPE_ZSL);
+        if (rc != NO_ERROR) {
+            LOGE("failed!! rc = %d", rc);
+            return rc;
+        }
+
+        // UBWC preview frames cannot be handed to the app directly; add a
+        // callback channel to produce a linear-format copy.
+        if (mParameters.isUBWCEnabled()) {
+            cam_format_t fmt;
+            mParameters.getStreamFormat(CAM_STREAM_TYPE_PREVIEW, fmt);
+            if (fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
+                rc = addChannel(QCAMERA_CH_TYPE_CALLBACK);
+                if (rc != NO_ERROR) {
+                    delChannel(QCAMERA_CH_TYPE_ZSL);
+                    LOGE("failed!! rc = %d", rc);
+                    return rc;
+                }
+            }
+        }
+
+        // Offline-RAW capture additionally needs a RAW channel; its failure
+        // is intentionally not treated as fatal here.
+        if (mParameters.getofflineRAW()) {
+            addChannel(QCAMERA_CH_TYPE_RAW);
+        }
+    } else {
+        bool recordingHint = mParameters.getRecordingHintValue();
+        if(!isRdiMode() && recordingHint) {
+            //stop face detection,longshot,etc if turned ON in Camera mode
+#ifndef VANILLA_HAL
+            int32_t arg; //dummy arg
+            if (isLongshotEnabled()) {
+                sendCommand(CAMERA_CMD_LONGSHOT_OFF, arg, arg);
+            }
+            if (mParameters.isFaceDetectionEnabled()
+                    && (!mParameters.fdModeInVideo())) {
+                sendCommand(CAMERA_CMD_STOP_FACE_DETECTION, arg, arg);
+            }
+            if (mParameters.isHistogramEnabled()) {
+                sendCommand(CAMERA_CMD_HISTOGRAM_OFF, arg, arg);
+            }
+#endif
+            //Don't create snapshot channel for liveshot, if low power mode is set.
+            //Use video stream instead.
+            if (!isLowPowerMode()) {
+               rc = addChannel(QCAMERA_CH_TYPE_SNAPSHOT);
+               if (rc != NO_ERROR) {
+                   return rc;
+               }
+            }
+
+            rc = addChannel(QCAMERA_CH_TYPE_VIDEO);
+            if (rc != NO_ERROR) {
+                delChannel(QCAMERA_CH_TYPE_SNAPSHOT);
+                LOGE("failed!! rc = %d", rc);
+                return rc;
+            }
+        }
+
+        rc = addChannel(QCAMERA_CH_TYPE_PREVIEW);
+        // NOTE(review): when preview-channel creation fails here, the
+        // recording channels are torn down but execution continues; the
+        // non-zero rc is only acted on at the final check below. Presumably
+        // intentional so the UBWC branch is skipped via rc — confirm.
+        if (!isRdiMode() && (rc != NO_ERROR)) {
+            if (recordingHint) {
+                delChannel(QCAMERA_CH_TYPE_SNAPSHOT);
+                delChannel(QCAMERA_CH_TYPE_VIDEO);
+            }
+        }
+
+        // Same UBWC-to-linear conversion channel as in the ZSL path, only
+        // needed when not recording.
+        if (mParameters.isUBWCEnabled() && !recordingHint) {
+            cam_format_t fmt;
+            mParameters.getStreamFormat(CAM_STREAM_TYPE_PREVIEW, fmt);
+            if (fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
+                rc = addChannel(QCAMERA_CH_TYPE_CALLBACK);
+                if (rc != NO_ERROR) {
+                    delChannel(QCAMERA_CH_TYPE_PREVIEW);
+                    if (!isRdiMode()) {
+                        delChannel(QCAMERA_CH_TYPE_SNAPSHOT);
+                        delChannel(QCAMERA_CH_TYPE_VIDEO);
+                    }
+                    LOGE("failed!! rc = %d", rc);
+                    return rc;
+                }
+            }
+        }
+
+        if (NO_ERROR != rc) {
+            delChannel(QCAMERA_CH_TYPE_PREVIEW);
+            LOGE("failed!! rc = %d", rc);
+        }
+    }
+
+    LOGI("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : unpreparePreview
+ *
+ * DESCRIPTION: delete channels for preview
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::unpreparePreview()
+{
+    // Tear down every channel type preparePreview() may have created,
+    // in the same order as before.
+    static const qcamera_ch_type_enum_t previewChannels[] = {
+        QCAMERA_CH_TYPE_ZSL,
+        QCAMERA_CH_TYPE_PREVIEW,
+        QCAMERA_CH_TYPE_VIDEO,
+        QCAMERA_CH_TYPE_SNAPSHOT,
+        QCAMERA_CH_TYPE_CALLBACK,
+        QCAMERA_CH_TYPE_RAW
+    };
+    for (size_t i = 0;
+            i < sizeof(previewChannels) / sizeof(previewChannels[0]); i++) {
+        delChannel(previewChannels[i]);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : playShutter
+ *
+ * DESCRIPTION: send request to play shutter sound
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::playShutter(){
+     // Only notify when the app registered a callback and enabled the
+     // shutter message.
+     if ((NULL == mNotifyCb) ||
+         (0 == msgTypeEnabledWithLock(CAMERA_MSG_SHUTTER))){
+         LOGD("shutter msg not enabled or NULL cb");
+         return;
+     }
+     LOGH("CAMERA_MSG_SHUTTER ");
+     qcamera_callback_argm_t shutterArg;
+     memset(&shutterArg, 0, sizeof(qcamera_callback_argm_t));
+     shutterArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
+     shutterArg.msg_type = CAMERA_MSG_SHUTTER;
+     shutterArg.ext1 = 0;
+     shutterArg.ext2 = false;
+     m_cbNotifier.notifyCallback(shutterArg);
+}
+
+/*===========================================================================
+ * FUNCTION   : getChannelByHandle
+ *
+ * DESCRIPTION: return a channel by its handle
+ *
+ * PARAMETERS :
+ *   @channelHandle : channel handle
+ *
+ * RETURN     : a channel obj if found, NULL if not found
+ *==========================================================================*/
+QCameraChannel *QCamera2HardwareInterface::getChannelByHandle(uint32_t channelHandle)
+{
+    QCameraChannel *pFound = NULL;
+    // Linear scan over the fixed channel table; stop at the first match.
+    for (int i = 0; (i < QCAMERA_CH_TYPE_MAX) && (NULL == pFound); i++) {
+        if ((m_channels[i] != NULL) &&
+                (channelHandle == m_channels[i]->getMyHandle())) {
+            pFound = m_channels[i];
+        }
+    }
+    return pFound;
+}
+/*===========================================================================
+ * FUNCTION   : needPreviewFDCallback
+ *
+ * DESCRIPTION: decides whether a preview face-detection callback is needed
+ *
+ * PARAMETERS :
+ *   @num_faces : number of faces in the current frame
+ *
+ * RETURN     : bool type of status
+ *              true  -- callback should be sent
+ *              false -- callback can be skipped
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needPreviewFDCallback(uint8_t num_faces)
+{
+    // Skip only when no faces are present now AND none were reported in the
+    // previous frame (an empty update is still needed once to clear faces).
+    return !((0 == num_faces) && (0 == mNumPreviewFaces));
+}
+
+/*===========================================================================
+ * FUNCTION   : processFaceDetectionResult
+ *
+ * DESCRIPTION: process face detection result: translate backend face data
+ *              into the frameworks' camera_frame_metadata_t layout and
+ *              deliver it via the data callback (preview metadata or
+ *              snapshot meta data, depending on fd type)
+ *
+ * PARAMETERS :
+ *   @faces_data : ptr to face processing result struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processFaceDetectionResult(cam_faces_data_t *faces_data)
+{
+    if (!mParameters.isFaceDetectionEnabled()) {
+        LOGH("FaceDetection not enabled, no ops here");
+        return NO_ERROR;
+    }
+
+    qcamera_face_detect_type_t fd_type = faces_data->detection_data.fd_type;
+    cam_face_detection_data_t *detect_data = &(faces_data->detection_data);
+    // Drop the result when no data callback is registered, or the message
+    // type matching this fd mode is not enabled by the app, or an empty
+    // preview update is redundant (no faces now and none reported before).
+    if ((NULL == mDataCb) ||
+        (fd_type == QCAMERA_FD_PREVIEW && !msgTypeEnabled(CAMERA_MSG_PREVIEW_METADATA)) ||
+        (!needPreviewFDCallback(detect_data->num_faces_detected))
+#ifndef VANILLA_HAL
+        || (fd_type == QCAMERA_FD_SNAPSHOT && !msgTypeEnabled(CAMERA_MSG_META_DATA))
+#endif
+        ) {
+        LOGH("metadata msgtype not enabled, no ops here");
+        return NO_ERROR;
+    }
+
+    if ((fd_type == QCAMERA_FD_PREVIEW) && (detect_data->update_flag == FALSE)) {
+        // Don't send callback to app if this is skipped by fd at backend
+        return NO_ERROR;
+    }
+
+    // Preview dimensions are needed to map backend pixel coordinates into
+    // the frameworks' [-1000, 1000] coordinate space.
+    cam_dimension_t display_dim;
+    mParameters.getStreamDimension(CAM_STREAM_TYPE_PREVIEW, display_dim);
+    if (display_dim.width <= 0 || display_dim.height <= 0) {
+        LOGE("Invalid preview width or height (%d x %d)",
+               display_dim.width, display_dim.height);
+        return UNKNOWN_ERROR;
+    }
+
+    // process face detection result
+    // need separate face detection in preview or snapshot type
+    size_t faceResultSize = 0;
+    size_t data_len = 0;
+    if(fd_type == QCAMERA_FD_PREVIEW){
+        //fd for preview frames
+        faceResultSize = sizeof(camera_frame_metadata_t);
+        faceResultSize += sizeof(camera_face_t) * MAX_ROI;
+    }else if(fd_type == QCAMERA_FD_SNAPSHOT){
+#ifndef VANILLA_HAL
+        // fd for snapshot frames
+        //check if face is detected in this frame
+        if(detect_data->num_faces_detected > 0){
+            data_len = sizeof(camera_frame_metadata_t) +
+                    sizeof(camera_face_t) * detect_data->num_faces_detected;
+        }else{
+            //no face
+            data_len = 0;
+        }
+#endif
+        // Snapshot meta data payload is [type, length, data...].
+        faceResultSize = 1 *sizeof(int)    //meta data type
+                       + 1 *sizeof(int)    // meta data len
+                       + data_len;         //data
+    }
+
+    camera_memory_t *faceResultBuffer = mGetMemory(-1,
+                                                   faceResultSize,
+                                                   1,
+                                                   mCallbackCookie);
+    if ( NULL == faceResultBuffer ) {
+        LOGE("Not enough memory for face result data");
+        return NO_MEMORY;
+    }
+
+    unsigned char *pFaceResult = ( unsigned char * ) faceResultBuffer->data;
+    memset(pFaceResult, 0, faceResultSize);
+    unsigned char *faceData = NULL;
+    if(fd_type == QCAMERA_FD_PREVIEW){
+        faceData = pFaceResult;
+        mNumPreviewFaces = detect_data->num_faces_detected;
+    }else if(fd_type == QCAMERA_FD_SNAPSHOT){
+#ifndef VANILLA_HAL
+        //need fill meta type and meta data len first
+        int *data_header = (int* )pFaceResult;
+        data_header[0] = CAMERA_META_DATA_FD;
+        data_header[1] = (int)data_len;
+
+        if(data_len <= 0){
+            //if face is not valid or do not have face, return
+            // (the header-only buffer is still delivered so the app sees an
+            //  empty meta data update; ownership passes to the notifier)
+            qcamera_callback_argm_t cbArg;
+            memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+            cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+            cbArg.msg_type = CAMERA_MSG_META_DATA;
+            cbArg.data = faceResultBuffer;
+            cbArg.user_data = faceResultBuffer;
+            cbArg.cookie = this;
+            cbArg.release_cb = releaseCameraMemory;
+            int32_t rc = m_cbNotifier.notifyCallback(cbArg);
+            if (rc != NO_ERROR) {
+                LOGE("fail sending notification");
+                faceResultBuffer->release(faceResultBuffer);
+            }
+            return rc;
+        }
+#endif
+        faceData = pFaceResult + 2 *sizeof(int); //skip two int length
+    }
+
+    camera_frame_metadata_t *roiData = (camera_frame_metadata_t * ) faceData;
+    camera_face_t *faces = (camera_face_t *) ( faceData + sizeof(camera_frame_metadata_t) );
+
+    roiData->number_of_faces = detect_data->num_faces_detected;
+    roiData->faces = faces;
+    if (roiData->number_of_faces > 0) {
+        for (int i = 0; i < roiData->number_of_faces; i++) {
+            faces[i].id = detect_data->faces[i].face_id;
+            faces[i].score = detect_data->faces[i].score;
+
+            // Map the face rectangle from preview-pixel coordinates into
+            // the frameworks' [-1000, 1000] space.
+            // left
+            faces[i].rect[0] = MAP_TO_DRIVER_COORDINATE(
+                    detect_data->faces[i].face_boundary.left,
+                    display_dim.width, 2000, -1000);
+
+            // top
+            faces[i].rect[1] = MAP_TO_DRIVER_COORDINATE(
+                    detect_data->faces[i].face_boundary.top,
+                    display_dim.height, 2000, -1000);
+
+            // right
+            faces[i].rect[2] = faces[i].rect[0] +
+                    MAP_TO_DRIVER_COORDINATE(
+                    detect_data->faces[i].face_boundary.width,
+                    display_dim.width, 2000, 0);
+
+             // bottom
+            faces[i].rect[3] = faces[i].rect[1] +
+                    MAP_TO_DRIVER_COORDINATE(
+                    detect_data->faces[i].face_boundary.height,
+                    display_dim.height, 2000, 0);
+
+            if (faces_data->landmark_valid) {
+                // Center of left eye
+                faces[i].left_eye[0] = MAP_TO_DRIVER_COORDINATE(
+                        faces_data->landmark_data.face_landmarks[i].left_eye_center.x,
+                        display_dim.width, 2000, -1000);
+                faces[i].left_eye[1] = MAP_TO_DRIVER_COORDINATE(
+                        faces_data->landmark_data.face_landmarks[i].left_eye_center.y,
+                        display_dim.height, 2000, -1000);
+
+                // Center of right eye
+                faces[i].right_eye[0] = MAP_TO_DRIVER_COORDINATE(
+                        faces_data->landmark_data.face_landmarks[i].right_eye_center.x,
+                        display_dim.width, 2000, -1000);
+                faces[i].right_eye[1] = MAP_TO_DRIVER_COORDINATE(
+                        faces_data->landmark_data.face_landmarks[i].right_eye_center.y,
+                        display_dim.height, 2000, -1000);
+
+                // Center of mouth
+                faces[i].mouth[0] = MAP_TO_DRIVER_COORDINATE(
+                        faces_data->landmark_data.face_landmarks[i].mouth_center.x,
+                        display_dim.width, 2000, -1000);
+                faces[i].mouth[1] = MAP_TO_DRIVER_COORDINATE(
+                        faces_data->landmark_data.face_landmarks[i].mouth_center.y,
+                        display_dim.height, 2000, -1000);
+            } else {
+                // return -2000 if invalid
+                faces[i].left_eye[0] = -2000;
+                faces[i].left_eye[1] = -2000;
+
+                faces[i].right_eye[0] = -2000;
+                faces[i].right_eye[1] = -2000;
+
+                faces[i].mouth[0] = -2000;
+                faces[i].mouth[1] = -2000;
+            }
+
+#ifndef VANILLA_HAL
+#ifdef TARGET_TS_MAKEUP
+            // NOTE(review): mFaceRect keeps only the LAST face's rectangle
+            // (overwritten each loop iteration) — presumably intended for
+            // the single-face makeup use case; confirm.
+            mFaceRect.left = detect_data->faces[i].face_boundary.left;
+            mFaceRect.top = detect_data->faces[i].face_boundary.top;
+            mFaceRect.right = detect_data->faces[i].face_boundary.width+mFaceRect.left;
+            mFaceRect.bottom = detect_data->faces[i].face_boundary.height+mFaceRect.top;
+#endif
+            // Optional per-face attributes, only filled when the backend
+            // flagged the corresponding data block as valid.
+            if (faces_data->smile_valid) {
+                faces[i].smile_degree = faces_data->smile_data.smile[i].smile_degree;
+                faces[i].smile_score = faces_data->smile_data.smile[i].smile_confidence;
+            }
+            if (faces_data->blink_valid) {
+                faces[i].blink_detected = faces_data->blink_data.blink[i].blink_detected;
+                faces[i].leye_blink = faces_data->blink_data.blink[i].left_blink;
+                faces[i].reye_blink = faces_data->blink_data.blink[i].right_blink;
+            }
+            if (faces_data->recog_valid) {
+                faces[i].face_recognised = faces_data->recog_data.face_rec[i].face_recognised;
+            }
+            if (faces_data->gaze_valid) {
+                faces[i].gaze_angle = faces_data->gaze_data.gaze[i].gaze_angle;
+                faces[i].updown_dir = faces_data->gaze_data.gaze[i].updown_dir;
+                faces[i].leftright_dir = faces_data->gaze_data.gaze[i].leftright_dir;
+                faces[i].roll_dir = faces_data->gaze_data.gaze[i].roll_dir;
+                faces[i].left_right_gaze = faces_data->gaze_data.gaze[i].left_right_gaze;
+                faces[i].top_bottom_gaze = faces_data->gaze_data.gaze[i].top_bottom_gaze;
+            }
+#endif
+
+        }
+    }
+    else{
+#ifdef TARGET_TS_MAKEUP
+        memset(&mFaceRect,-1,sizeof(mFaceRect));
+#endif
+    }
+    // Hand the populated buffer to the callback notifier; on success the
+    // notifier owns the buffer and releases it via releaseCameraMemory.
+    qcamera_callback_argm_t cbArg;
+    memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+    cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+    if(fd_type == QCAMERA_FD_PREVIEW){
+        cbArg.msg_type = CAMERA_MSG_PREVIEW_METADATA;
+    }
+#ifndef VANILLA_HAL
+    else if(fd_type == QCAMERA_FD_SNAPSHOT){
+        cbArg.msg_type = CAMERA_MSG_META_DATA;
+    }
+#endif
+    cbArg.data = faceResultBuffer;
+    cbArg.metadata = roiData;
+    cbArg.user_data = faceResultBuffer;
+    cbArg.cookie = this;
+    cbArg.release_cb = releaseCameraMemory;
+    int32_t rc = m_cbNotifier.notifyCallback(cbArg);
+    if (rc != NO_ERROR) {
+        LOGE("fail sending notification");
+        faceResultBuffer->release(faceResultBuffer);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseCameraMemory
+ *
+ * DESCRIPTION: releases camera memory objects
+ *
+ * PARAMETERS :
+ *   @data    : buffer to be released
+ *   @cookie  : context data (unused)
+ *   @cbStatus: callback status (unused)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::releaseCameraMemory(void *data,
+                                                    void */*cookie*/,
+                                                    int32_t /*cbStatus*/)
+{
+    camera_memory_t *mem = (camera_memory_t *)data;
+    if (mem == NULL) {
+        return;
+    }
+    // camera_memory_t objects release themselves through their own hook.
+    mem->release(mem);
+}
+
+/*===========================================================================
+ * FUNCTION   : returnStreamBuffer
+ *
+ * DESCRIPTION: returns back a stream buffer
+ *
+ * PARAMETERS :
+ *   @data    : ptr to the buffer index to return
+ *   @cookie  : the owning QCameraStream
+ *   @cbStatus: callback status (unused)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::returnStreamBuffer(void *data,
+                                                   void *cookie,
+                                                   int32_t /*cbStatus*/)
+{
+    QCameraStream *stream = (QCameraStream *)cookie;
+    int idx = *((int *)data);
+    // A buffer can only be returned to a valid stream with a valid index.
+    if ((NULL == stream) || (idx < 0)) {
+        LOGE("Cannot return buffer %d %p", idx, cookie);
+        return;
+    }
+    stream->bufDone((uint32_t)idx);
+}
+
+/*===========================================================================
+ * FUNCTION   : processHistogramStats
+ *
+ * DESCRIPTION: process histogram stats: copy the backend histogram into a
+ *              callback buffer and deliver it via CAMERA_MSG_STATS_DATA
+ *
+ * PARAMETERS :
+ *   @stats_data : ptr to histogram stats struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processHistogramStats(
+        __unused cam_hist_stats_t &stats_data)
+{
+#ifndef VANILLA_HAL
+    if (!mParameters.isHistogramEnabled()) {
+        LOGH("Histogram not enabled, no ops here");
+        return NO_ERROR;
+    }
+
+    camera_memory_t *histBuffer = mGetMemory(-1,
+                                             sizeof(cam_histogram_data_t),
+                                             1,
+                                             mCallbackCookie);
+    if ( NULL == histBuffer ) {
+        LOGE("Not enough memory for histogram data");
+        return NO_MEMORY;
+    }
+
+    cam_histogram_data_t *pHistData = (cam_histogram_data_t *)histBuffer->data;
+    if (pHistData == NULL) {
+        LOGE("memory data ptr is NULL");
+        // Fix: release the callback buffer before bailing out; previously
+        // the camera_memory_t allocated above leaked on this path.
+        histBuffer->release(histBuffer);
+        return UNKNOWN_ERROR;
+    }
+
+    switch (stats_data.type) {
+    case CAM_HISTOGRAM_TYPE_BAYER:
+        *pHistData = stats_data.bayer_stats.gb_stats;
+        break;
+    case CAM_HISTOGRAM_TYPE_YUV:
+        *pHistData = stats_data.yuv_stats;
+        break;
+    default:
+        // Fix: an unknown histogram type previously delivered uninitialized
+        // buffer contents to the client; free the buffer and report instead.
+        LOGW("Unsupported histogram type %d", stats_data.type);
+        histBuffer->release(histBuffer);
+        return BAD_VALUE;
+    }
+
+    // Hand the buffer to the notifier; on success it owns the memory and
+    // frees it through releaseCameraMemory.
+    qcamera_callback_argm_t cbArg;
+    memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+    cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+    cbArg.msg_type = CAMERA_MSG_STATS_DATA;
+    cbArg.data = histBuffer;
+    cbArg.user_data = histBuffer;
+    cbArg.cookie = this;
+    cbArg.release_cb = releaseCameraMemory;
+    int32_t rc = m_cbNotifier.notifyCallback(cbArg);
+    if (rc != NO_ERROR) {
+        LOGE("fail sending notification");
+        histBuffer->release(histBuffer);
+    }
+#endif
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : calcThermalLevel
+ *
+ * DESCRIPTION: Calculates the target fps range depending on
+ *              the thermal level.
+ *              Note that this function can be called from QCameraParametersIntf
+ *              while mutex is held. So it should not call back into
+ *              QCameraParametersIntf causing deadlock.
+ *
+ * PARAMETERS :
+ *   @level      : received thermal level
+ *   @minFPSi    : minimum configured fps range (in fps*1000)
+ *   @maxFPSi    : maximum configured fps range (in fps*1000)
+ *   @minVideoFps: minimum configured video fps (in fps*1000)
+ *   @maxVideoFps: maximum configured video fps (in fps*1000)
+ *   @adjustedRange : target fps range (output, in fps)
+ *   @skipPattern : target skip pattern (output)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::calcThermalLevel(
+            qcamera_thermal_level_enum_t level,
+            const int minFPSi,
+            const int maxFPSi,
+            const float &minVideoFps,
+            const float &maxVideoFps,
+            cam_fps_range_t &adjustedRange,
+            enum msm_vfe_frame_skip_pattern &skipPattern)
+{
+    const float minFPS = (float)minFPSi;
+    const float maxFPS = (float)maxFPSi;
+
+    LOGH("level: %d, preview minfps %f, preview maxfpS %f, "
+              "video minfps %f, video maxfpS %f",
+             level, minFPS, maxFPS, minVideoFps, maxVideoFps);
+
+    // NOTE(review): the QCAMERA_THERMAL_SHUTDOWN and default cases below
+    // leave adjustedRange and skipPattern untouched — callers presumably
+    // must not consume the outputs on a non-NO_ERROR / shutdown result;
+    // confirm all call sites honor that.
+    switch(level) {
+    case QCAMERA_THERMAL_NO_ADJUSTMENT:
+        {
+            // Inputs are fps*1000; convert to fps and pass through unchanged.
+            adjustedRange.min_fps = minFPS / 1000.0f;
+            adjustedRange.max_fps = maxFPS / 1000.0f;
+            adjustedRange.video_min_fps = minVideoFps / 1000.0f;
+            adjustedRange.video_max_fps = maxVideoFps / 1000.0f;
+            skipPattern = NO_SKIP;
+        }
+        break;
+    case QCAMERA_THERMAL_SLIGHT_ADJUSTMENT:
+        {
+            // Reduce all fps bounds by 10%, clamped to a floor of 1 fps,
+            // and skip every 2nd frame.
+            adjustedRange.min_fps = minFPS / 1000.0f;
+            adjustedRange.max_fps = maxFPS / 1000.0f;
+            adjustedRange.min_fps -= 0.1f * adjustedRange.min_fps;
+            adjustedRange.max_fps -= 0.1f * adjustedRange.max_fps;
+            adjustedRange.video_min_fps = minVideoFps / 1000.0f;
+            adjustedRange.video_max_fps = maxVideoFps / 1000.0f;
+            adjustedRange.video_min_fps -= 0.1f * adjustedRange.video_min_fps;
+            adjustedRange.video_max_fps -= 0.1f * adjustedRange.video_max_fps;
+            if ( adjustedRange.min_fps < 1 ) {
+                adjustedRange.min_fps = 1;
+            }
+            if ( adjustedRange.max_fps < 1 ) {
+                adjustedRange.max_fps = 1;
+            }
+            if ( adjustedRange.video_min_fps < 1 ) {
+                adjustedRange.video_min_fps = 1;
+            }
+            if ( adjustedRange.video_max_fps < 1 ) {
+                adjustedRange.video_max_fps = 1;
+            }
+            skipPattern = EVERY_2FRAME;
+        }
+        break;
+    case QCAMERA_THERMAL_BIG_ADJUSTMENT:
+        {
+            // Reduce all fps bounds by 20%, clamped to a floor of 1 fps,
+            // and skip every 4th frame.
+            adjustedRange.min_fps = minFPS / 1000.0f;
+            adjustedRange.max_fps = maxFPS / 1000.0f;
+            adjustedRange.min_fps -= 0.2f * adjustedRange.min_fps;
+            adjustedRange.max_fps -= 0.2f * adjustedRange.max_fps;
+            adjustedRange.video_min_fps = minVideoFps / 1000.0f;
+            adjustedRange.video_max_fps = maxVideoFps / 1000.0f;
+            adjustedRange.video_min_fps -= 0.2f * adjustedRange.video_min_fps;
+            adjustedRange.video_max_fps -= 0.2f * adjustedRange.video_max_fps;
+            if ( adjustedRange.min_fps < 1 ) {
+                adjustedRange.min_fps = 1;
+            }
+            if ( adjustedRange.max_fps < 1 ) {
+                adjustedRange.max_fps = 1;
+            }
+            if ( adjustedRange.video_min_fps < 1 ) {
+                adjustedRange.video_min_fps = 1;
+            }
+            if ( adjustedRange.video_max_fps < 1 ) {
+                adjustedRange.video_max_fps = 1;
+            }
+            skipPattern = EVERY_4FRAME;
+        }
+        break;
+    case QCAMERA_THERMAL_MAX_ADJUSTMENT:
+        {
+            // Stop Preview?
+            // Set lowest min FPS for now
+            // Pin the range to the lowest min fps the sensor supports.
+            adjustedRange.min_fps = minFPS/1000.0f;
+            adjustedRange.max_fps = minFPS/1000.0f;
+            cam_capability_t *capability = gCamCapability[mCameraId];
+            for (size_t i = 0;
+                     i < capability->fps_ranges_tbl_cnt;
+                     i++) {
+                if (capability->fps_ranges_tbl[i].min_fps <
+                        adjustedRange.min_fps) {
+                    adjustedRange.min_fps =
+                            capability->fps_ranges_tbl[i].min_fps;
+                    adjustedRange.max_fps = adjustedRange.min_fps;
+                }
+            }
+            skipPattern = MAX_SKIP;
+            adjustedRange.video_min_fps = adjustedRange.min_fps;
+            adjustedRange.video_max_fps = adjustedRange.max_fps;
+        }
+        break;
+    case QCAMERA_THERMAL_SHUTDOWN:
+        {
+            // send error notify
+            LOGE("Received shutdown thermal level. Closing camera");
+            sendEvtNotify(CAMERA_MSG_ERROR, CAMERA_ERROR_SERVER_DIED, 0);
+        }
+        break;
+    default:
+        {
+            LOGW("Invalid thermal level %d", level);
+            return BAD_VALUE;
+        }
+        break;
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : recalcFPSRange
+ *
+ * DESCRIPTION: adjust the configured fps range regarding
+ *              the last thermal level.
+ *
+ * PARAMETERS :
+ *   @minFPS      : minimum configured fps range
+ *   @maxFPS      : maximum configured fps range
+ *   @minVideoFPS : minimum configured video fps
+ *   @maxVideoFPS : maximum configured video fps
+ *   @adjustedRange : target fps range
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::recalcFPSRange(int &minFPS, int &maxFPS,
+        const float &minVideoFPS, const float &maxVideoFPS,
+        cam_fps_range_t &adjustedRange)
+{
+    // Re-apply the last recorded thermal level to the configured fps
+    // ranges; the frame-skip pattern computed alongside is not needed
+    // by callers of this helper.
+    enum msm_vfe_frame_skip_pattern unusedSkipPattern;
+    calcThermalLevel(mThermalLevel, minFPS, maxFPS, minVideoFPS,
+            maxVideoFPS, adjustedRange, unusedSkipPattern);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : updateThermalLevel
+ *
+ * DESCRIPTION: update thermal level depending on thermal events
+ *
+ * PARAMETERS :
+ *   @thermal_level : pointer to the new thermal level (qcamera_thermal_level_enum_t)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::updateThermalLevel(void *thermal_level)
+{
+    int ret = NO_ERROR;
+    cam_fps_range_t adjustedRange;
+    int minFPS, maxFPS;
+    float minVideoFPS, maxVideoFPS;
+    enum msm_vfe_frame_skip_pattern skipPattern;
+    qcamera_thermal_level_enum_t level = *(qcamera_thermal_level_enum_t *)thermal_level;
+
+
+    // Nothing to mitigate while the camera is closed
+    if (!mCameraOpened) {
+        LOGH("Camera is not opened, no need to update camera parameters");
+        return NO_ERROR;
+    }
+    // Thermal mitigation is deliberately skipped in camcorder mode
+    if (mParameters.getRecordingHintValue()) {
+        LOGH("Thermal mitigation isn't enabled in camcorder mode");
+        return NO_ERROR;
+    }
+
+    mParameters.getPreviewFpsRange(&minFPS, &maxFPS);
+    qcamera_thermal_mode thermalMode = mParameters.getThermalMode();
+    // In HFR mode the video fps range comes from the HFR table;
+    // otherwise it mirrors the preview fps range.
+    if (mParameters.isHfrMode()) {
+        cam_fps_range_t hfrFpsRange;
+        mParameters.getHfrFps(hfrFpsRange);
+        minVideoFPS = hfrFpsRange.video_min_fps;
+        maxVideoFPS = hfrFpsRange.video_max_fps;
+    } else {
+        minVideoFPS = minFPS;
+        maxVideoFPS = maxFPS;
+    }
+
+    // Compute the mitigated fps range / skip pattern for this level,
+    // then remember the level for later recalcFPSRange() calls.
+    calcThermalLevel(level, minFPS, maxFPS, minVideoFPS, maxVideoFPS,
+            adjustedRange, skipPattern);
+    mThermalLevel = level;
+
+    // Apply mitigation either by clamping fps or by skipping frames,
+    // depending on the configured thermal mode.
+    if (thermalMode == QCAMERA_THERMAL_ADJUST_FPS)
+        ret = mParameters.adjustPreviewFpsRange(&adjustedRange);
+    else if (thermalMode == QCAMERA_THERMAL_ADJUST_FRAMESKIP)
+        ret = mParameters.setFrameSkip(skipPattern);
+    else
+        LOGW("Incorrect thermal mode %d", thermalMode);
+
+    return ret;
+
+}
+
+/*===========================================================================
+ * FUNCTION   : updateParameters
+ *
+ * DESCRIPTION: update parameters
+ *
+ * PARAMETERS :
+ *   @parms       : input parameters string
+ *   @needRestart : output, flag to indicate if preview restart is needed
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::updateParameters(const char *parms, bool &needRestart)
+{
+    // Parse and stage the incoming parameter string; needRestart is
+    // filled in by the parameter layer and cached on this object.
+    String8 paramStr(parms);
+    int rc = mParameters.updateParameters(paramStr, needRestart);
+    setNeedRestart(needRestart);
+
+    // Propagate stream-based parameter settings to every live channel
+    for (int ch = 0; ch < QCAMERA_CH_TYPE_MAX; ch++) {
+        if (m_channels[ch] != NULL) {
+            m_channels[ch]->UpdateStreamBasedParameters(mParameters);
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : commitParameterChanges
+ *
+ * DESCRIPTION: commit parameter changes to the backend to take effect
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ * NOTE       : This function must be called after updateParameters.
+ *              Otherwise, no change will be passed to backend to take effect.
+ *==========================================================================*/
+int QCamera2HardwareInterface::commitParameterChanges()
+{
+    // Push staged parameter changes down to the backend
+    int rc = mParameters.commitParameters();
+    if (rc != NO_ERROR) {
+        return rc;
+    }
+    // Refresh the snapshot count based on the just-committed settings
+    return mParameters.setNumOfSnapshot();
+}
+
+/*===========================================================================
+ * FUNCTION   : needDebugFps
+ *
+ * DESCRIPTION: if fps log info need to be printed out
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: need print out fps log
+ *              false: no need to print out fps log
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needDebugFps()
+{
+    // Controlled entirely by the fps-debug flag in the parameter layer
+    return mParameters.isFpsDebugEnabled();
+}
+
+/*===========================================================================
+ * FUNCTION   : isCACEnabled
+ *
+ * DESCRIPTION: if CAC is enabled
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCamera2HardwareInterface::isCACEnabled()
+{
+    // CAC is gated by the persist.camera.feature.cac system property
+    char prop[PROPERTY_VALUE_MAX] = {0};
+    property_get("persist.camera.feature.cac", prop, "0");
+    return atoi(prop) == 1;
+}
+
+/*===========================================================================
+ * FUNCTION   : is4k2kResolution
+ *
+ * DESCRIPTION: if resolution is 4k x 2k or true 4k x 2k
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCamera2HardwareInterface::is4k2kResolution(cam_dimension_t* resolution)
+{
+   // 4k2k covers both cinema 4K (4096x2160) and UHD (3840x2160)
+   if (resolution->height != 2160) {
+      return false;
+   }
+   return (resolution->width == 4096) || (resolution->width == 3840);
+}
+
+/*===========================================================================
+ * FUNCTION   : isPreviewRestartEnabled
+ *
+ * DESCRIPTION: Check whether preview should be restarted automatically
+ *              during image capture.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCamera2HardwareInterface::isPreviewRestartEnabled()
+{
+    // Early preview restart is gated by a persist system property
+    char prop[PROPERTY_VALUE_MAX] = {0};
+    property_get("persist.camera.feature.restart", prop, "0");
+    return atoi(prop) == 1;
+}
+
+/*===========================================================================
+ * FUNCTION   : needReprocess
+ *
+ * DESCRIPTION: if reprocess is needed
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needReprocess()
+{
+    // RAW captures bypass reprocessing entirely
+    if (!mParameters.isJpegPictureFormat() &&
+        !mParameters.isNV21PictureFormat()) {
+        return false;
+    }
+
+    // 4K liveshot skips reprocess unless running in low power mode
+    if (mParameters.is4k2kVideoResolution() &&
+            mParameters.getRecordingHintValue() && !isLowPowerMode()) {
+        return false;
+    }
+
+    // Reprocess is required whenever the first-pass pp config
+    // requests at least one post-processing feature.
+    cam_pp_feature_config_t pp_config;
+    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
+    getPPConfig(pp_config);
+
+    bool required = (pp_config.feature_mask > 0);
+    LOGH("needReprocess %s", required ? "true" : "false");
+    return required;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : needRotationReprocess
+ *
+ * DESCRIPTION: if rotation needs to be done by reprocess in pp
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needRotationReprocess()
+{
+    // RAW image, no need to reprocess
+    if (!mParameters.isJpegPictureFormat() &&
+        !mParameters.isNV21PictureFormat()) {
+        return false;
+    }
+
+    // Disable reprocess for 4K liveshot unless low power mode is set
+    if (mParameters.is4k2kVideoResolution() &&
+            mParameters.getRecordingHintValue() && !isLowPowerMode()) {
+        return false;
+    }
+
+    // Rotation reprocess is needed only when a non-zero JPEG rotation
+    // is requested and the pp block advertises rotation capability.
+    bool ppCanRotate = (gCamCapability[mCameraId]->qcom_supported_feature_mask &
+            CAM_QCOM_FEATURE_ROTATION) > 0;
+    if (ppCanRotate && (mParameters.getJpegRotation() > 0)) {
+        LOGH("need to do reprocess for rotation=%d",
+                 mParameters.getJpegRotation());
+        return true;
+    }
+
+    return false;
+}
+
+/*===========================================================================
+ * FUNCTION   : getThumbnailSize
+ *
+ * DESCRIPTION: get user set thumbnail size
+ *
+ * PARAMETERS :
+ *   @dim     : output of thumbnail dimension
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::getThumbnailSize(cam_dimension_t &dim)
+{
+    // Query the user-configured thumbnail dimensions from parameters
+    mParameters.getThumbnailSize(&dim.width, &dim.height);
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegQuality
+ *
+ * DESCRIPTION: get user set jpeg quality
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : jpeg quality setting
+ *==========================================================================*/
+uint32_t QCamera2HardwareInterface::getJpegQuality()
+{
+    // Forward the user-selected JPEG quality from the parameter layer
+    return mParameters.getJpegQuality();
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifData
+ *
+ * DESCRIPTION: get exif data to be passed into jpeg encoding
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : exif data from user setting and GPS
+ *==========================================================================*/
+QCameraExif *QCamera2HardwareInterface::getExifData()
+{
+    QCameraExif *exif = new QCameraExif();
+    if (exif == NULL) {
+        LOGE("No memory for QCameraExif");
+        return NULL;
+    }
+
+    int32_t rc = NO_ERROR;
+
+    // Date/time tags: the same timestamp feeds DATE_TIME, ORIGINAL and
+    // DIGITIZED, with sub-second precision carried in the SUBSEC tags.
+    String8 dateTime, subSecTime;
+    rc = mParameters.getExifDateTime(dateTime, subSecTime);
+    if(rc == NO_ERROR) {
+        exif->addEntry(EXIFTAGID_DATE_TIME, EXIF_ASCII,
+                (uint32_t)(dateTime.length() + 1), (void *)dateTime.string());
+        exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_ORIGINAL, EXIF_ASCII,
+                (uint32_t)(dateTime.length() + 1), (void *)dateTime.string());
+        exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_DIGITIZED, EXIF_ASCII,
+                (uint32_t)(dateTime.length() + 1), (void *)dateTime.string());
+        exif->addEntry(EXIFTAGID_SUBSEC_TIME, EXIF_ASCII,
+                (uint32_t)(subSecTime.length() + 1), (void *)subSecTime.string());
+        exif->addEntry(EXIFTAGID_SUBSEC_TIME_ORIGINAL, EXIF_ASCII,
+                (uint32_t)(subSecTime.length() + 1), (void *)subSecTime.string());
+        exif->addEntry(EXIFTAGID_SUBSEC_TIME_DIGITIZED, EXIF_ASCII,
+                (uint32_t)(subSecTime.length() + 1), (void *)subSecTime.string());
+    } else {
+        LOGW("getExifDateTime failed");
+    }
+
+    rat_t focalLength;
+    rc = mParameters.getExifFocalLength(&focalLength);
+    if (rc == NO_ERROR) {
+        exif->addEntry(EXIFTAGID_FOCAL_LENGTH,
+                       EXIF_RATIONAL,
+                       1,
+                       (void *)&(focalLength));
+    } else {
+        LOGW("getExifFocalLength failed");
+    }
+
+    // ISO is only meaningful for non-YUV (bayer) sensors
+    uint16_t isoSpeed = mParameters.getExifIsoSpeed();
+    if (getSensorType() != CAM_SENSOR_YUV) {
+        exif->addEntry(EXIFTAGID_ISO_SPEED_RATING,
+                       EXIF_SHORT,
+                       1,
+                       (void *)&(isoSpeed));
+    }
+
+    char gpsProcessingMethod[EXIF_ASCII_PREFIX_SIZE + GPS_PROCESSING_METHOD_SIZE];
+    uint32_t count = 0;
+
+    /* GPS data might not be available; each GPS tag below is added only
+     * when the corresponding getter succeeds. */
+    rc = mParameters.getExifGpsProcessingMethod(gpsProcessingMethod, count);
+    if(rc == NO_ERROR) {
+        exif->addEntry(EXIFTAGID_GPS_PROCESSINGMETHOD,
+                       EXIF_ASCII,
+                       count,
+                       (void *)gpsProcessingMethod);
+    } else {
+        LOGW("getExifGpsProcessingMethod failed");
+    }
+
+    rat_t latitude[3];
+    char latRef[2];
+    rc = mParameters.getExifLatitude(latitude, latRef);
+    if(rc == NO_ERROR) {
+        exif->addEntry(EXIFTAGID_GPS_LATITUDE,
+                       EXIF_RATIONAL,
+                       3,
+                       (void *)latitude);
+        exif->addEntry(EXIFTAGID_GPS_LATITUDE_REF,
+                       EXIF_ASCII,
+                       2,
+                       (void *)latRef);
+    } else {
+        LOGW("getExifLatitude failed");
+    }
+
+    rat_t longitude[3];
+    char lonRef[2];
+    rc = mParameters.getExifLongitude(longitude, lonRef);
+    if(rc == NO_ERROR) {
+        exif->addEntry(EXIFTAGID_GPS_LONGITUDE,
+                       EXIF_RATIONAL,
+                       3,
+                       (void *)longitude);
+
+        exif->addEntry(EXIFTAGID_GPS_LONGITUDE_REF,
+                       EXIF_ASCII,
+                       2,
+                       (void *)lonRef);
+    } else {
+        LOGW("getExifLongitude failed");
+    }
+
+    rat_t altitude;
+    char altRef;
+    rc = mParameters.getExifAltitude(&altitude, &altRef);
+    if(rc == NO_ERROR) {
+        exif->addEntry(EXIFTAGID_GPS_ALTITUDE,
+                       EXIF_RATIONAL,
+                       1,
+                       (void *)&(altitude));
+
+        exif->addEntry(EXIFTAGID_GPS_ALTITUDE_REF,
+                       EXIF_BYTE,
+                       1,
+                       (void *)&altRef);
+    } else {
+        LOGW("getExifAltitude failed");
+    }
+
+    char gpsDateStamp[20];
+    rat_t gpsTimeStamp[3];
+    // Pass the real buffer size instead of a magic constant
+    rc = mParameters.getExifGpsDateTimeStamp(gpsDateStamp,
+            sizeof(gpsDateStamp), gpsTimeStamp);
+    if(rc == NO_ERROR) {
+        exif->addEntry(EXIFTAGID_GPS_DATESTAMP,
+                       EXIF_ASCII,
+                       (uint32_t)(strlen(gpsDateStamp) + 1),
+                       (void *)gpsDateStamp);
+
+        exif->addEntry(EXIFTAGID_GPS_TIMESTAMP,
+                       EXIF_RATIONAL,
+                       3,
+                       (void *)gpsTimeStamp);
+    } else {
+        // Fixed: log message previously named a non-existent function
+        LOGW("getExifGpsDateTimeStamp failed");
+    }
+
+#ifdef ENABLE_MODEL_INFO_EXIF
+
+    // Make/model/software tags: user-set persist properties win over
+    // the build-provided defaults.
+    char value[PROPERTY_VALUE_MAX];
+    if (property_get("persist.sys.exif.make", value, "") > 0 ||
+            property_get("ro.product.manufacturer", value, "QCOM-AA") > 0) {
+        exif->addEntry(EXIFTAGID_MAKE,
+                EXIF_ASCII, strlen(value) + 1, (void *)value);
+    } else {
+        LOGW("getExifMaker failed");
+    }
+
+    if (property_get("persist.sys.exif.model", value, "") > 0 ||
+            property_get("ro.product.model", value, "QCAM-AA") > 0) {
+        exif->addEntry(EXIFTAGID_MODEL,
+                EXIF_ASCII, strlen(value) + 1, (void *)value);
+    } else {
+        LOGW("getExifModel failed");
+    }
+
+    if (property_get("ro.build.description", value, "QCAM-AA") > 0) {
+        exif->addEntry(EXIFTAGID_SOFTWARE, EXIF_ASCII,
+                (uint32_t)(strlen(value) + 1), (void *)value);
+    } else {
+        LOGW("getExifSoftware failed");
+    }
+
+#endif
+
+    if (mParameters.useJpegExifRotation()) {
+        // Map rotation degrees to the EXIF Orientation tag values
+        int16_t orientation;
+        switch (mParameters.getJpegExifRotation()) {
+        case 0:
+            orientation = 1;
+            break;
+        case 90:
+            orientation = 6;
+            break;
+        case 180:
+            orientation = 3;
+            break;
+        case 270:
+            orientation = 8;
+            break;
+        default:
+            orientation = 1;
+            break;
+        }
+        exif->addEntry(EXIFTAGID_ORIENTATION,
+                EXIF_SHORT,
+                1,
+                (void *)&orientation);
+        exif->addEntry(EXIFTAGID_TN_ORIENTATION,
+                EXIF_SHORT,
+                1,
+                (void *)&orientation);
+    }
+
+    // Caller owns the returned QCameraExif and must delete it
+    return exif;
+}
+
+/*===========================================================================
+ * FUNCTION   : setHistogram
+ *
+ * DESCRIPTION: set if histogram should be enabled
+ *
+ * PARAMETERS :
+ *   @histogram_en : bool flag if histogram should be enabled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::setHistogram(bool histogram_en)
+{
+    // Delegate histogram enable/disable to the parameter layer
+    return mParameters.setHistogram(histogram_en);
+}
+
+/*===========================================================================
+ * FUNCTION   : setFaceDetection
+ *
+ * DESCRIPTION: set if face detection should be enabled
+ *
+ * PARAMETERS :
+ *   @enabled : bool flag if face detection should be enabled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::setFaceDetection(bool enabled)
+{
+    // Second argument requests that the change be applied immediately
+    return mParameters.setFaceDetection(enabled, true);
+}
+
+/*===========================================================================
+ * FUNCTION   : isCaptureShutterEnabled
+ *
+ * DESCRIPTION: Check whether shutter should be triggered immediately after
+ *              capture
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : true - regular capture
+ *              false - other type of capture
+ *==========================================================================*/
+bool QCamera2HardwareInterface::isCaptureShutterEnabled()
+{
+    // Immediate capture shutter is gated by a persist system property
+    char prop[PROPERTY_VALUE_MAX] = {0};
+    property_get("persist.camera.feature.shutter", prop, "0");
+    return atoi(prop) == 1;
+}
+
+/*===========================================================================
+ * FUNCTION   : needProcessPreviewFrame
+ *
+ * DESCRIPTION: returns whether preview frame need to be displayed
+ *
+ * PARAMETERS :
+ *   @frameID : frameID of frame to be processed
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needProcessPreviewFrame(uint32_t frameID)
+{
+    // Display only while preview runs, the frame is not inside the
+    // skip window, and instant AEC is not active.
+    if (!m_stateMachine.isPreviewRunning()) {
+        return false;
+    }
+    if (isDisplayFrameToSkip(frameID)) {
+        return false;
+    }
+    return !mParameters.isInstantAECEnabled();
+}
+
+/*===========================================================================
+ * FUNCTION   : needSendPreviewCallback
+ *
+ * DESCRIPTION: returns whether preview frame need to callback to APP
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : true - need preview frame callbck
+ *              false - not send preview frame callback
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needSendPreviewCallback()
+{
+    // App preview callbacks require: preview running, a registered
+    // data callback, the preview-frame message enabled, and the state
+    // machine currently wanting callbacks.
+    return m_stateMachine.isPreviewRunning()
+            && (mDataCb != NULL)
+            && (msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0)
+            && m_stateMachine.isPreviewCallbackNeeded();
+}
+
+/*===========================================================================
+ * FUNCTION   : setDisplaySkip
+ *
+ * DESCRIPTION: set range of frames to skip for preview
+ *
+ * PARAMETERS :
+ *   @enabled : TRUE to start skipping frame to display
+                FALSE to stop skipping frame to display
+ *   @skipCnt : Number of frame to skip. 0 by default
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::setDisplaySkip(bool enabled, uint8_t skipCnt)
+{
+    pthread_mutex_lock(&mGrallocLock);
+    if (enabled) {
+        // Reset any previous window, then open a new skip window
+        // starting just past the last rendered preview frame.
+        setDisplayFrameSkip();
+        setDisplayFrameSkip(mLastPreviewFrameID + skipCnt + 1);
+    } else {
+        // Close the current window at (last preview frame + skipCnt + 1)
+        setDisplayFrameSkip(mFrameSkipStart, (mLastPreviewFrameID + skipCnt + 1));
+    }
+    pthread_mutex_unlock(&mGrallocLock);
+}
+
+/*===========================================================================
+ * FUNCTION   : setDisplayFrameSkip
+ *
+ * DESCRIPTION: set range of frames to skip for preview
+ *
+ * PARAMETERS :
+ *   @start   : frameId to start skip
+ *   @end     : frameId to stop skip
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::setDisplayFrameSkip(uint32_t start,
+        uint32_t end)
+{
+    // start == 0 clears the skip window entirely
+    if (start == 0) {
+        mFrameSkipStart = 0;
+        mFrameSkipEnd = 0;
+        return;
+    }
+    // Grow the window: keep the earliest start seen so far ...
+    if ((mFrameSkipStart == 0) || (mFrameSkipStart > start)) {
+        mFrameSkipStart = start;
+    }
+    // ... and the latest end; end == 0 marks the window open-ended
+    if ((end == 0) || (end > mFrameSkipEnd)) {
+        mFrameSkipEnd = end;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : isDisplayFrameToSkip
+ *
+ * DESCRIPTION: function to determine if input frame falls under skip range
+ *
+ * PARAMETERS :
+ *   @frameId : frameId to verify
+ *
+ * RETURN     : true : need to skip
+ *              false: no need to skip
+ *==========================================================================*/
+bool QCamera2HardwareInterface::isDisplayFrameToSkip(uint32_t frameId)
+{
+    // Skip when a window is active, the frame is at/after its start,
+    // and the window is open-ended (end == 0) or the frame is inside it.
+    if ((mFrameSkipStart == 0) || (frameId < mFrameSkipStart)) {
+        return FALSE;
+    }
+    return ((mFrameSkipEnd == 0) || (frameId <= mFrameSkipEnd)) ? TRUE : FALSE;
+}
+
+/*===========================================================================
+ * FUNCTION   : prepareHardwareForSnapshot
+ *
+ * DESCRIPTION: prepare hardware for snapshot, such as LED
+ *
+ * PARAMETERS :
+ *   @afNeeded: flag indicating if Auto Focus needs to be done during preparation
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::prepareHardwareForSnapshot(int32_t afNeeded)
+{
+    ATRACE_CALL();
+    // Fixed log typo: "SANSPHOT" -> "SNAPSHOT"
+    LOGI("[KPI Perf]: Send PREPARE SNAPSHOT event");
+    // Ask the backend to prepare for snapshot (e.g. LED/flash),
+    // optionally running autofocus as part of the preparation.
+    return mCameraHandle->ops->prepare_snapshot(mCameraHandle->camera_handle,
+                                                afNeeded);
+}
+
+/*===========================================================================
+ * FUNCTION   : needFDMetadata
+ *
+ * DESCRIPTION: check whether we need process Face Detection metadata in this channel
+ *
+ * PARAMETERS :
+ *   @channel_type: channel type
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needFDMetadata(qcamera_ch_type_enum_t channel_type)
+{
+    // Face detection metadata is only consumed on the ZSL channel,
+    // and only when snapshot FD and face detection are both enabled.
+    if (channel_type != QCAMERA_CH_TYPE_ZSL) {
+        return false;
+    }
+    bool needed = mParameters.isSnapshotFDNeeded() &&
+            mParameters.isFaceDetectionEnabled();
+    if (needed) {
+        LOGH("Face Detection metadata is required in ZSL mode.");
+    }
+    return needed;
+}
+
+/*===========================================================================
+ * FUNCTION   : deferredWorkRoutine
+ *
+ * DESCRIPTION: data process routine that executes deferred tasks
+ *
+ * PARAMETERS :
+ *   @data    : user data ptr (QCamera2HardwareInterface)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void *QCamera2HardwareInterface::deferredWorkRoutine(void *obj)
+{
+    int running = 1;
+    int ret;
+    uint8_t is_active = FALSE;
+    int32_t job_status = 0;
+
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)obj;
+    QCameraCmdThread *cmdThread = &pme->mDeferredWorkThread;
+    cmdThread->setName("CAM_defrdWrk");
+
+    do {
+        do {
+            ret = cam_sem_wait(&cmdThread->cmd_sem);
+            if (ret != 0 && errno != EINVAL) {
+                LOGE("cam_sem_wait error (%s)",
+                         strerror(errno));
+                return NULL;
+            }
+        } while (ret != 0);
+
+        // we got notified about new cmd avail in cmd queue
+        camera_cmd_type_t cmd = cmdThread->getCmd();
+        LOGD("cmd: %d", cmd);
+        switch (cmd) {
+        case CAMERA_CMD_TYPE_START_DATA_PROC:
+            LOGH("start data proc");
+            is_active = TRUE;
+            break;
+        case CAMERA_CMD_TYPE_STOP_DATA_PROC:
+            LOGH("stop data proc");
+            is_active = FALSE;
+            // signal cmd is completed
+            cam_sem_post(&cmdThread->sync_sem);
+            break;
+        case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+            {
+                DefWork *dw =
+                    reinterpret_cast<DefWork *>(pme->mCmdQueue.dequeue());
+
+                if ( NULL == dw ) {
+                    LOGE("Invalid deferred work");
+                    break;
+                }
+
+                switch( dw->cmd ) {
+                case CMD_DEF_ALLOCATE_BUFF:
+                    {
+                        QCameraChannel * pChannel = dw->args.allocArgs.ch;
+
+                        if ( NULL == pChannel ) {
+                            LOGE("Invalid deferred work channel");
+                            job_status = BAD_VALUE;
+                            break;
+                        }
+
+                        cam_stream_type_t streamType = dw->args.allocArgs.type;
+                        LOGH("Deferred buffer allocation started for stream type: %d",
+                                 streamType);
+
+                        uint32_t iNumOfStreams = pChannel->getNumOfStreams();
+                        QCameraStream *pStream = NULL;
+                        for ( uint32_t i = 0; i < iNumOfStreams; ++i) {
+                            pStream = pChannel->getStreamByIndex(i);
+
+                            if ( NULL == pStream ) {
+                                job_status = BAD_VALUE;
+                                break;
+                            }
+
+                            if ( pStream->isTypeOf(streamType)) {
+                                if ( pStream->allocateBuffers() ) {
+                                    LOGE("Error allocating buffers !!!");
+                                    job_status =  NO_MEMORY;
+                                    pme->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_UNKNOWN, 0);
+                                }
+                                break;
+                            }
+                        }
+                    }
+                    break;
+                case CMD_DEF_PPROC_START:
+                    {
+                        int32_t ret = pme->getDefJobStatus(pme->mInitPProcJob);
+                        if (ret != NO_ERROR) {
+                            job_status = ret;
+                            LOGE("PPROC Start failed");
+                            pme->sendEvtNotify(CAMERA_MSG_ERROR,
+                                    CAMERA_ERROR_UNKNOWN, 0);
+                            break;
+                        }
+                        QCameraChannel * pChannel = dw->args.pprocArgs;
+                        assert(pChannel);
+
+                        if (pme->m_postprocessor.start(pChannel) != NO_ERROR) {
+                            LOGE("cannot start postprocessor");
+                            job_status = BAD_VALUE;
+                            pme->sendEvtNotify(CAMERA_MSG_ERROR,
+                                    CAMERA_ERROR_UNKNOWN, 0);
+                        }
+                    }
+                    break;
+                case CMD_DEF_METADATA_ALLOC:
+                    {
+                        int32_t ret = pme->getDefJobStatus(pme->mParamAllocJob);
+                        if (ret != NO_ERROR) {
+                            job_status = ret;
+                            LOGE("Metadata alloc failed");
+                            pme->sendEvtNotify(CAMERA_MSG_ERROR,
+                                    CAMERA_ERROR_UNKNOWN, 0);
+                            break;
+                        }
+                        pme->mMetadataMem = new QCameraMetadataStreamMemory(
+                                QCAMERA_ION_USE_CACHE);
+
+                        if (pme->mMetadataMem == NULL) {
+                            LOGE("Unable to allocate metadata buffers");
+                            job_status = BAD_VALUE;
+                            pme->sendEvtNotify(CAMERA_MSG_ERROR,
+                                    CAMERA_ERROR_UNKNOWN, 0);
+                        } else {
+                            int32_t rc = pme->mMetadataMem->allocate(
+                                    dw->args.metadataAllocArgs.bufferCnt,
+                                    dw->args.metadataAllocArgs.size,
+                                    NON_SECURE);
+                            if (rc < 0) {
+                                delete pme->mMetadataMem;
+                                pme->mMetadataMem = NULL;
+                            }
+                        }
+                     }
+                     break;
+                case CMD_DEF_CREATE_JPEG_SESSION:
+                    {
+                        QCameraChannel * pChannel = dw->args.pprocArgs;
+                        assert(pChannel);
+
+                        int32_t ret = pme->getDefJobStatus(pme->mReprocJob);
+                        if (ret != NO_ERROR) {
+                            job_status = ret;
+                            LOGE("Jpeg create failed");
+                            break;
+                        }
+
+                        if (pme->m_postprocessor.createJpegSession(pChannel)
+                            != NO_ERROR) {
+                            LOGE("cannot create JPEG session");
+                            job_status = UNKNOWN_ERROR;
+                            pme->sendEvtNotify(CAMERA_MSG_ERROR,
+                                    CAMERA_ERROR_UNKNOWN, 0);
+                        }
+                    }
+                    break;
+                case CMD_DEF_PPROC_INIT:
+                    {
+                        int32_t rc = NO_ERROR;
+
+                        jpeg_encode_callback_t jpegEvtHandle =
+                                dw->args.pprocInitArgs.jpeg_cb;
+                        void* user_data = dw->args.pprocInitArgs.user_data;
+                        QCameraPostProcessor *postProcessor =
+                                &(pme->m_postprocessor);
+                        uint32_t cameraId = pme->mCameraId;
+                        cam_capability_t *capability =
+                                gCamCapability[cameraId];
+                        cam_padding_info_t padding_info;
+                        cam_padding_info_t& cam_capability_padding_info =
+                                capability->padding_info;
+
+                        if(!pme->mJpegClientHandle) {
+                            rc = pme->initJpegHandle();
+                            if (rc != NO_ERROR) {
+                                LOGE("Error!! creating JPEG handle failed");
+                                job_status = UNKNOWN_ERROR;
+                                pme->sendEvtNotify(CAMERA_MSG_ERROR,
+                                        CAMERA_ERROR_UNKNOWN, 0);
+                                break;
+                            }
+                        }
+                        LOGH("mJpegClientHandle : %d", pme->mJpegClientHandle);
+
+                        rc = postProcessor->setJpegHandle(&pme->mJpegHandle,
+                                &pme->mJpegMpoHandle,
+                                pme->mJpegClientHandle);
+                        if (rc != 0) {
+                            LOGE("Error!! set JPEG handle failed");
+                            job_status = UNKNOWN_ERROR;
+                            pme->sendEvtNotify(CAMERA_MSG_ERROR,
+                                    CAMERA_ERROR_UNKNOWN, 0);
+                            break;
+                        }
+
+                        /* get max pic size for jpeg work buf calculation*/
+                        rc = postProcessor->init(jpegEvtHandle, user_data);
+
+                        if (rc != NO_ERROR) {
+                            LOGE("cannot init postprocessor");
+                            job_status = UNKNOWN_ERROR;
+                            pme->sendEvtNotify(CAMERA_MSG_ERROR,
+                                    CAMERA_ERROR_UNKNOWN, 0);
+                            break;
+                        }
+
+                        // update padding info from jpeg
+                        postProcessor->getJpegPaddingReq(padding_info);
+                        if (cam_capability_padding_info.width_padding <
+                                padding_info.width_padding) {
+                            cam_capability_padding_info.width_padding =
+                                    padding_info.width_padding;
+                        }
+                        if (cam_capability_padding_info.height_padding <
+                                padding_info.height_padding) {
+                            cam_capability_padding_info.height_padding =
+                                    padding_info.height_padding;
+                        }
+                        if (cam_capability_padding_info.plane_padding !=
+                                padding_info.plane_padding) {
+                            cam_capability_padding_info.plane_padding =
+                                    mm_stream_calc_lcm(
+                                    cam_capability_padding_info.plane_padding,
+                                    padding_info.plane_padding);
+                        }
+                        if (cam_capability_padding_info.offset_info.offset_x
+                                != padding_info.offset_info.offset_x) {
+                            cam_capability_padding_info.offset_info.offset_x =
+                                    mm_stream_calc_lcm (
+                                    cam_capability_padding_info.offset_info.offset_x,
+                                    padding_info.offset_info.offset_x);
+                        }
+                        if (cam_capability_padding_info.offset_info.offset_y
+                                != padding_info.offset_info.offset_y) {
+                            cam_capability_padding_info.offset_info.offset_y =
+                            mm_stream_calc_lcm (
+                                    cam_capability_padding_info.offset_info.offset_y,
+                                    padding_info.offset_info.offset_y);
+                        }
+                    }
+                    break;
+                case CMD_DEF_PARAM_ALLOC:
+                    {
+                        int32_t rc = pme->mParameters.allocate();
+                        // notify routine would not be initialized by this time.
+                        // So, just update error job status
+                        if (rc != NO_ERROR) {
+                            job_status = rc;
+                            LOGE("Param allocation failed");
+                            break;
+                        }
+                    }
+                    break;
+                case CMD_DEF_PARAM_INIT:
+                    {
+                        int32_t rc = pme->getDefJobStatus(pme->mParamAllocJob);
+                        if (rc != NO_ERROR) {
+                            job_status = rc;
+                            LOGE("Param init failed");
+                            pme->sendEvtNotify(CAMERA_MSG_ERROR,
+                                    CAMERA_ERROR_UNKNOWN, 0);
+                            break;
+                        }
+
+                        uint32_t camId = pme->mCameraId;
+                        cam_capability_t * cap = gCamCapability[camId];
+
+                        if (pme->mCameraHandle == NULL) {
+                            LOGE("Camera handle is null");
+                            job_status = BAD_VALUE;
+                            pme->sendEvtNotify(CAMERA_MSG_ERROR,
+                                    CAMERA_ERROR_UNKNOWN, 0);
+                            break;
+                        }
+
+                        // Now PostProc need calibration data as initialization
+                        // time for jpeg_open and calibration data is a
+                        // get param for now, so params needs to be initialized
+                        // before postproc init
+                        rc = pme->mParameters.init(cap,
+                                pme->mCameraHandle,
+                                pme);
+                        if (rc != 0) {
+                            job_status = UNKNOWN_ERROR;
+                            LOGE("Parameter Initialization failed");
+                            pme->sendEvtNotify(CAMERA_MSG_ERROR,
+                                    CAMERA_ERROR_UNKNOWN, 0);
+                            break;
+                        }
+
+                        // Get related cam calibration only in
+                        // dual camera mode
+                        if (pme->getRelatedCamSyncInfo()->sync_control ==
+                                CAM_SYNC_RELATED_SENSORS_ON) {
+                            rc = pme->mParameters.getRelatedCamCalibration(
+                                &(pme->mJpegMetadata.otp_calibration_data));
+                            LOGD("Dumping Calibration Data Version Id %f rc %d",
+                                    pme->mJpegMetadata.otp_calibration_data.calibration_format_version,
+                                    rc);
+                            if (rc != 0) {
+                                job_status = UNKNOWN_ERROR;
+                                LOGE("getRelatedCamCalibration failed");
+                                pme->sendEvtNotify(CAMERA_MSG_ERROR,
+                                        CAMERA_ERROR_UNKNOWN, 0);
+                                break;
+                            }
+                            pme->m_bRelCamCalibValid = true;
+                        }
+
+                        pme->mJpegMetadata.sensor_mount_angle =
+                            cap->sensor_mount_angle;
+                        pme->mJpegMetadata.default_sensor_flip = FLIP_NONE;
+
+                        pme->mParameters.setMinPpMask(
+                            cap->qcom_supported_feature_mask);
+                        pme->mExifParams.debug_params =
+                                (mm_jpeg_debug_exif_params_t *)
+                                malloc(sizeof(mm_jpeg_debug_exif_params_t));
+                        if (!pme->mExifParams.debug_params) {
+                            LOGE("Out of Memory. Allocation failed for "
+                                    "3A debug exif params");
+                            job_status = NO_MEMORY;
+                            pme->sendEvtNotify(CAMERA_MSG_ERROR,
+                                    CAMERA_ERROR_UNKNOWN, 0);
+                            break;
+                        }
+                        memset(pme->mExifParams.debug_params, 0,
+                                sizeof(mm_jpeg_debug_exif_params_t));
+                    }
+                    break;
+                case CMD_DEF_GENERIC:
+                    {
+                        BackgroundTask *bgTask = dw->args.genericArgs;
+                        job_status = bgTask->bgFunction(bgTask->bgArgs);
+                    }
+                    break;
+                default:
+                    LOGE("Incorrect command : %d", dw->cmd);
+                }
+
+                pme->dequeueDeferredWork(dw, job_status);
+            }
+            break;
+        case CAMERA_CMD_TYPE_EXIT:
+            running = 0;
+            break;
+        default:
+            break;
+        }
+    } while (running);
+
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : queueDeferredWork
+ *
+ * DESCRIPTION: function which queues deferred tasks. Finds a free slot in
+ *              mDefOngoingJobs, enqueues the work item on the command queue
+ *              and wakes the deferred-work thread. Thread-safe via mDefLock.
+ *
+ * PARAMETERS :
+ *   @cmd     : deferred task
+ *   @args    : deferred task arguments
+ *
+ * RETURN     : job id of deferred job
+ *            : 0 in case of error (no free slot, allocation failure, or
+ *              inactive command queue)
+ *==========================================================================*/
+uint32_t QCamera2HardwareInterface::queueDeferredWork(DeferredWorkCmd cmd,
+                                                      DeferWorkArgs args)
+{
+    Mutex::Autolock l(mDefLock);
+    for (int32_t i = 0; i < MAX_ONGOING_JOBS; ++i) {
+        // mDefJobId == 0 marks a free slot; valid job ids are never 0.
+        if (mDefOngoingJobs[i].mDefJobId == 0) {
+            DefWork *dw = new DefWork(cmd, sNextJobId, args);
+            if (!dw) {
+                LOGE("out of memory.");
+                return 0;
+            }
+            if (mCmdQueue.enqueue(dw)) {
+                // Record the job only after a successful enqueue so a failed
+                // enqueue leaves the slot free.
+                mDefOngoingJobs[i].mDefJobId = sNextJobId++;
+                mDefOngoingJobs[i].mDefJobStatus = 0;
+                if (sNextJobId == 0) { // handle overflow; 0 means "no job"
+                    sNextJobId = 1;
+                }
+                mDeferredWorkThread.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB,
+                        FALSE,
+                        FALSE);
+                return mDefOngoingJobs[i].mDefJobId;
+            } else {
+                LOGD("Command queue not active! cmd = %d", cmd);
+                delete dw;
+                return 0;
+            }
+        }
+    }
+    // All MAX_ONGOING_JOBS slots are occupied.
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : initJpegHandle
+ *
+ * DESCRIPTION: Opens the JPEG client (if not already open) and caches the
+ *              resulting handle. In dual-camera mode the MPO interface is
+ *              opened as well, passing OTP calibration metadata when valid.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::initJpegHandle() {
+    LOGH("E");
+    if (!mJpegClientHandle) {
+        // Size the JPEG work buffers for the largest supported picture.
+        cam_dimension_t picSize;
+        mParameters.getMaxPicSize(picSize);
+
+        mm_dimension max_size = {0, 0};
+        max_size.w = picSize.width;
+        max_size.h = picSize.height;
+
+        bool dualCamOn = (getRelatedCamSyncInfo()->sync_control ==
+                CAM_SYNC_RELATED_SENSORS_ON);
+        if (!dualCamOn) {
+            mJpegClientHandle = jpeg_open(&mJpegHandle, NULL, max_size, NULL);
+        } else if (m_bRelCamCalibValid) {
+            // Dual camera with valid OTP calibration: hand it to jpeg/mpo.
+            mJpegClientHandle = jpeg_open(&mJpegHandle, &mJpegMpoHandle,
+                    max_size, &mJpegMetadata);
+        } else {
+            mJpegClientHandle = jpeg_open(&mJpegHandle, &mJpegMpoHandle,
+                    max_size, NULL);
+        }
+        if (!mJpegClientHandle) {
+            LOGE("Error !! jpeg_open failed!! ");
+            return UNKNOWN_ERROR;
+        }
+        // This camera owns the client handle and must close it on deinit.
+        mJpegHandleOwner = true;
+    }
+    LOGH("X mJpegHandleOwner: %d, mJpegClientHandle: %d camera id: %d",
+             mJpegHandleOwner, mJpegClientHandle, mCameraId);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : deinitJpegHandle
+ *
+ * DESCRIPTION: Closes the JPEG client if this camera owns it and clears the
+ *              cached handle either way.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::deinitJpegHandle() {
+    int32_t rc = NO_ERROR;
+    LOGH("E");
+    // Only the camera that opened the client may close it.
+    bool ownsClient = (mJpegHandleOwner && mJpegClientHandle);
+    if (ownsClient) {
+        rc = mJpegHandle.close(mJpegClientHandle);
+        if (NO_ERROR != rc) {
+            LOGE("Error!! Closing mJpegClientHandle: %d failed",
+                     mJpegClientHandle);
+        }
+        // Wipe the ops tables so stale function pointers cannot be invoked.
+        memset(&mJpegHandle, 0, sizeof(mJpegHandle));
+        memset(&mJpegMpoHandle, 0, sizeof(mJpegMpoHandle));
+        mJpegHandleOwner = false;
+    }
+    // A borrowed (non-owned) handle is simply forgotten.
+    mJpegClientHandle = 0;
+    LOGH("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setJpegHandleInfo
+ *
+ * DESCRIPTION: Copies externally supplied JPEG/MPO ops tables and stores the
+ *              given client handle (handle sharing between cameras).
+ *
+ * PARAMETERS:
+ *                  @ops                    : JPEG ops
+ *                  @mpo_ops             : Jpeg MPO ops
+ *                  @pJpegClientHandle : o/p Jpeg Client Handle
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::setJpegHandleInfo(mm_jpeg_ops_t *ops,
+        mm_jpeg_mpo_ops_t *mpo_ops, uint32_t pJpegClientHandle) {
+
+    // Reject the call unless every input is present.
+    if (!pJpegClientHandle || !ops || !mpo_ops) {
+        LOGE("Error!! No Handle found: %d",
+                pJpegClientHandle);
+        return BAD_VALUE;
+    }
+
+    LOGH("Setting JPEG client handle %d",
+            pJpegClientHandle);
+    memcpy(&mJpegHandle, ops, sizeof(mm_jpeg_ops_t));
+    memcpy(&mJpegMpoHandle, mpo_ops, sizeof(mm_jpeg_mpo_ops_t));
+    mJpegClientHandle = pJpegClientHandle;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegHandleInfo
+ *
+ * DESCRIPTION: gets JPEG client handle info. Waits for the postproc-init
+ *              deferred job first so the handle is guaranteed valid.
+ *
+ * PARAMETERS:
+ *                  @ops                    : JPEG ops
+ *                  @mpo_ops             : Jpeg MPO ops
+ *                  @pJpegClientHandle : o/p Jpeg Client Handle
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::getJpegHandleInfo(mm_jpeg_ops_t *ops,
+        mm_jpeg_mpo_ops_t *mpo_ops, uint32_t *pJpegClientHandle) {
+
+    // The JPEG handle is created by the CMD_DEF_PPROC_INIT deferred job;
+    // block until it has completed (or failed).
+    if (NO_ERROR != waitDeferredWork(mInitPProcJob)) {
+        LOGE("Init PProc Deferred work failed");
+        return UNKNOWN_ERROR;
+    }
+    // Copy JPEG ops if present
+    if (ops && mpo_ops && pJpegClientHandle) {
+        memcpy(ops, &mJpegHandle, sizeof(mm_jpeg_ops_t));
+        memcpy(mpo_ops, &mJpegMpoHandle, sizeof(mm_jpeg_mpo_ops_t));
+        *pJpegClientHandle = mJpegClientHandle;
+        // Bug fix: log the handle value, not the pointer (was passed to %d).
+        LOGH("Getting JPEG client handle %d",
+                *pJpegClientHandle);
+        return NO_ERROR;
+    } else {
+        return BAD_VALUE;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : dequeueDeferredWork
+ *
+ * DESCRIPTION: function which dequeues deferred tasks. On success the slot
+ *              is freed; on failure the slot is kept with the error status
+ *              so later waiters/queries can observe it. Thread-safe via
+ *              mDefLock; wakes all waiters via mDefCond.
+ *
+ * PARAMETERS :
+ *   @dw      : deferred work
+ *   @jobStatus: deferred task job status
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *              NOTE(review): declared as uint32_t, yet returns UNKNOWN_ERROR
+ *              which callers would see wrapped — confirm callers only
+ *              compare against NO_ERROR.
+ *==========================================================================*/
+uint32_t QCamera2HardwareInterface::dequeueDeferredWork(DefWork* dw, int32_t jobStatus)
+{
+    Mutex::Autolock l(mDefLock);
+    for (uint32_t i = 0; i < MAX_ONGOING_JOBS; i++) {
+        if (mDefOngoingJobs[i].mDefJobId == dw->id) {
+            if (jobStatus != NO_ERROR) {
+                // Keep the failed job in the table so getDefJobStatus() and
+                // waitDeferredWork() can report the error.
+                mDefOngoingJobs[i].mDefJobStatus = jobStatus;
+                LOGH("updating job status %d for id %d",
+                         jobStatus, dw->id);
+            } else {
+                // Success: free the slot for reuse.
+                mDefOngoingJobs[i].mDefJobId = 0;
+                mDefOngoingJobs[i].mDefJobStatus = 0;
+            }
+            delete dw;
+            mDefCond.broadcast();
+            return NO_ERROR;
+        }
+    }
+
+    // Job id not found in the table.
+    return UNKNOWN_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getDefJobStatus
+ *
+ * DESCRIPTION: Gets if a deferred task is success/fail
+ *
+ * PARAMETERS :
+ *   @job_id  : deferred task id
+ *
+ * RETURN     : NO_ERROR if the job success, otherwise false
+ *
+ * PRECONDITION : mDefLock is held by current thread
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::getDefJobStatus(uint32_t &job_id)
+{
+    for (uint32_t slot = 0; slot < MAX_ONGOING_JOBS; slot++) {
+        if (mDefOngoingJobs[slot].mDefJobId != job_id) {
+            continue;
+        }
+        // Slot found: report its recorded status.
+        int32_t status = mDefOngoingJobs[slot].mDefJobStatus;
+        if (NO_ERROR != status) {
+            LOGE("job_id (%d) was failed", job_id);
+        }
+        return status;
+    }
+    // Unknown/completed job ids are treated as success.
+    return NO_ERROR;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : checkDeferredWork
+ *
+ * DESCRIPTION: checks if a deferred task is in progress
+ *
+ * PARAMETERS :
+ *   @job_id  : deferred task id
+ *
+ * RETURN     : true if the task exists, otherwise false
+ *
+ * PRECONDITION : mDefLock is held by current thread
+ *==========================================================================*/
+bool QCamera2HardwareInterface::checkDeferredWork(uint32_t &job_id)
+{
+    for (uint32_t slot = 0; slot < MAX_ONGOING_JOBS; slot++) {
+        if (mDefOngoingJobs[slot].mDefJobId != job_id) {
+            continue;
+        }
+        // Slot found: the job counts as "in progress" only while no error
+        // status has been recorded for it.
+        return (NO_ERROR == mDefOngoingJobs[slot].mDefJobStatus);
+    }
+    return false;
+}
+
+/*===========================================================================
+ * FUNCTION   : waitDeferredWork
+ *
+ * DESCRIPTION: waits for a deferred task to finish. Blocks on mDefCond with
+ *              a relative timeout and re-checks under mDefLock until the job
+ *              is no longer pending, then returns its final status.
+ *
+ * PARAMETERS :
+ *   @job_id  : deferred task id (0 means "no job"; returns NO_ERROR)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::waitDeferredWork(uint32_t &job_id)
+{
+    Mutex::Autolock l(mDefLock);
+
+    if (job_id == 0) {
+        LOGD("Invalid job id %d", job_id);
+        return NO_ERROR;
+    }
+
+    // checkDeferredWork() requires mDefLock held, which Autolock guarantees;
+    // waitRelative releases the lock while blocked and reacquires it.
+    while (checkDeferredWork(job_id) == true ) {
+        mDefCond.waitRelative(mDefLock, CAMERA_DEFERRED_THREAD_TIMEOUT);
+    }
+    return getDefJobStatus(job_id);
+}
+
+/*===========================================================================
+ * FUNCTION   : scheduleBackgroundTask
+ *
+ * DESCRIPTION: Run a requested task in the deferred thread
+ *
+ * PARAMETERS :
+ *   @bgTask  : Task to perform in the background
+ *
+ * RETURN     : job id of deferred job
+ *            : 0 in case of error
+ *==========================================================================*/
+uint32_t QCamera2HardwareInterface::scheduleBackgroundTask(BackgroundTask* bgTask)
+{
+    // Package the task as generic deferred-work arguments and enqueue it.
+    DeferWorkArgs workArgs;
+    memset(&workArgs, 0, sizeof(DeferWorkArgs));
+    workArgs.genericArgs = bgTask;
+    return queueDeferredWork(CMD_DEF_GENERIC, workArgs);
+}
+
+/*===========================================================================
+ * FUNCTION   : waitForBackgroundTask
+ *
+ * DESCRIPTION: Wait for a background task to complete. Thin wrapper over
+ *              waitDeferredWork(), since background tasks are queued as
+ *              deferred jobs (see scheduleBackgroundTask).
+ *
+ * PARAMETERS :
+ *   @taskId  : Task id to wait for
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::waitForBackgroundTask(uint32_t& taskId)
+{
+    return waitDeferredWork(taskId);
+}
+
+/*===========================================================================
+ * FUNCTION   : needDeferred
+ *
+ * DESCRIPTION: Decides whether buffer allocation for the given stream type
+ *              should be performed as a deferred background task.
+ *
+ * PARAMETERS :
+ *   @stream_type  : stream type
+ *
+ * RETURN     : true - if background task is needed
+ *              false -  if background task is NOT needed
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needDeferred(cam_stream_type_t stream_type)
+{
+    // Preview without a display window yet, and analysis streams, are
+    // allocated inline.
+    if ((stream_type == CAM_STREAM_TYPE_PREVIEW && mPreviewWindow == NULL)
+            || (stream_type == CAM_STREAM_TYPE_ANALYSIS)) {
+        return FALSE;
+    }
+
+    // Offline RAW processing handles its RAW buffers inline.
+    if ((stream_type == CAM_STREAM_TYPE_RAW)
+            && (mParameters.getofflineRAW())) {
+        return FALSE;
+    }
+
+    // Snapshot allocation is deferred unless recording hint is set.
+    if ((stream_type == CAM_STREAM_TYPE_SNAPSHOT)
+            && (!mParameters.getRecordingHintValue())){
+        return TRUE;
+    }
+
+    if ((stream_type == CAM_STREAM_TYPE_PREVIEW)
+            || (stream_type == CAM_STREAM_TYPE_METADATA)
+            || (stream_type == CAM_STREAM_TYPE_RAW)
+            || (stream_type == CAM_STREAM_TYPE_POSTVIEW)) {
+        return TRUE;
+    }
+
+    // All remaining types (e.g. video) are allocated inline. The original
+    // code had an explicit VIDEO check here that duplicated this fallthrough.
+    return FALSE;
+}
+
+/*===========================================================================
+ * FUNCTION   : isRegularCapture
+ *
+ * DESCRIPTION: Check configuration for regular catpure
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : true - regular capture
+ *              false - other type of capture
+ *==========================================================================*/
+bool QCamera2HardwareInterface::isRegularCapture()
+{
+    // A capture is "regular" only when every special mode is disabled and a
+    // single snapshot is expected.
+    return (numOfSnapshotsExpected() == 1
+            && !isLongshotEnabled()
+            && !mParameters.isHDREnabled()
+            && !mParameters.getRecordingHintValue()
+            && !isZSLMode()
+            && !mParameters.getofflineRAW());
+}
+
+/*===========================================================================
+ * FUNCTION   : getLogLevel
+ *
+ * DESCRIPTION: Reads the log level property into a variable
+ *
+ * PARAMETERS :
+ *   None
+ *
+ * RETURN     :
+ *   None
+ *==========================================================================*/
+void QCamera2HardwareInterface::getLogLevel()
+{
+    char value[PROPERTY_VALUE_MAX];
+
+    // KPI debug logging defaults to level 1 when the property is unset.
+    property_get("persist.camera.kpi.debug", value, "1");
+    gKpiDebugLevel = atoi(value);
+}
+
+/*===========================================================================
+ * FUNCTION   : getSensorType
+ *
+ * DESCRIPTION: Returns the type of sensor being used whether YUV or Bayer
+ *
+ * PARAMETERS :
+ *   None
+ *
+ * RETURN     : Type of sensor - bayer or YUV
+ *
+ *==========================================================================*/
+cam_sensor_t QCamera2HardwareInterface::getSensorType()
+{
+    // Read straight from the cached per-camera capability table.
+    return gCamCapability[mCameraId]->sensor_type.sens_type;
+}
+
+/*===========================================================================
+ * FUNCTION   : startRAWChannel
+ *
+ * DESCRIPTION: start RAW Channel. In offline-RAW mode, first tries to link
+ *              a metadata stream from the given channel into the RAW channel.
+ *
+ * PARAMETERS :
+ *   @pMetaChannel  : Src channel whose metadata stream is linked into the
+ *                    RAW channel (may be NULL to skip linking).
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::startRAWChannel(QCameraChannel *pMetaChannel)
+{
+    int32_t rc = NO_ERROR;
+    QCameraChannel *pChannel = m_channels[QCAMERA_CH_TYPE_RAW];
+    // Only start when a RAW channel exists and offline RAW is enabled.
+    if ((NULL != pChannel) && (mParameters.getofflineRAW())) {
+        // Find and try to link a metadata stream from preview channel
+        QCameraStream *pMetaStream = NULL;
+
+        if (pMetaChannel != NULL) {
+            uint32_t streamNum = pMetaChannel->getNumOfStreams();
+            QCameraStream *pStream = NULL;
+            for (uint32_t i = 0 ; i < streamNum ; i++ ) {
+                pStream = pMetaChannel->getStreamByIndex(i);
+                if ((NULL != pStream) &&
+                        (CAM_STREAM_TYPE_METADATA == pStream->getMyType())) {
+                    pMetaStream = pStream;
+                    break;
+                }
+            }
+
+            if (NULL != pMetaStream) {
+                rc = pChannel->linkStream(pMetaChannel, pMetaStream);
+                if (NO_ERROR != rc) {
+                    // Link failure is logged but does not block channel start.
+                    LOGE("Metadata stream link failed %d", rc);
+                }
+            }
+        }
+        rc = pChannel->start();
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stopRAWChannel
+ *
+ * DESCRIPTION: stop RAW channel (the original header here was a copy-paste
+ *              of startRecording)
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::stopRAWChannel()
+{
+    int32_t rc = NO_ERROR;
+    rc = stopChannel(QCAMERA_CH_TYPE_RAW);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : isLowPowerMode
+ *
+ * DESCRIPTION: Returns TRUE if low power mode settings are to be applied for video recording
+ *
+ * PARAMETERS :
+ *   None
+ *
+ * RETURN     : TRUE/FALSE
+ *
+ *==========================================================================*/
+bool QCamera2HardwareInterface::isLowPowerMode()
+{
+    cam_dimension_t videoDim;
+    mParameters.getStreamDimension(CAM_STREAM_TYPE_VIDEO, videoDim);
+
+    char value[PROPERTY_VALUE_MAX];
+    property_get("camera.lowpower.record.enable", value, "0");
+    bool propEnabled = (atoi(value) != 0);
+
+    // Low power mode applies only when all of the following hold:
+    //   1. the recording hint is set,
+    //   2. camera.lowpower.record.enable is set, and
+    //   3. video resolution is 2k (2048x1080) or above.
+    bool resBigEnough = ((videoDim.width * videoDim.height) >= (2048 * 1080));
+    return mParameters.getRecordingHintValue() && propEnabled && resBigEnough;
+}
+
+/*===========================================================================
+ * FUNCTION   : getBootToMonoTimeOffset
+ *
+ * DESCRIPTION: Calculate offset that is used to convert from
+ *              clock domain of boot to monotonic. Samples the clocks several
+ *              times and keeps the measurement with the smallest gap between
+ *              the two monotonic reads, which bounds the midpoint error.
+ *
+ * PARAMETERS :
+ *   None
+ *
+ * RETURN     : clock offset between boottime and monotonic time.
+ *
+ *==========================================================================*/
+nsecs_t QCamera2HardwareInterface::getBootToMonoTimeOffset()
+{
+    // try three times to get the clock offset, choose the one
+    // with the minimum gap in measurements.
+    const int tries = 3;
+    // Initialized defensively (the i == 0 iteration always assigns both, but
+    // uninitialized locals trip maybe-uninitialized warnings/static analysis).
+    nsecs_t bestGap = 0;
+    nsecs_t measured = 0;
+    for (int i = 0; i < tries; ++i) {
+        const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
+        const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
+        const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
+        const nsecs_t gap = tmono2 - tmono;
+        if (i == 0 || gap < bestGap) {
+            bestGap = gap;
+            // Midpoint of the two monotonic reads approximates "when" tbase
+            // was sampled; the offset converts boottime -> monotonic.
+            measured = tbase - ((tmono + tmono2) >> 1);
+        }
+    }
+    return measured;
+}
+
+}; // namespace qcamera
diff --git a/msmcobalt/QCamera2/HAL/QCamera2HWI.h b/msmcobalt/QCamera2/HAL/QCamera2HWI.h
new file mode 100644
index 0000000..2d365bb
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/QCamera2HWI.h
@@ -0,0 +1,798 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA2HARDWAREINTERFACE_H__
+#define __QCAMERA2HARDWAREINTERFACE_H__
+
+// System dependencies
+#include <utils/Mutex.h>
+#include <utils/Condition.h>
+
+// Camera dependencies
+#include "camera.h"
+#include "QCameraAllocator.h"
+#include "QCameraChannel.h"
+#include "QCameraCmdThread.h"
+#include "QCameraDisplay.h"
+#include "QCameraMem.h"
+#include "QCameraParameters.h"
+#include "QCameraParametersIntf.h"
+#include "QCameraPerf.h"
+#include "QCameraPostProc.h"
+#include "QCameraQueue.h"
+#include "QCameraStream.h"
+#include "QCameraStateMachine.h"
+#include "QCameraThermalAdapter.h"
+
+#ifdef TARGET_TS_MAKEUP
+#include "ts_makeup_engine.h"
+#include "ts_detectface_engine.h"
+#endif
+extern "C" {
+#include "mm_camera_interface.h"
+#include "mm_jpeg_interface.h"
+}
+
+#include "QCameraTrace.h"
+
+namespace qcamera {
+
+#ifndef TRUE
+#define TRUE 1
+#endif
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+typedef enum {
+    QCAMERA_CH_TYPE_ZSL,
+    QCAMERA_CH_TYPE_CAPTURE,
+    QCAMERA_CH_TYPE_PREVIEW,
+    QCAMERA_CH_TYPE_VIDEO,
+    QCAMERA_CH_TYPE_SNAPSHOT,
+    QCAMERA_CH_TYPE_RAW,
+    QCAMERA_CH_TYPE_METADATA,
+    QCAMERA_CH_TYPE_ANALYSIS,
+    QCAMERA_CH_TYPE_CALLBACK,
+    QCAMERA_CH_TYPE_MAX
+} qcamera_ch_type_enum_t;
+
+typedef struct {
+    int32_t msg_type;
+    int32_t ext1;
+    int32_t ext2;
+} qcamera_evt_argm_t;
+
+#define QCAMERA_DUMP_FRM_PREVIEW             1
+#define QCAMERA_DUMP_FRM_VIDEO               (1<<1)
+#define QCAMERA_DUMP_FRM_SNAPSHOT            (1<<2)
+#define QCAMERA_DUMP_FRM_THUMBNAIL           (1<<3)
+#define QCAMERA_DUMP_FRM_RAW                 (1<<4)
+#define QCAMERA_DUMP_FRM_JPEG                (1<<5)
+#define QCAMERA_DUMP_FRM_INPUT_REPROCESS     (1<<6)
+
+#define QCAMERA_DUMP_FRM_MASK_ALL    0x000000ff
+
+#define QCAMERA_ION_USE_CACHE   true
+#define QCAMERA_ION_USE_NOCACHE false
+#define MAX_ONGOING_JOBS 25
+
+#define MAX(a, b) ((a) > (b) ? (a) : (b))
+
+#define EXIF_ASCII_PREFIX_SIZE           8   //(sizeof(ExifAsciiPrefix))
+
+typedef enum {
+    QCAMERA_NOTIFY_CALLBACK,
+    QCAMERA_DATA_CALLBACK,
+    QCAMERA_DATA_TIMESTAMP_CALLBACK,
+    QCAMERA_DATA_SNAPSHOT_CALLBACK
+} qcamera_callback_type_m;
+
+typedef void (*camera_release_callback)(void *user_data,
+                                        void *cookie,
+                                        int32_t cb_status);
+typedef void (*jpeg_data_callback)(int32_t msg_type,
+        const camera_memory_t *data, unsigned int index,
+        camera_frame_metadata_t *metadata, void *user,
+        uint32_t frame_idx, camera_release_callback release_cb,
+        void *release_cookie, void *release_data);
+
+typedef struct {
+    qcamera_callback_type_m  cb_type;    // event type
+    int32_t                  msg_type;   // msg type
+    int32_t                  ext1;       // extended parameter
+    int32_t                  ext2;       // extended parameter
+    camera_memory_t *        data;       // ptr to data memory struct
+    unsigned int             index;      // index of the buf in the whole buffer
+    int64_t                  timestamp;  // buffer timestamp
+    camera_frame_metadata_t *metadata;   // meta data
+    void                    *user_data;  // any data needs to be released after callback
+    void                    *cookie;     // release callback cookie
+    camera_release_callback  release_cb; // release callback
+    uint32_t                 frame_index;  // frame index for the buffer
+} qcamera_callback_argm_t;
+
+class QCameraCbNotifier {
+public:
+    QCameraCbNotifier(QCamera2HardwareInterface *parent) :
+                          mNotifyCb (NULL),
+                          mDataCb (NULL),
+                          mDataCbTimestamp (NULL),
+                          mCallbackCookie (NULL),
+                          mJpegCb(NULL),
+                          mJpegCallbackCookie(NULL),
+                          mParent (parent),
+                          mDataQ(releaseNotifications, this),
+                          mActive(false){}
+
+    virtual ~QCameraCbNotifier();
+
+    virtual int32_t notifyCallback(qcamera_callback_argm_t &cbArgs);
+    virtual void setCallbacks(camera_notify_callback notifyCb,
+                              camera_data_callback dataCb,
+                              camera_data_timestamp_callback dataCbTimestamp,
+                              void *callbackCookie);
+    virtual void setJpegCallBacks(
+            jpeg_data_callback jpegCb, void *callbackCookie);
+    virtual int32_t startSnapshots();
+    virtual void stopSnapshots();
+    virtual void exit();
+    static void * cbNotifyRoutine(void * data);
+    static void releaseNotifications(void *data, void *user_data);
+    static bool matchSnapshotNotifications(void *data, void *user_data);
+    static bool matchPreviewNotifications(void *data, void *user_data);
+    static bool matchTimestampNotifications(void *data, void *user_data);
+    virtual int32_t flushPreviewNotifications();
+    virtual int32_t flushVideoNotifications();
+private:
+
+    camera_notify_callback         mNotifyCb;
+    camera_data_callback           mDataCb;
+    camera_data_timestamp_callback mDataCbTimestamp;
+    void                          *mCallbackCookie;
+    jpeg_data_callback             mJpegCb;
+    void                          *mJpegCallbackCookie;
+    QCamera2HardwareInterface     *mParent;
+
+    QCameraQueue     mDataQ;
+    QCameraCmdThread mProcTh;
+    bool             mActive;
+};
+class QCamera2HardwareInterface : public QCameraAllocator,
+        public QCameraThermalCallback, public QCameraAdjustFPS
+{
+public:
+    /* static variable and functions accessed by camera service */
+    static camera_device_ops_t mCameraOps;
+
+    static int set_preview_window(struct camera_device *,
+        struct preview_stream_ops *window);
+    static void set_CallBacks(struct camera_device *,
+        camera_notify_callback notify_cb,
+        camera_data_callback data_cb,
+        camera_data_timestamp_callback data_cb_timestamp,
+        camera_request_memory get_memory,
+        void *user);
+    static void enable_msg_type(struct camera_device *, int32_t msg_type);
+    static void disable_msg_type(struct camera_device *, int32_t msg_type);
+    static int msg_type_enabled(struct camera_device *, int32_t msg_type);
+    static int start_preview(struct camera_device *);
+    static void stop_preview(struct camera_device *);
+    static int preview_enabled(struct camera_device *);
+    static int store_meta_data_in_buffers(struct camera_device *, int enable);
+    static int restart_start_preview(struct camera_device *);
+    static int restart_stop_preview(struct camera_device *);
+    static int pre_start_recording(struct camera_device *);
+    static int start_recording(struct camera_device *);
+    static void stop_recording(struct camera_device *);
+    static int recording_enabled(struct camera_device *);
+    static void release_recording_frame(struct camera_device *, const void *opaque);
+    static int auto_focus(struct camera_device *);
+    static int cancel_auto_focus(struct camera_device *);
+    static int pre_take_picture(struct camera_device *);
+    static int take_picture(struct camera_device *);
+    int takeLiveSnapshot_internal();
+    int cancelLiveSnapshot_internal();
+    int takeBackendPic_internal(bool *JpegMemOpt, char *raw_format);
+    void clearIntPendingEvents();
+    void checkIntPicPending(bool JpegMemOpt, char *raw_format);
+    static int cancel_picture(struct camera_device *);
+    static int set_parameters(struct camera_device *, const char *parms);
+    static int stop_after_set_params(struct camera_device *);
+    static int commit_params(struct camera_device *);
+    static int restart_after_set_params(struct camera_device *);
+    static char* get_parameters(struct camera_device *);
+    static void put_parameters(struct camera_device *, char *);
+    static int send_command(struct camera_device *,
+              int32_t cmd, int32_t arg1, int32_t arg2);
+    static int send_command_restart(struct camera_device *,
+            int32_t cmd, int32_t arg1, int32_t arg2);
+    static void release(struct camera_device *);
+    static int dump(struct camera_device *, int fd);
+    static int close_camera_device(hw_device_t *);
+
+    static int register_face_image(struct camera_device *,
+                                   void *img_ptr,
+                                   cam_pp_offline_src_config_t *config);
+    static int prepare_preview(struct camera_device *);
+    static int prepare_snapshot(struct camera_device *device);
+
+public:
+    QCamera2HardwareInterface(uint32_t cameraId);
+    virtual ~QCamera2HardwareInterface();
+    int openCamera(struct hw_device_t **hw_device);
+
+    // Dual camera specific oprations
+    int bundleRelatedCameras(bool syncOn,
+            uint32_t related_sensor_session_id);
+    int getCameraSessionId(uint32_t* session_id);
+    const cam_sync_related_sensors_event_info_t* getRelatedCamSyncInfo(
+            void);
+    int32_t setRelatedCamSyncInfo(
+            cam_sync_related_sensors_event_info_t* info);
+    bool isFrameSyncEnabled(void);
+    int32_t setFrameSyncEnabled(bool enable);
+    int32_t setMpoComposition(bool enable);
+    bool getMpoComposition(void);
+    bool getRecordingHintValue(void);
+    int32_t setRecordingHintValue(int32_t value);
+    bool isPreviewRestartNeeded(void) { return mPreviewRestartNeeded; };
+    static int getCapabilities(uint32_t cameraId,
+            struct camera_info *info, cam_sync_type_t *cam_type);
+    static int initCapabilities(uint32_t cameraId, mm_camera_vtbl_t *cameraHandle);
+    cam_capability_t *getCamHalCapabilities();
+
+    // Implementation of QCameraAllocator
+    virtual QCameraMemory *allocateStreamBuf(cam_stream_type_t stream_type,
+            size_t size, int stride, int scanline, uint8_t &bufferCnt);
+    virtual int32_t allocateMoreStreamBuf(QCameraMemory *mem_obj,
+            size_t size, uint8_t &bufferCnt);
+
+    virtual QCameraHeapMemory *allocateStreamInfoBuf(cam_stream_type_t stream_type);
+    virtual QCameraHeapMemory *allocateMiscBuf(cam_stream_info_t *streamInfo);
+    virtual QCameraMemory *allocateStreamUserBuf(cam_stream_info_t *streamInfo);
+    virtual void waitForDeferredAlloc(cam_stream_type_t stream_type);
+
+    // Implementation of QCameraThermalCallback
+    virtual int thermalEvtHandle(qcamera_thermal_level_enum_t *level,
+            void *userdata, void *data);
+
+    virtual int recalcFPSRange(int &minFPS, int &maxFPS,
+            const float &minVideoFPS, const float &maxVideoFPS,
+            cam_fps_range_t &adjustedRange);
+
+    friend class QCameraStateMachine;
+    friend class QCameraPostProcessor;
+    friend class QCameraCbNotifier;
+    friend class QCameraMuxer;
+
+    void setJpegCallBacks(jpeg_data_callback jpegCb,
+            void *callbackCookie);
+    int32_t initJpegHandle();
+    int32_t deinitJpegHandle();
+    int32_t setJpegHandleInfo(mm_jpeg_ops_t *ops,
+            mm_jpeg_mpo_ops_t *mpo_ops, uint32_t pJpegClientHandle);
+    int32_t getJpegHandleInfo(mm_jpeg_ops_t *ops,
+            mm_jpeg_mpo_ops_t *mpo_ops, uint32_t *pJpegClientHandle);
+    uint32_t getCameraId() { return mCameraId; };
+    bool bLiveSnapshot;
+private:
+    int setPreviewWindow(struct preview_stream_ops *window);
+    int setCallBacks(
+        camera_notify_callback notify_cb,
+        camera_data_callback data_cb,
+        camera_data_timestamp_callback data_cb_timestamp,
+        camera_request_memory get_memory,
+        void *user);
+    int enableMsgType(int32_t msg_type);
+    int disableMsgType(int32_t msg_type);
+    int msgTypeEnabled(int32_t msg_type);
+    int msgTypeEnabledWithLock(int32_t msg_type);
+    int startPreview();
+    int stopPreview();
+    int storeMetaDataInBuffers(int enable);
+    int preStartRecording();
+    int startRecording();
+    int stopRecording();
+    int releaseRecordingFrame(const void *opaque);
+    int autoFocus();
+    int cancelAutoFocus();
+    int preTakePicture();
+    int takePicture();
+    int stopCaptureChannel(bool destroy);
+    int cancelPicture();
+    int takeLiveSnapshot();
+    int takePictureInternal();
+    int cancelLiveSnapshot();
+    char* getParameters() {return mParameters.getParameters(); }
+    int putParameters(char *);
+    int sendCommand(int32_t cmd, int32_t &arg1, int32_t &arg2);
+    int release();
+    int dump(int fd);
+    int registerFaceImage(void *img_ptr,
+                          cam_pp_offline_src_config_t *config,
+                          int32_t &faceID);
+    int32_t longShot();
+
+    uint32_t deferPPInit();
+    int openCamera();
+    int closeCamera();
+
+    int processAPI(qcamera_sm_evt_enum_t api, void *api_payload);
+    int processEvt(qcamera_sm_evt_enum_t evt, void *evt_payload);
+    int processSyncEvt(qcamera_sm_evt_enum_t evt, void *evt_payload);
+    void lockAPI();
+    void waitAPIResult(qcamera_sm_evt_enum_t api_evt, qcamera_api_result_t *apiResult);
+    void unlockAPI();
+    void signalAPIResult(qcamera_api_result_t *result);
+    void signalEvtResult(qcamera_api_result_t *result);
+
+    int calcThermalLevel(qcamera_thermal_level_enum_t level,
+            const int minFPSi, const int maxFPSi,
+            const float &minVideoFPS, const float &maxVideoFPS,
+            cam_fps_range_t &adjustedRange,
+            enum msm_vfe_frame_skip_pattern &skipPattern);
+    int updateThermalLevel(void *level);
+
+    // update entries to set parameters and check if restart is needed
+    int updateParameters(const char *parms, bool &needRestart);
+    // send request to server to set parameters
+    int commitParameterChanges();
+
+    bool isCaptureShutterEnabled();
+    bool needDebugFps();
+    bool isRegularCapture();
+    bool isCACEnabled();
+    bool is4k2kResolution(cam_dimension_t* resolution);
+    bool isPreviewRestartEnabled();
+    bool needReprocess();
+    bool needRotationReprocess();
+    void debugShowVideoFPS();
+    void debugShowPreviewFPS();
+    void dumpJpegToFile(const void *data, size_t size, uint32_t index);
+    void dumpFrameToFile(QCameraStream *stream,
+            mm_camera_buf_def_t *frame, uint32_t dump_type, const char *misc = NULL);
+    void dumpMetadataToFile(QCameraStream *stream,
+                            mm_camera_buf_def_t *frame,char *type);
+    void releaseSuperBuf(mm_camera_super_buf_t *super_buf);
+    void playShutter();
+    void getThumbnailSize(cam_dimension_t &dim);
+    uint32_t getJpegQuality();
+    QCameraExif *getExifData();
+    cam_sensor_t getSensorType();
+    bool isLowPowerMode();
+    nsecs_t getBootToMonoTimeOffset();
+
+    int32_t processAutoFocusEvent(cam_auto_focus_data_t &focus_data);
+    int32_t processZoomEvent(cam_crop_data_t &crop_info);
+    int32_t processPrepSnapshotDoneEvent(cam_prep_snapshot_state_t prep_snapshot_state);
+    int32_t processASDUpdate(cam_asd_decision_t asd_decision);
+    int32_t processJpegNotify(qcamera_jpeg_evt_payload_t *jpeg_job);
+    int32_t processHDRData(cam_asd_hdr_scene_data_t hdr_scene);
+    int32_t processRetroAECUnlock();
+    int32_t processZSLCaptureDone();
+    int32_t processSceneData(cam_scene_mode_type scene);
+    int32_t transAwbMetaToParams(cam_awb_params_t &awb_params);
+    int32_t processFocusPositionInfo(cam_focus_pos_info_t &cur_pos_info);
+    int32_t processAEInfo(cam_3a_params_t &ae_params);
+
+    int32_t sendEvtNotify(int32_t msg_type, int32_t ext1, int32_t ext2);
+    int32_t sendDataNotify(int32_t msg_type,
+            camera_memory_t *data,
+            uint8_t index,
+            camera_frame_metadata_t *metadata,
+            uint32_t frame_idx);
+
+    int32_t sendPreviewCallback(QCameraStream *stream,
+            QCameraMemory *memory, uint32_t idx);
+    int32_t selectScene(QCameraChannel *pChannel,
+            mm_camera_super_buf_t *recvd_frame);
+
+    int32_t addChannel(qcamera_ch_type_enum_t ch_type);
+    int32_t startChannel(qcamera_ch_type_enum_t ch_type);
+    int32_t stopChannel(qcamera_ch_type_enum_t ch_type);
+    int32_t delChannel(qcamera_ch_type_enum_t ch_type, bool destroy = true);
+    int32_t addPreviewChannel();
+    int32_t addSnapshotChannel();
+    int32_t addVideoChannel();
+    int32_t addZSLChannel();
+    int32_t addCaptureChannel();
+    int32_t addRawChannel();
+    int32_t addMetaDataChannel();
+    int32_t addAnalysisChannel();
+    QCameraReprocessChannel *addReprocChannel(QCameraChannel *pInputChannel,
+            int8_t cur_channel_index = 0);
+    QCameraReprocessChannel *addOfflineReprocChannel(
+                                                cam_pp_offline_src_config_t &img_config,
+                                                cam_pp_feature_config_t &pp_feature,
+                                                stream_cb_routine stream_cb,
+                                                void *userdata);
+    int32_t addCallbackChannel();
+
+    int32_t addStreamToChannel(QCameraChannel *pChannel,
+                               cam_stream_type_t streamType,
+                               stream_cb_routine streamCB,
+                               void *userData);
+    int32_t preparePreview();
+    void unpreparePreview();
+    int32_t prepareRawStream(QCameraChannel *pChannel);
+    QCameraChannel *getChannelByHandle(uint32_t channelHandle);
+    mm_camera_buf_def_t *getSnapshotFrame(mm_camera_super_buf_t *recvd_frame);
+    int32_t processFaceDetectionResult(cam_faces_data_t *fd_data);
+    bool needPreviewFDCallback(uint8_t num_faces);
+    int32_t processHistogramStats(cam_hist_stats_t &stats_data);
+    int32_t setHistogram(bool histogram_en);
+    int32_t setFaceDetection(bool enabled);
+    int32_t prepareHardwareForSnapshot(int32_t afNeeded);
+    bool needProcessPreviewFrame(uint32_t frameID);
+    bool needSendPreviewCallback();
+    bool isNoDisplayMode() {return mParameters.isNoDisplayMode();};
+    bool isZSLMode() {return mParameters.isZSLMode();};
+    bool isRdiMode() {return mParameters.isRdiMode();};
+    uint8_t numOfSnapshotsExpected() {
+        return mParameters.getNumOfSnapshots();};
+    bool isSecureMode() {return mParameters.isSecureMode();};
+    bool isLongshotEnabled() { return mLongshotEnabled; };
+    bool isHFRMode() {return mParameters.isHfrMode();};
+    bool isLiveSnapshot() {return m_stateMachine.isRecording();};
+    void setRetroPicture(bool enable) { bRetroPicture = enable; };
+    bool isRetroPicture() {return bRetroPicture; };
+    bool isHDRMode() {return mParameters.isHDREnabled();};
+    uint8_t getBufNumRequired(cam_stream_type_t stream_type);
+    bool needFDMetadata(qcamera_ch_type_enum_t channel_type);
+    int32_t configureOnlineRotation(QCameraChannel &ch);
+    int32_t declareSnapshotStreams();
+    int32_t unconfigureAdvancedCapture();
+    int32_t configureAdvancedCapture();
+    int32_t configureAFBracketing(bool enable = true);
+    int32_t configureHDRBracketing();
+    int32_t stopAdvancedCapture(QCameraPicChannel *pChannel);
+    int32_t startAdvancedCapture(QCameraPicChannel *pChannel);
+    int32_t configureOptiZoom();
+    int32_t configureStillMore();
+    int32_t configureAEBracketing();
+    int32_t updatePostPreviewParameters();
+    inline void setOutputImageCount(uint32_t aCount) {mOutputCount = aCount;}
+    inline uint32_t getOutputImageCount() {return mOutputCount;}
+    bool processUFDumps(qcamera_jpeg_evt_payload_t *evt);
+    void captureDone();
+    int32_t updateMetadata(metadata_buffer_t *pMetaData);
+    void fillFacesData(cam_faces_data_t &faces_data, metadata_buffer_t *metadata);
+
+    int32_t getPPConfig(cam_pp_feature_config_t &pp_config,
+            int8_t curIndex = 0, bool multipass = FALSE);
+    virtual uint32_t scheduleBackgroundTask(BackgroundTask* bgTask);
+    virtual int32_t waitForBackgroundTask(uint32_t &taskId);
+    bool needDeferred(cam_stream_type_t stream_type);
+    static void camEvtHandle(uint32_t camera_handle,
+                          mm_camera_event_t *evt,
+                          void *user_data);
+    static void jpegEvtHandle(jpeg_job_status_t status,
+                              uint32_t client_hdl,
+                              uint32_t jobId,
+                              mm_jpeg_output_t *p_buf,
+                              void *userdata);
+
+    static void *evtNotifyRoutine(void *data);
+
+    // functions for different data notify cb
+    static void zsl_channel_cb(mm_camera_super_buf_t *recvd_frame, void *userdata);
+    static void capture_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
+                                           void *userdata);
+    static void postproc_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
+                                            void *userdata);
+    static void rdi_mode_stream_cb_routine(mm_camera_super_buf_t *frame,
+                                              QCameraStream *stream,
+                                              void *userdata);
+    static void nodisplay_preview_stream_cb_routine(mm_camera_super_buf_t *frame,
+                                                    QCameraStream *stream,
+                                                    void *userdata);
+    static void preview_stream_cb_routine(mm_camera_super_buf_t *frame,
+                                          QCameraStream *stream,
+                                          void *userdata);
+    static void synchronous_stream_cb_routine(mm_camera_super_buf_t *frame,
+            QCameraStream *stream, void *userdata);
+    static void postview_stream_cb_routine(mm_camera_super_buf_t *frame,
+                                           QCameraStream *stream,
+                                           void *userdata);
+    static void video_stream_cb_routine(mm_camera_super_buf_t *frame,
+                                        QCameraStream *stream,
+                                        void *userdata);
+    static void snapshot_channel_cb_routine(mm_camera_super_buf_t *frame,
+           void *userdata);
+    static void raw_channel_cb_routine(mm_camera_super_buf_t *frame,
+            void *userdata);
+    static void raw_stream_cb_routine(mm_camera_super_buf_t *frame,
+                                      QCameraStream *stream,
+                                      void *userdata);
+    static void preview_raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
+                                              QCameraStream * stream,
+                                              void * userdata);
+    static void snapshot_raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
+                                               QCameraStream * stream,
+                                               void * userdata);
+    static void metadata_stream_cb_routine(mm_camera_super_buf_t *frame,
+                                           QCameraStream *stream,
+                                           void *userdata);
+    static void callback_stream_cb_routine(mm_camera_super_buf_t *frame,
+            QCameraStream *stream, void *userdata);
+    static void reprocess_stream_cb_routine(mm_camera_super_buf_t *frame,
+                                            QCameraStream *stream,
+                                            void *userdata);
+
+    static void releaseCameraMemory(void *data,
+                                    void *cookie,
+                                    int32_t cbStatus);
+    static void returnStreamBuffer(void *data,
+                                   void *cookie,
+                                   int32_t cbStatus);
+    static void getLogLevel();
+
+    int32_t startRAWChannel(QCameraChannel *pChannel);
+    int32_t stopRAWChannel();
+
+    inline bool getNeedRestart() {return m_bNeedRestart;}
+    inline void setNeedRestart(bool needRestart) {m_bNeedRestart = needRestart;}
+
+    /*Start display skip. Skip starts after
+    skipCnt number of frames from current frame*/
+    void setDisplaySkip(bool enabled, uint8_t skipCnt = 0);
+    /*Caller can specify range frameID to skip.
+    if end is 0, all the frames after start will be skipped*/
+    void setDisplayFrameSkip(uint32_t start = 0, uint32_t end = 0);
+    /*Verifies if frameId is valid to skip*/
+    bool isDisplayFrameToSkip(uint32_t frameId);
+
+private:
+    camera_device_t   mCameraDevice;
+    uint32_t          mCameraId;
+    mm_camera_vtbl_t *mCameraHandle;
+    bool mCameraOpened;
+
+    cam_jpeg_metadata_t mJpegMetadata;
+    bool m_bRelCamCalibValid;
+
+    preview_stream_ops_t *mPreviewWindow;
+    QCameraParametersIntf mParameters;
+    int32_t               mMsgEnabled;
+    int                   mStoreMetaDataInFrame;
+
+    camera_notify_callback         mNotifyCb;
+    camera_data_callback           mDataCb;
+    camera_data_timestamp_callback mDataCbTimestamp;
+    camera_request_memory          mGetMemory;
+    jpeg_data_callback             mJpegCb;
+    void                          *mCallbackCookie;
+    void                          *mJpegCallbackCookie;
+    bool                           m_bMpoEnabled;
+
+    QCameraStateMachine m_stateMachine;   // state machine
+    bool m_smThreadActive;
+    QCameraPostProcessor m_postprocessor; // post processor
+    QCameraThermalAdapter &m_thermalAdapter;
+    QCameraCbNotifier m_cbNotifier;
+    QCameraPerfLock m_perfLock;
+    pthread_mutex_t m_lock;
+    pthread_cond_t m_cond;
+    api_result_list *m_apiResultList;
+    QCameraMemoryPool m_memoryPool;
+
+    pthread_mutex_t m_evtLock;
+    pthread_cond_t m_evtCond;
+    qcamera_api_result_t m_evtResult;
+
+
+    QCameraChannel *m_channels[QCAMERA_CH_TYPE_MAX]; // array holding channel ptr
+
+    bool m_bPreviewStarted;             //flag indicates first preview frame callback is received
+    bool m_bRecordStarted;             //flag indicates Recording is started for first time
+
+    // Signifies if ZSL Retro Snapshots are enabled
+    bool bRetroPicture;
+    // Signifies AEC locked during zsl snapshots
+    bool m_bLedAfAecLock;
+    cam_af_state_t m_currentFocusState;
+
+    uint32_t mDumpFrmCnt;  // frame dump count
+    uint32_t mDumpSkipCnt; // frame skip count
+    mm_jpeg_exif_params_t mExifParams;
+    qcamera_thermal_level_enum_t mThermalLevel;
+    bool mActiveAF;
+    bool m_HDRSceneEnabled;
+    bool mLongshotEnabled;
+
+    pthread_t mLiveSnapshotThread;
+    pthread_t mIntPicThread;
+    bool mFlashNeeded;
+    uint32_t mDeviceRotation;
+    uint32_t mCaptureRotation;
+    uint32_t mJpegExifRotation;
+    bool mUseJpegExifRotation;
+    bool mIs3ALocked;
+    bool mPrepSnapRun;
+    int32_t mZoomLevel;
+    // Flag to indicate whether preview restart needed (for dual camera mode)
+    bool mPreviewRestartNeeded;
+
+    int mVFrameCount;
+    int mVLastFrameCount;
+    nsecs_t mVLastFpsTime;
+    double mVFps;
+    int mPFrameCount;
+    int mPLastFrameCount;
+    nsecs_t mPLastFpsTime;
+    double mPFps;
+    uint8_t mInstantAecFrameCount;
+
+    //eztune variables for communication with eztune server at backend
+    bool m_bIntJpegEvtPending;
+    bool m_bIntRawEvtPending;
+    char m_BackendFileName[QCAMERA_MAX_FILEPATH_LENGTH];
+    size_t mBackendFileSize;
+    pthread_mutex_t m_int_lock;
+    pthread_cond_t m_int_cond;
+
+    enum DeferredWorkCmd {
+        CMD_DEF_ALLOCATE_BUFF,
+        CMD_DEF_PPROC_START,
+        CMD_DEF_PPROC_INIT,
+        CMD_DEF_METADATA_ALLOC,
+        CMD_DEF_CREATE_JPEG_SESSION,
+        CMD_DEF_PARAM_ALLOC,
+        CMD_DEF_PARAM_INIT,
+        CMD_DEF_GENERIC,
+        CMD_DEF_MAX
+    };
+
+    typedef struct {
+        QCameraChannel *ch;
+        cam_stream_type_t type;
+    } DeferAllocBuffArgs;
+
+    typedef struct {
+        uint8_t bufferCnt;
+        size_t size;
+    } DeferMetadataAllocArgs;
+
+    typedef struct {
+        jpeg_encode_callback_t jpeg_cb;
+        void *user_data;
+    } DeferPProcInitArgs;
+
+    typedef union {
+        DeferAllocBuffArgs allocArgs;
+        QCameraChannel *pprocArgs;
+        DeferMetadataAllocArgs metadataAllocArgs;
+        DeferPProcInitArgs pprocInitArgs;
+        BackgroundTask *genericArgs;
+    } DeferWorkArgs;
+
+    typedef struct {
+        uint32_t mDefJobId;
+
+        //Job status is needed to check job was successful or failed
+        //Error code when job was not successful and there is an error
+        //0 when it is initialized.
+        //for successful job, do not need to maintain job status
+        int32_t mDefJobStatus;
+    } DefOngoingJob;
+
+    DefOngoingJob mDefOngoingJobs[MAX_ONGOING_JOBS];
+
+    struct DefWork
+    {
+        DefWork(DeferredWorkCmd cmd_,
+                 uint32_t id_,
+                 DeferWorkArgs args_)
+            : cmd(cmd_),
+              id(id_),
+              args(args_){};
+
+        DeferredWorkCmd cmd;
+        uint32_t id;
+        DeferWorkArgs args;
+    };
+
+    QCameraCmdThread      mDeferredWorkThread;
+    QCameraQueue          mCmdQueue;
+
+    Mutex                 mDefLock;
+    Condition             mDefCond;
+
+    uint32_t queueDeferredWork(DeferredWorkCmd cmd,
+                               DeferWorkArgs args);
+    uint32_t dequeueDeferredWork(DefWork* dw, int32_t jobStatus);
+    int32_t waitDeferredWork(uint32_t &job_id);
+    static void *deferredWorkRoutine(void *obj);
+    bool checkDeferredWork(uint32_t &job_id);
+    int32_t getDefJobStatus(uint32_t &job_id);
+
+    uint32_t mReprocJob;
+    uint32_t mJpegJob;
+    uint32_t mMetadataAllocJob;
+    uint32_t mInitPProcJob;
+    uint32_t mParamAllocJob;
+    uint32_t mParamInitJob;
+    uint32_t mOutputCount;
+    uint32_t mInputCount;
+    bool mAdvancedCaptureConfigured;
+    bool mHDRBracketingEnabled;
+    int32_t mNumPreviewFaces;
+    // Jpeg Handle shared between HWI instances
+    mm_jpeg_ops_t         mJpegHandle;
+    // MPO handle shared between HWI instances
+    // this is needed for MPO composition of related
+    // cam images
+    mm_jpeg_mpo_ops_t     mJpegMpoHandle;
+    uint32_t              mJpegClientHandle;
+    bool                  mJpegHandleOwner;
+   //ts add for makeup
+#ifdef TARGET_TS_MAKEUP
+    TSRect mFaceRect;
+    bool TsMakeupProcess_Preview(mm_camera_buf_def_t *pFrame,QCameraStream * pStream);
+    bool TsMakeupProcess_Snapshot(mm_camera_buf_def_t *pFrame,QCameraStream * pStream);
+    bool TsMakeupProcess(mm_camera_buf_def_t *frame,QCameraStream * stream,TSRect& faceRect);
+#endif
+    QCameraMemory *mMetadataMem;
+    QCameraVideoMemory *mVideoMem;
+
+    static uint32_t sNextJobId;
+
+    //Gralloc memory details
+    pthread_mutex_t mGrallocLock;
+    uint8_t mEnqueuedBuffers;
+    bool mCACDoneReceived;
+
+    //GPU library to read buffer padding details.
+    void *lib_surface_utils;
+    int (*LINK_get_surface_pixel_alignment)();
+    uint32_t mSurfaceStridePadding;
+
+    //QCamera Display Object
+    QCameraDisplay mCameraDisplay;
+
+    bool m_bNeedRestart;
+    Mutex mMapLock;
+    Condition mMapCond;
+
+    //Used to decide the next frameID to be skipped
+    uint32_t mLastPreviewFrameID;
+    //FrameID to start frame skip.
+    uint32_t mFrameSkipStart;
+    /*FrameID to stop frameskip. If this is not set,
+    all frames are skipped till we set this*/
+    uint32_t mFrameSkipEnd;
+    //The offset between BOOTTIME and MONOTONIC timestamps
+    nsecs_t mBootToMonoTimestampOffset;
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA2HARDWAREINTERFACE_H__ */
diff --git a/msmcobalt/QCamera2/HAL/QCamera2HWICallbacks.cpp b/msmcobalt/QCamera2/HAL/QCamera2HWICallbacks.cpp
new file mode 100644
index 0000000..93a5271
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/QCamera2HWICallbacks.cpp
@@ -0,0 +1,3529 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCamera2HWI"
+
+// System dependencies
+#include <fcntl.h>
+#include <stdio.h>
+#include <stdlib.h>
+#define STAT_H <SYSTEM_HEADER_PREFIX/stat.h>
+#include STAT_H
+#include <utils/Errors.h>
+
+// Camera dependencies
+#include "QCamera2HWI.h"
+#include "QCameraTrace.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : zsl_channel_cb
+ *
+ * DESCRIPTION: helper function to handle ZSL superbuf callback directly from
+ *              mm-camera-interface
+ *
+ * PARAMETERS :
+ *   @recvd_frame : received super buffer
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : recvd_frame will be released after this call by caller, so if
+ *             async operation needed for recvd_frame, it's our responsibility
+ *             to save a copy for this variable to be used later.
+ *==========================================================================*/
+void QCamera2HardwareInterface::zsl_channel_cb(mm_camera_super_buf_t *recvd_frame,
+                                               void *userdata)
+{
+    ATRACE_CALL();
+    LOGH("[KPI Perf]: E");
+    char value[PROPERTY_VALUE_MAX];
+    bool dump_raw = false;
+    bool log_matching = false;
+    // Validate the callback cookie: it must be a live HWI instance and the
+    // superbuf must belong to this camera session.
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != recvd_frame->camera_handle){
+       LOGE("camera obj not valid");
+       return;
+    }
+
+    QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_ZSL];
+    if (pChannel == NULL ||
+        pChannel->getMyHandle() != recvd_frame->ch_id) {
+        LOGE("ZSL channel doesn't exist, return here");
+        return;
+    }
+
+    // Scene-selection mode: consume the frame for scene detection only and
+    // return the buffers immediately; no capture processing happens here.
+    if(pme->mParameters.isSceneSelectionEnabled() &&
+            !pme->m_stateMachine.isCaptureRunning()) {
+        pme->selectScene(pChannel, recvd_frame);
+        pChannel->bufDone(recvd_frame);
+        return;
+    }
+
+    LOGD("Frame CB Unlock : %d, is AEC Locked: %d",
+           recvd_frame->bUnlockAEC, pme->m_bLedAfAecLock);
+    // Retro-active (LED-assisted) capture: forward an internal event to the
+    // state machine to release the AEC lock.  On processEvt failure the
+    // payload is freed here; on success the state machine owns it.
+    if(recvd_frame->bUnlockAEC && pme->m_bLedAfAecLock) {
+        qcamera_sm_internal_evt_payload_t *payload =
+                (qcamera_sm_internal_evt_payload_t *)malloc(
+                        sizeof(qcamera_sm_internal_evt_payload_t));
+        if (NULL != payload) {
+            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+            payload->evt_type = QCAMERA_INTERNAL_EVT_RETRO_AEC_UNLOCK;
+            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+            if (rc != NO_ERROR) {
+                LOGE("processEvt for retro AEC unlock failed");
+                free(payload);
+                payload = NULL;
+            }
+        } else {
+            LOGE("No memory for retro AEC event");
+        }
+    }
+
+    // Check if retro-active frames are completed and camera is
+    // ready to go ahead with LED estimation for regular frames
+    if (recvd_frame->bReadyForPrepareSnapshot) {
+        // Send an event
+        LOGD("Ready for Prepare Snapshot, signal ");
+        qcamera_sm_internal_evt_payload_t *payload =
+                    (qcamera_sm_internal_evt_payload_t *)malloc(
+                    sizeof(qcamera_sm_internal_evt_payload_t));
+        if (NULL != payload) {
+            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+            payload->evt_type = QCAMERA_INTERNAL_EVT_READY_FOR_SNAPSHOT;
+            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+            if (rc != NO_ERROR) {
+                LOGW("processEvt Ready for Snaphot failed");
+                free(payload);
+                payload = NULL;
+            }
+        } else {
+            LOGE("No memory for prepare signal event detect"
+                    " qcamera_sm_internal_evt_payload_t");
+        }
+    }
+
+    /* indicate the parent that capture is done */
+    pme->captureDone();
+
+    // save a copy for the superbuf
+    // The caller releases recvd_frame after this callback returns, so a
+    // shallow copy of the superbuf descriptor (buffer pointers included)
+    // is kept for the asynchronous postprocessor path.
+    mm_camera_super_buf_t* frame =
+               (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+    if (frame == NULL) {
+        LOGE("Error allocating memory to save received_frame structure.");
+        pChannel->bufDone(recvd_frame);
+        return;
+    }
+    *frame = *recvd_frame;
+
+    if (recvd_frame->num_bufs > 0) {
+        LOGI("[KPI Perf]: superbuf frame_idx %d",
+            recvd_frame->bufs[0]->frame_idx);
+    }
+
+    // DUMP RAW if available
+    // Debug hook: persist.camera.zsl_raw > 0 dumps the first RAW buffer of
+    // the superbuf to file.
+    property_get("persist.camera.zsl_raw", value, "0");
+    dump_raw = atoi(value) > 0 ? true : false;
+    if (dump_raw) {
+        for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
+            if (recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_RAW) {
+                mm_camera_buf_def_t * raw_frame = recvd_frame->bufs[i];
+                QCameraStream *pStream = pChannel->getStreamByHandle(raw_frame->stream_id);
+                if (NULL != pStream) {
+                    pme->dumpFrameToFile(pStream, raw_frame, QCAMERA_DUMP_FRM_RAW);
+                }
+                break;
+            }
+        }
+    }
+
+    // Debug dump of the snapshot YUV that feeds reprocess (first snapshot
+    // buffer only).
+    for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
+        if (recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_SNAPSHOT) {
+            mm_camera_buf_def_t * yuv_frame = recvd_frame->bufs[i];
+            QCameraStream *pStream = pChannel->getStreamByHandle(yuv_frame->stream_id);
+            if (NULL != pStream) {
+                pme->dumpFrameToFile(pStream, yuv_frame, QCAMERA_DUMP_FRM_INPUT_REPROCESS);
+            }
+            break;
+        }
+    }
+    //
+    // whether need FD Metadata along with Snapshot frame in ZSL mode
+    if(pme->needFDMetadata(QCAMERA_CH_TYPE_ZSL)){
+        //Need Face Detection result for snapshot frames
+        //Get the Meta Data frames
+        mm_camera_buf_def_t *pMetaFrame = NULL;
+        for (uint32_t i = 0; i < frame->num_bufs; i++) {
+            QCameraStream *pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+            if (pStream != NULL) {
+                if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+                    pMetaFrame = frame->bufs[i]; //find the metadata
+                    break;
+                }
+            }
+        }
+
+        if(pMetaFrame != NULL){
+            metadata_buffer_t *pMetaData = (metadata_buffer_t *)pMetaFrame->buffer;
+            //send the face detection info
+            cam_faces_data_t faces_data;
+            pme->fillFacesData(faces_data, pMetaData);
+            //HARD CODE here before MCT can support
+            faces_data.detection_data.fd_type = QCAMERA_FD_SNAPSHOT;
+
+            // Same payload ownership convention as above: freed here only on
+            // processEvt failure.
+            qcamera_sm_internal_evt_payload_t *payload =
+                (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+            if (NULL != payload) {
+                memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+                payload->evt_type = QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT;
+                payload->faces_data = faces_data;
+                int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+                if (rc != NO_ERROR) {
+                    LOGW("processEvt face_detection_result failed");
+                    free(payload);
+                    payload = NULL;
+                }
+            } else {
+                LOGE("No memory for face_detection_result qcamera_sm_internal_evt_payload_t");
+            }
+        }
+    }
+
+    // Debug hook: dump tuning metadata when persist.camera.dumpmetadata is set.
+    property_get("persist.camera.dumpmetadata", value, "0");
+    int32_t enabled = atoi(value);
+    if (enabled) {
+        mm_camera_buf_def_t *pMetaFrame = NULL;
+        QCameraStream *pStream = NULL;
+        for (uint32_t i = 0; i < frame->num_bufs; i++) {
+            pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+            if (pStream != NULL) {
+                if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+                    pMetaFrame = frame->bufs[i];
+                    if (pMetaFrame != NULL &&
+                            ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
+                        pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "ZSL_Snapshot");
+                    }
+                    break;
+                }
+            }
+        }
+    }
+
+    // Debug hook: log per-stream indices/timestamps of the matched superbuf.
+    property_get("persist.camera.zsl_matching", value, "0");
+    log_matching = atoi(value) > 0 ? true : false;
+    if (log_matching) {
+        LOGH("ZSL super buffer contains:");
+        QCameraStream *pStream = NULL;
+        for (uint32_t i = 0; i < frame->num_bufs; i++) {
+            pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+            if (pStream != NULL ) {
+                LOGH("Buffer with V4L index %d frame index %d of type %d Timestamp: %ld %ld ",
+                        frame->bufs[i]->buf_idx,
+                        frame->bufs[i]->frame_idx,
+                        pStream->getMyType(),
+                        frame->bufs[i]->ts.tv_sec,
+                        frame->bufs[i]->ts.tv_nsec);
+            }
+        }
+    }
+
+    // Wait on Postproc initialization if needed
+    // then send to postprocessor
+    // On success the postprocessor takes ownership of 'frame' (and returns
+    // the underlying buffers itself); on failure both the buffers and the
+    // copy are released here.
+    if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
+            (NO_ERROR != pme->m_postprocessor.processData(frame))) {
+        LOGE("Failed to trigger process data");
+        pChannel->bufDone(recvd_frame);
+        free(frame);
+        frame = NULL;
+        return;
+    }
+
+    LOGH("[KPI Perf]: X");
+}
+
+/*===========================================================================
+ * FUNCTION   : selectScene
+ *
+ * DESCRIPTION: send a preview callback when a specific selected scene is applied
+ *
+ * PARAMETERS :
+ *   @pChannel: Camera channel
+ *   @frame   : Bundled super buffer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::selectScene(QCameraChannel *pChannel,
+        mm_camera_super_buf_t *frame)
+{
+    mm_camera_buf_def_t *pMetaFrame = NULL;
+    QCameraStream *pStream = NULL;
+    int32_t rc = NO_ERROR;
+
+    if ((NULL == frame) || (NULL == pChannel)) {
+        LOGE("Invalid scene select input");
+        return BAD_VALUE;
+    }
+
+    // CAM_SCENE_MODE_MAX acts as the "no scene selected" sentinel; nothing
+    // to do in that case.
+    cam_scene_mode_type selectedScene = mParameters.getSelectedScene();
+    if (CAM_SCENE_MODE_MAX == selectedScene) {
+        LOGL("No selected scene");
+        return NO_ERROR;
+    }
+
+    // Locate the metadata buffer in the bundled superbuf; it carries the
+    // scene currently detected by the ISP.
+    for (uint32_t i = 0; i < frame->num_bufs; i++) {
+        pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+        if (pStream != NULL) {
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+                pMetaFrame = frame->bufs[i];
+                break;
+            }
+        }
+    }
+
+    if (NULL == pMetaFrame) {
+        LOGE("No metadata buffer found in scene select super buffer");
+        return NO_INIT;
+    }
+
+    metadata_buffer_t *pMetaData = (metadata_buffer_t *)pMetaFrame->buffer;
+
+    // Only fire the preview callback when the detected scene matches the
+    // requested one AND the app has a data callback registered with
+    // CAMERA_MSG_PREVIEW_FRAME enabled.
+    IF_META_AVAILABLE(cam_scene_mode_type, scene, CAM_INTF_META_CURRENT_SCENE, pMetaData) {
+        if ((*scene == selectedScene) &&
+                (mDataCb != NULL) &&
+                (msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0)) {
+            mm_camera_buf_def_t *preview_frame = NULL;
+            for (uint32_t i = 0; i < frame->num_bufs; i++) {
+                pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+                if (pStream != NULL) {
+                    if (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW)) {
+                        preview_frame = frame->bufs[i];
+                        break;
+                    }
+                }
+            }
+            if (preview_frame) {
+                QCameraGrallocMemory *memory = (QCameraGrallocMemory *)preview_frame->mem_info;
+                uint32_t idx = preview_frame->buf_idx;
+                rc = sendPreviewCallback(pStream, memory, idx);
+                if (NO_ERROR != rc) {
+                    LOGE("Error triggering scene select preview callback");
+                } else {
+                    // Reset to the sentinel so the callback fires only once
+                    // per selection.
+                    mParameters.setSelectedScene(CAM_SCENE_MODE_MAX);
+                }
+            } else {
+                LOGE("No preview buffer found in scene select super buffer");
+                return NO_INIT;
+            }
+        }
+    } else {
+        LOGE("No current scene metadata!");
+        rc = NO_INIT;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : capture_channel_cb_routine
+ *
+ * DESCRIPTION: helper function to handle snapshot superbuf callback directly from
+ *              mm-camera-interface
+ *
+ * PARAMETERS :
+ *   @recvd_frame : received super buffer
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : recvd_frame will be released after this call by caller, so if
+ *             async operation needed for recvd_frame, it's our responsibility
+ *             to save a copy for this variable to be used later.
+*==========================================================================*/
+void QCamera2HardwareInterface::capture_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
+                                                           void *userdata)
+{
+    KPI_ATRACE_CALL();
+    char value[PROPERTY_VALUE_MAX];
+    LOGH("[KPI Perf]: E PROFILE_YUV_CB_TO_HAL");
+    // Validate the callback cookie and that the frame belongs to this camera.
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != recvd_frame->camera_handle){
+        LOGE("camera obj not valid");
+        return;
+    }
+
+    QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_CAPTURE];
+    if (pChannel == NULL ||
+        pChannel->getMyHandle() != recvd_frame->ch_id) {
+        LOGE("Capture channel doesn't exist, return here");
+        return;
+    }
+
+    // save a copy for the superbuf
+    // The caller releases recvd_frame after this callback; keep a shallow
+    // copy of the descriptor for the asynchronous postprocessor.
+    mm_camera_super_buf_t* frame =
+               (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+    if (frame == NULL) {
+        LOGE("Error allocating memory to save received_frame structure.");
+        pChannel->bufDone(recvd_frame);
+        return;
+    }
+    *frame = *recvd_frame;
+
+    if (recvd_frame->num_bufs > 0) {
+        LOGI("[KPI Perf]: superbuf frame_idx %d",
+                recvd_frame->bufs[0]->frame_idx);
+    }
+
+    // Debug dump of the snapshot YUV that feeds reprocess (first snapshot
+    // buffer only).
+    for ( uint32_t i= 0 ; i < recvd_frame->num_bufs ; i++ ) {
+        if ( recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_SNAPSHOT ) {
+            mm_camera_buf_def_t * yuv_frame = recvd_frame->bufs[i];
+            QCameraStream *pStream = pChannel->getStreamByHandle(yuv_frame->stream_id);
+            if ( NULL != pStream ) {
+                pme->dumpFrameToFile(pStream, yuv_frame, QCAMERA_DUMP_FRM_INPUT_REPROCESS);
+            }
+            break;
+        }
+    }
+
+    // Debug hook: dump tuning metadata when persist.camera.dumpmetadata is set.
+    property_get("persist.camera.dumpmetadata", value, "0");
+    int32_t enabled = atoi(value);
+    if (enabled) {
+        mm_camera_buf_def_t *pMetaFrame = NULL;
+        QCameraStream *pStream = NULL;
+        for (uint32_t i = 0; i < frame->num_bufs; i++) {
+            pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+            if (pStream != NULL) {
+                if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+                    pMetaFrame = frame->bufs[i]; //find the metadata
+                    if (pMetaFrame != NULL &&
+                            ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
+                        pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "Snapshot");
+                    }
+                    break;
+                }
+            }
+        }
+    }
+
+    // Wait on Postproc initialization if needed
+    // then send to postprocessor
+    // On success the postprocessor owns 'frame'; on failure buffers and the
+    // copy are released here.
+    if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
+            (NO_ERROR != pme->m_postprocessor.processData(frame))) {
+        LOGE("Failed to trigger process data");
+        pChannel->bufDone(recvd_frame);
+        free(frame);
+        frame = NULL;
+        return;
+    }
+
+/* START of test register face image for face authentication */
+#ifdef QCOM_TEST_FACE_REGISTER_FACE
+    // Compile-time test path: registers the first snapshot frame as a face
+    // image exactly once per process (bRunFaceReg latches to 0).
+    static uint8_t bRunFaceReg = 1;
+
+    if (bRunFaceReg > 0) {
+        // find snapshot frame
+        QCameraStream *main_stream = NULL;
+        mm_camera_buf_def_t *main_frame = NULL;
+        for (int i = 0; i < recvd_frame->num_bufs; i++) {
+            QCameraStream *pStream =
+                pChannel->getStreamByHandle(recvd_frame->bufs[i]->stream_id);
+            if (pStream != NULL) {
+                if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
+                    main_stream = pStream;
+                    main_frame = recvd_frame->bufs[i];
+                    break;
+                }
+            }
+        }
+        if (main_stream != NULL && main_frame != NULL) {
+            int32_t faceId = -1;
+            cam_pp_offline_src_config_t config;
+            memset(&config, 0, sizeof(cam_pp_offline_src_config_t));
+            config.num_of_bufs = 1;
+            main_stream->getFormat(config.input_fmt);
+            main_stream->getFrameDimension(config.input_dim);
+            main_stream->getFrameOffset(config.input_buf_planes.plane_info);
+            LOGH("DEBUG: registerFaceImage E");
+            int32_t rc = pme->registerFaceImage(main_frame->buffer, &config, faceId);
+            LOGH("DEBUG: registerFaceImage X, ret=%d, faceId=%d", rc, faceId);
+            bRunFaceReg = 0;
+        }
+    }
+
+#endif
+/* END of test register face image for face authentication */
+
+    LOGH("[KPI Perf]: X");
+}
+#ifdef TARGET_TS_MAKEUP
+// Applies TS makeup to a preview frame using the cached face rectangle
+// (mFaceRect, updated elsewhere by face detection).  Returns false on null
+// inputs or when the makeup pass fails.
+bool QCamera2HardwareInterface::TsMakeupProcess_Preview(mm_camera_buf_def_t *pFrame,
+        QCameraStream * pStream) {
+    LOGD("begin");
+    bool bRet = false;
+    if (pStream == NULL || pFrame == NULL) {
+        bRet = false;
+        LOGH("pStream == NULL || pFrame == NULL");
+    } else {
+        bRet = TsMakeupProcess(pFrame, pStream, mFaceRect);
+    }
+    LOGD("end bRet = %d ",bRet);
+    return bRet;
+}
+
+// Applies TS makeup to a snapshot frame.  Unlike the preview path, this runs
+// its own face detection on the frame (ts_detectface_*) and uses the first
+// detected face rectangle.  Returns false on null inputs, detection-context
+// creation failure, or when no face is detected.
+bool QCamera2HardwareInterface::TsMakeupProcess_Snapshot(mm_camera_buf_def_t *pFrame,
+        QCameraStream * pStream) {
+    LOGD("begin");
+    bool bRet = false;
+    if (pStream == NULL || pFrame == NULL) {
+        bRet = false;
+        LOGH("pStream == NULL || pFrame == NULL");
+    } else {
+        cam_frame_len_offset_t offset;
+        memset(&offset, 0, sizeof(cam_frame_len_offset_t));
+        pStream->getFrameOffset(offset);
+
+        cam_dimension_t dim;
+        pStream->getFrameDimension(dim);
+
+        // Y plane starts at the buffer base; the chroma (UV) plane follows
+        // after the first plane's length, per the stream's frame offset info.
+        unsigned char *yBuf  = (unsigned char*)pFrame->buffer;
+        unsigned char *uvBuf = yBuf + offset.mp[0].len;
+        TSMakeupDataEx inMakeupData;
+        inMakeupData.frameWidth  = dim.width;
+        inMakeupData.frameHeight = dim.height;
+        inMakeupData.yBuf  = yBuf;
+        inMakeupData.uvBuf = uvBuf;
+        inMakeupData.yStride  = offset.mp[0].stride;
+        inMakeupData.uvStride = offset.mp[1].stride;
+        LOGD("detect begin");
+        TSHandle fd_handle = ts_detectface_create_context();
+        if (fd_handle != NULL) {
+            // NOTE(review): 'fmt' is fetched but never used in this path —
+            // presumably leftover; confirm before removing.
+            cam_format_t fmt;
+            pStream->getFormat(fmt);
+            // ts_detectface_detectEx returns the number of faces found
+            // (<= 0 means none / error), based on how the result is used here.
+            int iret = ts_detectface_detectEx(fd_handle, &inMakeupData);
+            LOGD("ts_detectface_detect iret = %d",iret);
+            if (iret <= 0) {
+                bRet = false;
+            } else {
+                TSRect faceRect;
+                // memset with -1 fills every byte with 0xFF, i.e. each field
+                // becomes -1 ("no face") before the query.
+                memset(&faceRect,-1,sizeof(TSRect));
+                iret = ts_detectface_get_face_info(fd_handle, 0, &faceRect, NULL,NULL,NULL);
+                LOGD("ts_detectface_get_face_info iret=%d,faceRect.left=%ld,"
+                        "faceRect.top=%ld,faceRect.right=%ld,faceRect.bottom=%ld"
+                        ,iret,faceRect.left,faceRect.top,faceRect.right,faceRect.bottom);
+                bRet = TsMakeupProcess(pFrame,pStream,faceRect);
+            }
+            // Always release the detection context created above.
+            ts_detectface_destroy_context(&fd_handle);
+            fd_handle = NULL;
+        } else {
+            LOGH("fd_handle == NULL");
+        }
+        LOGD("detect end");
+    }
+    LOGD("end bRet = %d ",bRet);
+    return bRet;
+}
+
+// Core makeup pass: runs ts_makeup_skin_beautyEx on the frame in-place.
+// Makeup is applied only when a valid face rect is present (left > -1) and
+// the makeup feature is enabled in parameters; white/clean levels are
+// clamped to [0, 100].  The beautified output is produced into a temporary
+// buffer, copied back over the source frame, and the cache is cleaned so the
+// CPU writes are visible to other consumers of the gralloc buffer.
+bool QCamera2HardwareInterface::TsMakeupProcess(mm_camera_buf_def_t *pFrame,
+        QCameraStream * pStream,TSRect& faceRect) {
+    bool bRet = false;
+    LOGD("begin");
+    if (pStream == NULL || pFrame == NULL) {
+        LOGH("pStream == NULL || pFrame == NULL ");
+        return false;
+    }
+
+    int whiteLevel, cleanLevel;
+    bool enableMakeup = (faceRect.left > -1) &&
+            (mParameters.getTsMakeupInfo(whiteLevel, cleanLevel));
+    if (enableMakeup) {
+        cam_dimension_t dim;
+        cam_frame_len_offset_t offset;
+        pStream->getFrameDimension(dim);
+        pStream->getFrameOffset(offset);
+        unsigned char *tempOriBuf = NULL;
+
+        tempOriBuf = (unsigned char*)pFrame->buffer;
+        unsigned char *yBuf = tempOriBuf;
+        unsigned char *uvBuf = tempOriBuf + offset.mp[0].len;
+        unsigned char *tmpBuf = new unsigned char[offset.frame_len];
+        // NOTE(review): plain operator new[] throws on failure and never
+        // returns NULL, so this check is dead code; use new (std::nothrow)
+        // if a NULL check is the intended OOM policy.
+        if (tmpBuf == NULL) {
+            LOGH("tmpBuf == NULL ");
+            return false;
+        }
+        TSMakeupDataEx inMakeupData, outMakeupData;
+        // Clamp both levels into [0, 100].
+        whiteLevel =  whiteLevel <= 0 ? 0 : (whiteLevel >= 100 ? 100 : whiteLevel);
+        cleanLevel =  cleanLevel <= 0 ? 0 : (cleanLevel >= 100 ? 100 : cleanLevel);
+        inMakeupData.frameWidth = dim.width;  // NV21 Frame width  > 0
+        inMakeupData.frameHeight = dim.height; // NV21 Frame height > 0
+        inMakeupData.yBuf =  yBuf; //  Y buffer pointer
+        inMakeupData.uvBuf = uvBuf; // VU buffer pointer
+        inMakeupData.yStride  = offset.mp[0].stride;
+        inMakeupData.uvStride = offset.mp[1].stride;
+        outMakeupData.frameWidth = dim.width; // NV21 Frame width  > 0
+        outMakeupData.frameHeight = dim.height; // NV21 Frame height > 0
+        outMakeupData.yBuf =  tmpBuf; //  Y buffer pointer
+        outMakeupData.uvBuf = tmpBuf + offset.mp[0].len; // VU buffer pointer
+        outMakeupData.yStride  = offset.mp[0].stride;
+        outMakeupData.uvStride = offset.mp[1].stride;
+        LOGD("faceRect:left 2:%ld,,right:%ld,,top:%ld,,bottom:%ld,,Level:%dx%d",
+            faceRect.left,faceRect.right,faceRect.top,faceRect.bottom,cleanLevel,whiteLevel);
+        ts_makeup_skin_beautyEx(&inMakeupData, &outMakeupData, &(faceRect),cleanLevel,whiteLevel);
+        memcpy((unsigned char*)pFrame->buffer, tmpBuf, offset.frame_len);
+        QCameraMemory *memory = (QCameraMemory *)pFrame->mem_info;
+        memory->cleanCache(pFrame->buf_idx);
+        // NOTE(review): the NULL guard around delete[] is redundant
+        // (delete of NULL is a no-op).
+        if (tmpBuf != NULL) {
+            delete[] tmpBuf;
+            tmpBuf = NULL;
+        }
+    }
+    // NOTE(review): bRet is never set to true on the success path, so this
+    // function always returns false even when makeup was applied — callers
+    // appear to ignore the return value, but confirm before relying on it.
+    LOGD("end bRet = %d ",bRet);
+    return bRet;
+}
+#endif
+/*===========================================================================
+ * FUNCTION   : postproc_channel_cb_routine
+ *
+ * DESCRIPTION: helper function to handle postprocess superbuf callback directly from
+ *              mm-camera-interface
+ *
+ * PARAMETERS :
+ *   @recvd_frame : received super buffer
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : recvd_frame will be released after this call by caller, so if
+ *             async operation needed for recvd_frame, it's our responsibility
+ *             to save a copy for this variable to be used later.
+*==========================================================================*/
+void QCamera2HardwareInterface::postproc_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
+                                                            void *userdata)
+{
+    ATRACE_CALL();
+    LOGH("[KPI Perf]: E");
+    // Validate the callback cookie and that the frame belongs to this camera.
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != recvd_frame->camera_handle){
+        LOGE("camera obj not valid");
+        return;
+    }
+
+    // save a copy for the superbuf
+    // Caller releases recvd_frame after this callback; keep a shallow copy
+    // for the postprocessor.
+    mm_camera_super_buf_t* frame =
+               (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+    if (frame == NULL) {
+        LOGE("Error allocating memory to save received_frame structure.");
+        return;
+    }
+    *frame = *recvd_frame;
+
+    if (recvd_frame->num_bufs > 0) {
+        LOGI("[KPI Perf]: frame_idx %d", recvd_frame->bufs[0]->frame_idx);
+    }
+    // Wait on JPEG create session
+    pme->waitDeferredWork(pme->mJpegJob);
+
+    // send to postprocessor
+    // NOTE(review): unlike zsl/capture callbacks, the return value of
+    // processPPData is ignored here, so 'frame' would leak on failure —
+    // confirm whether processPPData frees it on its error paths.
+    pme->m_postprocessor.processPPData(frame);
+
+    ATRACE_INT("Camera:Reprocess", 0);
+    LOGH("[KPI Perf]: X");
+}
+
+/*===========================================================================
+ * FUNCTION   : synchronous_stream_cb_routine
+ *
+ * DESCRIPTION: Function to handle STREAM SYNC CALLBACKS
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : This Function is excecuted in mm-interface context.
+ *             Avoid adding latency on this thread.
+ *==========================================================================*/
+void QCamera2HardwareInterface::synchronous_stream_cb_routine(
+        mm_camera_super_buf_t *super_frame, QCameraStream * stream,
+        void *userdata)
+{
+    nsecs_t frameTime = 0, mPreviewTimestamp = 0;
+    int err = NO_ERROR;
+
+    ATRACE_CALL();
+    LOGH("[KPI Perf] : BEGIN");
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+
+    if (pme == NULL) {
+        LOGE("Invalid hardware object");
+        return;
+    }
+    if (super_frame == NULL) {
+        LOGE("Invalid super buffer");
+        return;
+    }
+    // Sync stream callbacks carry a single buffer in slot 0.
+    mm_camera_buf_def_t *frame = super_frame->bufs[0];
+    if (NULL == frame) {
+        LOGE("Frame is NULL");
+        return;
+    }
+
+    if (stream->getMyType() != CAM_STREAM_TYPE_PREVIEW) {
+        LOGE("This is only for PREVIEW stream for now");
+        return;
+    }
+
+    if(pme->m_bPreviewStarted) {
+        LOGI("[KPI Perf] : PROFILE_FIRST_PREVIEW_FRAME");
+        pme->m_bPreviewStarted = false;
+    }
+
+    QCameraGrallocMemory *memory = (QCameraGrallocMemory *) frame->mem_info;
+    // Frame-skip path: record the frame ID and mark the buffer SKIPPED under
+    // mGrallocLock so the async preview callback can recycle it.
+    if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
+        pthread_mutex_lock(&pme->mGrallocLock);
+        pme->mLastPreviewFrameID = frame->frame_idx;
+        memory->setBufferStatus(frame->buf_idx, STATUS_SKIPPED);
+        pthread_mutex_unlock(&pme->mGrallocLock);
+        LOGH("preview is not running, no need to process");
+        return;
+    }
+
+    if (pme->needDebugFps()) {
+        pme->debugShowPreviewFPS();
+    }
+
+    frameTime = nsecs_t(frame->ts.tv_sec) * 1000000000LL + frame->ts.tv_nsec;
+    // Convert Boottime from camera to Monotime for display if needed.
+    // Otherwise, mBootToMonoTimestampOffset value will be 0.
+    frameTime = frameTime - pme->mBootToMonoTimestampOffset;
+    // Calculate the future presentation time stamp for displaying frames at regular interval
+    mPreviewTimestamp = pme->mCameraDisplay.computePresentationTimeStamp(frameTime);
+    stream->mStreamTimestamp = frameTime;
+
+#ifdef TARGET_TS_MAKEUP
+    pme->TsMakeupProcess_Preview(frame,stream);
+#endif
+
+    // Enqueue  buffer to gralloc.
+    uint32_t idx = frame->buf_idx;
+    LOGD("%p Enqueue Buffer to display %d frame Time = %lld Display Time = %lld",
+            pme, idx, frameTime, mPreviewTimestamp);
+    err = memory->enqueueBuffer(idx, mPreviewTimestamp);
+
+    if (err == NO_ERROR) {
+        // Track the enqueue count under mGrallocLock; the async preview
+        // callback uses it to decide how many buffers to dequeue.
+        pthread_mutex_lock(&pme->mGrallocLock);
+        pme->mLastPreviewFrameID = frame->frame_idx;
+        pme->mEnqueuedBuffers++;
+        pthread_mutex_unlock(&pme->mGrallocLock);
+    } else {
+        LOGE("Enqueue Buffer failed");
+    }
+
+    LOGH("[KPI Perf] : END");
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : preview_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle preview frame from preview stream in
+ *              normal case with display.
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done. The new
+ *             preview frame will be sent to display, and an older frame
+ *             will be dequeued from display and needs to be returned back
+ *             to kernel for future use.
+ *==========================================================================*/
+void QCamera2HardwareInterface::preview_stream_cb_routine(mm_camera_super_buf_t *super_frame,
+                                                          QCameraStream * stream,
+                                                          void *userdata)
+{
+    KPI_ATRACE_CALL();
+    LOGH("[KPI Perf] : BEGIN");
+    int err = NO_ERROR;
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    uint8_t dequeueCnt = 0;
+
+    if (pme == NULL) {
+        LOGE("Invalid hardware object");
+        free(super_frame);
+        return;
+    }
+
+    // Validate the buffer before dereferencing it. The previous code read
+    // super_frame->bufs[0]->mem_info before this NULL check, which could
+    // crash on a NULL bufs[0]. (Also fixed the "NLUL" typo in the log.)
+    mm_camera_buf_def_t *frame = super_frame->bufs[0];
+    if (NULL == frame) {
+        LOGE("preview frame is NULL");
+        free(super_frame);
+        return;
+    }
+
+    QCameraGrallocMemory *memory = (QCameraGrallocMemory *)frame->mem_info;
+    if (memory == NULL) {
+        LOGE("Invalid memory object");
+        free(super_frame);
+        return;
+    }
+
+    // For instant capture and for instant AEC, keep track of the frame counter.
+    // This count will be used to check against the corresponding bound values.
+    if (pme->mParameters.isInstantAECEnabled() ||
+            pme->mParameters.isInstantCaptureEnabled()) {
+        pme->mInstantAecFrameCount++;
+    }
+
+    // Decide under mGrallocLock whether this frame should be dropped:
+    // async path drops when preview is not running; sync path drops
+    // buffers previously marked as skipped.
+    pthread_mutex_lock(&pme->mGrallocLock);
+    if (!stream->isSyncCBEnabled()) {
+        pme->mLastPreviewFrameID = frame->frame_idx;
+    }
+    bool discardFrame = false;
+    if (!stream->isSyncCBEnabled() &&
+            !pme->needProcessPreviewFrame(frame->frame_idx))
+    {
+        discardFrame = true;
+    } else if (stream->isSyncCBEnabled() &&
+            memory->isBufSkipped(frame->buf_idx)) {
+        discardFrame = true;
+        memory->setBufferStatus(frame->buf_idx, STATUS_IDLE);
+    }
+    pthread_mutex_unlock(&pme->mGrallocLock);
+
+    if (discardFrame) {
+        LOGH("preview is not running, no need to process");
+        // Return the buffer to the stream before dropping the super buffer.
+        stream->bufDone(frame->buf_idx);
+        free(super_frame);
+        return;
+    }
+
+    uint32_t idx = frame->buf_idx;
+
+    pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_PREVIEW);
+
+    if(pme->m_bPreviewStarted) {
+       LOGI("[KPI Perf] : PROFILE_FIRST_PREVIEW_FRAME");
+       pme->m_bPreviewStarted = false ;
+    }
+
+    if (!stream->isSyncCBEnabled()) {
+
+        if (pme->needDebugFps()) {
+            pme->debugShowPreviewFPS();
+        }
+
+        // Async path: push the new frame to the display queue and track the
+        // number of enqueued buffers so we know how many we may dequeue back.
+        LOGD("Enqueue Buffer to display %d", idx);
+#ifdef TARGET_TS_MAKEUP
+        pme->TsMakeupProcess_Preview(frame,stream);
+#endif
+        err = memory->enqueueBuffer(idx);
+
+        if (err == NO_ERROR) {
+            pthread_mutex_lock(&pme->mGrallocLock);
+            pme->mEnqueuedBuffers++;
+            dequeueCnt = pme->mEnqueuedBuffers;
+            pthread_mutex_unlock(&pme->mGrallocLock);
+        } else {
+            LOGE("Enqueue Buffer failed");
+        }
+    } else {
+        // Sync path: enqueue happens elsewhere; just snapshot the count.
+        pthread_mutex_lock(&pme->mGrallocLock);
+        dequeueCnt = pme->mEnqueuedBuffers;
+        pthread_mutex_unlock(&pme->mGrallocLock);
+    }
+
+    // Display the buffer.
+    LOGD("%p displayBuffer %d E", pme, idx);
+    uint8_t numMapped = memory->getMappable();
+
+    // Dequeue older frames back from the display and return them to the
+    // driver; lazily map any buffer the backend has not seen yet.
+    for (uint8_t i = 0; i < dequeueCnt; i++) {
+        int dequeuedIdx = memory->dequeueBuffer();
+        if (dequeuedIdx < 0 || dequeuedIdx >= memory->getCnt()) {
+            LOGE("Invalid dequeued buffer index %d from display",
+                   dequeuedIdx);
+            break;
+        } else {
+            pthread_mutex_lock(&pme->mGrallocLock);
+            pme->mEnqueuedBuffers--;
+            pthread_mutex_unlock(&pme->mGrallocLock);
+            if (dequeuedIdx >= numMapped) {
+                // This buffer has not yet been mapped to the backend
+                err = stream->mapNewBuffer((uint32_t)dequeuedIdx);
+                if (memory->checkIfAllBuffersMapped()) {
+                    // check if mapping is done for all the buffers
+                    // Signal the condition for create jpeg session
+                    Mutex::Autolock l(pme->mMapLock);
+                    pme->mMapCond.signal();
+                    LOGH("Mapping done for all bufs");
+                } else {
+                    LOGH("All buffers are not yet mapped");
+                }
+            }
+        }
+
+        if (err < 0) {
+            LOGE("buffer mapping failed %d", err);
+        } else {
+            // Return dequeued buffer back to driver
+            err = stream->bufDone((uint32_t)dequeuedIdx);
+            if ( err < 0) {
+                LOGW("stream bufDone failed %d", err);
+            }
+        }
+    }
+
+    // Handle preview data callback (only when no dedicated callback channel).
+    if (pme->m_channels[QCAMERA_CH_TYPE_CALLBACK] == NULL) {
+        if (pme->needSendPreviewCallback() &&
+                (!pme->mParameters.isSceneSelectionEnabled())) {
+            int32_t rc = pme->sendPreviewCallback(stream, memory, idx);
+            if (NO_ERROR != rc) {
+                LOGW("Preview callback was not sent succesfully");
+            }
+        }
+    }
+
+    free(super_frame);
+    LOGH("[KPI Perf] : END");
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : sendPreviewCallback
+ *
+ * DESCRIPTION: helper function for triggering preview callbacks
+ *
+ * PARAMETERS :
+ *   @stream    : stream object
+ *   @memory    : Stream memory allocator
+ *   @idx       : buffer index
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::sendPreviewCallback(QCameraStream *stream,
+        QCameraMemory *memory, uint32_t idx)
+{
+    camera_memory_t *previewMem = NULL;
+    camera_memory_t *data = NULL;
+    camera_memory_t *dataToApp = NULL;
+    size_t previewBufSize = 0;
+    size_t previewBufSizeFromCallback = 0;
+    cam_dimension_t preview_dim;
+    cam_format_t previewFmt;
+    int32_t rc = NO_ERROR;
+    int32_t yStride = 0;
+    int32_t yScanline = 0;
+    int32_t uvStride = 0;
+    int32_t uvScanline = 0;
+    int32_t uStride = 0;
+    int32_t uScanline = 0;
+    int32_t vStride = 0;
+    int32_t vScanline = 0;
+    int32_t yStrideToApp = 0;
+    int32_t uvStrideToApp = 0;
+    int32_t yScanlineToApp = 0;
+    int32_t uvScanlineToApp = 0;
+    int32_t srcOffset = 0;
+    int32_t dstOffset = 0;
+    int32_t srcBaseOffset = 0;
+    int32_t dstBaseOffset = 0;
+    int i;
+
+    if ((NULL == stream) || (NULL == memory)) {
+        LOGE("Invalid preview callback input");
+        return BAD_VALUE;
+    }
+
+    cam_stream_info_t *streamInfo =
+            reinterpret_cast<cam_stream_info_t *>(stream->getStreamInfoBuf()->getPtr(0));
+    if (NULL == streamInfo) {
+        LOGE("Invalid streamInfo");
+        return BAD_VALUE;
+    }
+
+    stream->getFrameDimension(preview_dim);
+    stream->getFormat(previewFmt);
+
+    // The app expects a tightly-packed buffer: Y plane of width*height
+    // followed by an interleaved UV plane of width*(height/2).
+    yStrideToApp = preview_dim.width;
+    yScanlineToApp = preview_dim.height;
+    uvStrideToApp = yStrideToApp;
+    uvScanlineToApp = yScanlineToApp / 2;
+
+    /* The preview buffer size in the callback should be
+     * (width*height*bytes_per_pixel). As all preview formats we support,
+     * use 12 bits per pixel, buffer size = previewWidth * previewHeight * 3/2.
+     * We need to put a check if some other formats are supported in future. */
+    if ((previewFmt == CAM_FORMAT_YUV_420_NV21) ||
+        (previewFmt == CAM_FORMAT_YUV_420_NV12) ||
+        (previewFmt == CAM_FORMAT_YUV_420_YV12) ||
+        (previewFmt == CAM_FORMAT_YUV_420_NV12_VENUS) ||
+        (previewFmt == CAM_FORMAT_YUV_420_NV21_VENUS) ||
+        (previewFmt == CAM_FORMAT_YUV_420_NV21_ADRENO)) {
+        if(previewFmt == CAM_FORMAT_YUV_420_YV12) {
+            // YV12 is 3-planar (Y, V, U); size is the sum of all planes.
+            yStride = streamInfo->buf_planes.plane_info.mp[0].stride;
+            yScanline = streamInfo->buf_planes.plane_info.mp[0].scanline;
+            uStride = streamInfo->buf_planes.plane_info.mp[1].stride;
+            uScanline = streamInfo->buf_planes.plane_info.mp[1].scanline;
+            vStride = streamInfo->buf_planes.plane_info.mp[2].stride;
+            vScanline = streamInfo->buf_planes.plane_info.mp[2].scanline;
+
+            previewBufSize = (size_t)
+                    (yStride * yScanline + uStride * uScanline + vStride * vScanline);
+            previewBufSizeFromCallback = previewBufSize;
+        } else {
+            // Semi-planar formats: compare the padded (stream) size with the
+            // packed size the app expects to decide whether a copy is needed.
+            yStride = streamInfo->buf_planes.plane_info.mp[0].stride;
+            yScanline = streamInfo->buf_planes.plane_info.mp[0].scanline;
+            uvStride = streamInfo->buf_planes.plane_info.mp[1].stride;
+            uvScanline = streamInfo->buf_planes.plane_info.mp[1].scanline;
+
+            previewBufSize = (size_t)
+                    ((yStrideToApp * yScanlineToApp) + (uvStrideToApp * uvScanlineToApp));
+
+            previewBufSizeFromCallback = (size_t)
+                    ((yStride * yScanline) + (uvStride * uvScanline));
+        }
+        if(previewBufSize == previewBufSizeFromCallback) {
+            // No padding: share the stream buffer with the app directly.
+            previewMem = mGetMemory(memory->getFd(idx),
+                       previewBufSize, 1, mCallbackCookie);
+            if (!previewMem || !previewMem->data) {
+                LOGE("mGetMemory failed.\n");
+                return NO_MEMORY;
+            } else {
+                data = previewMem;
+            }
+        } else {
+            // Padded buffer: copy row by row into a packed buffer for the app.
+            data = memory->getMemory(idx, false);
+            // Guard against a missing source buffer before dereferencing it
+            // in the copy loops below.
+            if ((NULL == data) || (NULL == data->data)) {
+                LOGE("Invalid preview buffer");
+                return NO_MEMORY;
+            }
+            dataToApp = mGetMemory(-1, previewBufSize, 1, mCallbackCookie);
+            if (!dataToApp || !dataToApp->data) {
+                LOGE("mGetMemory failed.\n");
+                return NO_MEMORY;
+            }
+
+            for (i = 0; i < preview_dim.height; i++) {
+                srcOffset = i * yStride;
+                dstOffset = i * yStrideToApp;
+
+                memcpy((unsigned char *) dataToApp->data + dstOffset,
+                        (unsigned char *) data->data + srcOffset,
+                        (size_t)yStrideToApp);
+            }
+
+            srcBaseOffset = yStride * yScanline;
+            dstBaseOffset = yStrideToApp * yScanlineToApp;
+
+            for (i = 0; i < preview_dim.height/2; i++) {
+                srcOffset = i * uvStride + srcBaseOffset;
+                dstOffset = i * uvStrideToApp + dstBaseOffset;
+
+                // Copy one chroma row. uvStrideToApp equals yStrideToApp
+                // (set above), but use the chroma stride for clarity.
+                memcpy((unsigned char *) dataToApp->data + dstOffset,
+                        (unsigned char *) data->data + srcOffset,
+                        (size_t)uvStrideToApp);
+            }
+        }
+    } else {
+        /*Invalid Buffer content. But can be used as a first preview frame trigger in
+        framework/app */
+        previewBufSize = (size_t)
+                    ((yStrideToApp * yScanlineToApp) +
+                    (uvStrideToApp * uvScanlineToApp));
+        previewBufSizeFromCallback = 0;
+        // Use %zu: previewBufSize is size_t; %d is a format mismatch.
+        LOGW("Invalid preview format. Buffer content cannot be processed size = %zu",
+                previewBufSize);
+        dataToApp = mGetMemory(-1, previewBufSize, 1, mCallbackCookie);
+        if (!dataToApp || !dataToApp->data) {
+            LOGE("mGetMemory failed.\n");
+            return NO_MEMORY;
+        }
+    }
+    qcamera_callback_argm_t cbArg;
+    memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+    cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+    cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
+    // Hand the shared buffer to the app when sizes matched, otherwise the
+    // packed copy (or the dummy buffer for unsupported formats).
+    if (previewBufSize != 0 && previewBufSizeFromCallback != 0 &&
+            previewBufSize == previewBufSizeFromCallback) {
+        cbArg.data = data;
+    } else {
+        cbArg.data = dataToApp;
+    }
+    // release_cb frees whichever camera_memory_t we allocated here.
+    if ( previewMem ) {
+        cbArg.user_data = previewMem;
+        cbArg.release_cb = releaseCameraMemory;
+    } else if (dataToApp) {
+        cbArg.user_data = dataToApp;
+        cbArg.release_cb = releaseCameraMemory;
+    }
+    cbArg.cookie = this;
+    rc = m_cbNotifier.notifyCallback(cbArg);
+    if (rc != NO_ERROR) {
+        LOGW("fail sending notification");
+        // Notification failed: the release callback will not run, so free
+        // the allocation ourselves.
+        if (previewMem) {
+            previewMem->release(previewMem);
+        } else if (dataToApp) {
+            dataToApp->release(dataToApp);
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : nodisplay_preview_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle preview frame from preview stream in
+ *              no-display case
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done.
+ *==========================================================================*/
+void QCamera2HardwareInterface::nodisplay_preview_stream_cb_routine(
+                                                          mm_camera_super_buf_t *super_frame,
+                                                          QCameraStream *stream,
+                                                          void * userdata)
+{
+    ATRACE_CALL();
+    LOGH("[KPI Perf] E");
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+
+    // Validate the hardware object and that this frame belongs to it.
+    bool objValid = (pme != NULL) &&
+            (pme->mCameraHandle != NULL) &&
+            (pme->mCameraHandle->camera_handle == super_frame->camera_handle);
+    if (!objValid) {
+        LOGE("camera obj not valid");
+        // simply free super frame
+        free(super_frame);
+        return;
+    }
+
+    mm_camera_buf_def_t *frame = super_frame->bufs[0];
+    if (NULL == frame) {
+        LOGE("preview frame is NULL");
+        free(super_frame);
+        return;
+    }
+
+    // Drop the frame when preview processing is not active.
+    if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
+        LOGH("preview is not running, no need to process");
+        stream->bufDone(frame->buf_idx);
+        free(super_frame);
+        return;
+    }
+
+    if (pme->needDebugFps()) {
+        pme->debugShowPreviewFPS();
+    }
+
+    QCameraMemory *memObj = (QCameraMemory *)frame->mem_info;
+    camera_memory_t *previewMem =
+            (memObj != NULL) ? memObj->getMemory(frame->buf_idx, false) : NULL;
+    if ((NULL != memObj) && (NULL != previewMem)) {
+        pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_PREVIEW);
+
+        // Only the primary camera delivers preview data callbacks.
+        bool sendCallback = pme->needProcessPreviewFrame(frame->frame_idx) &&
+                pme->needSendPreviewCallback() &&
+                (pme->getRelatedCamSyncInfo()->mode != CAM_MODE_SECONDARY);
+        if (sendCallback) {
+            qcamera_callback_argm_t cbArg;
+            memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+            cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+            cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
+            cbArg.data = previewMem;
+            cbArg.user_data = (void *) &frame->buf_idx;
+            cbArg.cookie = stream;
+            // returnStreamBuffer gives the buffer back once the app is done.
+            cbArg.release_cb = returnStreamBuffer;
+            if (pme->m_cbNotifier.notifyCallback(cbArg) != NO_ERROR) {
+                LOGE ("fail sending data notify");
+                stream->bufDone(frame->buf_idx);
+            }
+        } else {
+            stream->bufDone(frame->buf_idx);
+        }
+    }
+    free(super_frame);
+    LOGH("[KPI Perf] X");
+}
+
+/*===========================================================================
+ * FUNCTION   : rdi_mode_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle RDI frame from preview stream in
+ *              rdi mode case
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done.
+ *==========================================================================*/
+void QCamera2HardwareInterface::rdi_mode_stream_cb_routine(
+  mm_camera_super_buf_t *super_frame,
+  QCameraStream *stream,
+  void * userdata)
+{
+    ATRACE_CALL();
+    LOGH("RDI_DEBUG Enter");
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        LOGE("camera obj not valid");
+        free(super_frame);
+        return;
+    }
+    mm_camera_buf_def_t *frame = super_frame->bufs[0];
+    if (NULL == frame) {
+        // Fixed "NLUL" typo in the original log message.
+        LOGE("preview frame is NULL");
+        goto end;
+    }
+    if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
+        LOGE("preview is not running, no need to process");
+        stream->bufDone(frame->buf_idx);
+        goto end;
+    }
+    if (pme->needDebugFps()) {
+        pme->debugShowPreviewFPS();
+    }
+    // Non-secure Mode
+    if (!pme->isSecureMode()) {
+        QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info;
+        if (NULL == previewMemObj) {
+            LOGE("previewMemObj is NULL");
+            stream->bufDone(frame->buf_idx);
+            goto end;
+        }
+
+        camera_memory_t *preview_mem = previewMemObj->getMemory(frame->buf_idx, false);
+        if (NULL != preview_mem) {
+            previewMemObj->cleanCache(frame->buf_idx);
+            // Dump RAW frame
+            pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_RAW);
+            // Notify Preview callback frame
+            if (pme->needProcessPreviewFrame(frame->frame_idx) &&
+                    pme->mDataCb != NULL &&
+                    pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) {
+                qcamera_callback_argm_t cbArg;
+                memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+                cbArg.cb_type    = QCAMERA_DATA_CALLBACK;
+                cbArg.msg_type   = CAMERA_MSG_PREVIEW_FRAME;
+                cbArg.data       = preview_mem;
+                cbArg.user_data = (void *) &frame->buf_idx;
+                cbArg.cookie     = stream;
+                cbArg.release_cb = returnStreamBuffer;
+                pme->m_cbNotifier.notifyCallback(cbArg);
+            } else {
+                // preview_mem is valid here; the callback conditions simply
+                // aren't met. The original logged the misleading message
+                // "preview_mem is NULL" on this path.
+                LOGH("No need to process preview frame, return buffer");
+                stream->bufDone(frame->buf_idx);
+            }
+        }
+        else {
+            LOGE("preview_mem is NULL");
+            stream->bufDone(frame->buf_idx);
+        }
+    } else {
+        // Secure Mode
+        // We will do QCAMERA_NOTIFY_CALLBACK and share FD in case of secure mode
+        QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info;
+        if (NULL == previewMemObj) {
+            LOGE("previewMemObj is NULL");
+            stream->bufDone(frame->buf_idx);
+            goto end;
+        }
+
+        int fd = previewMemObj->getFd(frame->buf_idx);
+        LOGD("Preview frame fd =%d for index = %d ", fd, frame->buf_idx);
+        if (pme->needProcessPreviewFrame(frame->frame_idx) &&
+                pme->mDataCb != NULL &&
+                pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) {
+            // Prepare Callback structure
+            qcamera_callback_argm_t cbArg;
+            memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+            cbArg.cb_type    = QCAMERA_NOTIFY_CALLBACK;
+            cbArg.msg_type   = CAMERA_MSG_PREVIEW_FRAME;
+#ifndef VANILLA_HAL
+            cbArg.ext1       = CAMERA_FRAME_DATA_FD;
+            cbArg.ext2       = fd;
+#endif
+            cbArg.user_data  = (void *) &frame->buf_idx;
+            cbArg.cookie     = stream;
+            cbArg.release_cb = returnStreamBuffer;
+            pme->m_cbNotifier.notifyCallback(cbArg);
+        } else {
+            LOGH("No need to process preview frame, return buffer");
+            stream->bufDone(frame->buf_idx);
+        }
+    }
+end:
+    free(super_frame);
+    LOGH("RDI_DEBUG Exit");
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : postview_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle post frame from postview stream
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done.
+ *==========================================================================*/
+void QCamera2HardwareInterface::postview_stream_cb_routine(mm_camera_super_buf_t *super_frame,
+                                                           QCameraStream *stream,
+                                                           void *userdata)
+{
+    ATRACE_CALL();
+    int err = NO_ERROR;
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+
+    if (pme == NULL) {
+        LOGE("Invalid hardware object");
+        free(super_frame);
+        return;
+    }
+
+    LOGH("[KPI Perf] : BEGIN");
+
+    // Check the frame pointer before dereferencing it. The previous code
+    // read super_frame->bufs[0]->mem_info before this NULL check, which
+    // could crash on a NULL bufs[0]. Also corrected the log message
+    // ("preview frame" -> "postview frame") for this postview routine.
+    mm_camera_buf_def_t *frame = super_frame->bufs[0];
+    if (NULL == frame) {
+        LOGE("postview frame is NULL");
+        free(super_frame);
+        return;
+    }
+
+    QCameraGrallocMemory *memory = (QCameraGrallocMemory *)frame->mem_info;
+    if (memory == NULL) {
+        LOGE("Invalid memory object");
+        free(super_frame);
+        return;
+    }
+
+    // Dump the thumbnail frame when a memory object is present.
+    QCameraMemory *memObj = (QCameraMemory *)frame->mem_info;
+    if (NULL != memObj) {
+        pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_THUMBNAIL);
+    }
+
+    // Return buffer back to driver
+    err = stream->bufDone(frame->buf_idx);
+    if ( err < 0) {
+        LOGE("stream bufDone failed %d", err);
+    }
+
+    free(super_frame);
+    LOGH("[KPI Perf] : END");
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : video_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle video frame from video stream
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done. video
+ *             frame will be sent to video encoder. Once video encoder is
+ *             done with the video frame, it will call another API
+ *             (release_recording_frame) to return the frame back
+ *==========================================================================*/
+void QCamera2HardwareInterface::video_stream_cb_routine(mm_camera_super_buf_t *super_frame,
+                                                        QCameraStream *stream,
+                                                        void *userdata)
+{
+    ATRACE_CALL();
+    QCameraVideoMemory *videoMemObj = NULL;
+    camera_memory_t *video_mem = NULL;
+    nsecs_t timeStamp = 0;
+    bool triggerTCB = FALSE;
+
+    LOGH("[KPI Perf] : BEGIN");
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        LOGE("camera obj not valid");
+        // simply free super frame
+        free(super_frame);
+        return;
+    }
+
+    // NOTE(review): frame is not NULL-checked before use below, unlike the
+    // preview/postview routines — confirm bufs[0] is guaranteed non-NULL here.
+    mm_camera_buf_def_t *frame = super_frame->bufs[0];
+
+    if (pme->needDebugFps()) {
+        pme->debugShowVideoFPS();
+    }
+    if(pme->m_bRecordStarted) {
+       LOGI("[KPI Perf] : PROFILE_FIRST_RECORD_FRAME");
+       pme->m_bRecordStarted = false ;
+    }
+    // NOTE(review): %ld for ts.tv_sec/tv_nsec assumes long-sized timespec
+    // fields — verify on all targeted ABIs.
+    LOGD("Stream(%d), Timestamp: %ld %ld",
+          frame->stream_id,
+          frame->ts.tv_sec,
+          frame->ts.tv_nsec);
+
+    if (frame->buf_type == CAM_STREAM_BUF_TYPE_MPLANE) {
+        if (pme->mParameters.getVideoBatchSize() == 0) {
+            // Non-batched MPLANE case: hand one buffer to the encoder.
+            timeStamp = nsecs_t(frame->ts.tv_sec) * 1000000000LL
+                    + frame->ts.tv_nsec;
+            LOGD("Video frame to encoder TimeStamp : %lld batch = 0",
+                    timeStamp);
+            pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_VIDEO);
+            videoMemObj = (QCameraVideoMemory *)frame->mem_info;
+            video_mem = NULL;
+            if (NULL != videoMemObj) {
+                video_mem = videoMemObj->getMemory(frame->buf_idx,
+                        (pme->mStoreMetaDataInFrame > 0)? true : false);
+                videoMemObj->updateNativeHandle(frame->buf_idx);
+                triggerTCB = TRUE;
+            }
+        } else {
+            //Handle video batch callback
+            native_handle_t *nh = NULL;
+            pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_VIDEO);
+            // NOTE: this local shadows the outer videoMemObj declaration.
+            QCameraVideoMemory *videoMemObj = (QCameraVideoMemory *)frame->mem_info;
+            if ((stream->mCurMetaMemory == NULL)
+                    || (stream->mCurBufIndex == -1)) {
+                //get Free metadata available
+                for (int i = 0; i < CAMERA_MIN_VIDEO_BATCH_BUFFERS; i++) {
+                    if (stream->mStreamMetaMemory[i].consumerOwned == 0) {
+                        stream->mCurMetaMemory = videoMemObj->getMemory(i,true);
+                        stream->mCurBufIndex = 0;
+                        stream->mCurMetaIndex = i;
+                        stream->mStreamMetaMemory[i].numBuffers = 0;
+                        break;
+                    }
+                }
+            }
+            video_mem = stream->mCurMetaMemory;
+            nh = videoMemObj->updateNativeHandle(stream->mCurMetaIndex);
+            if (video_mem == NULL || nh == NULL) {
+                LOGE("No Free metadata. Drop this frame");
+                stream->mCurBufIndex = -1;
+                stream->bufDone(frame->buf_idx);
+                free(super_frame);
+                return;
+            }
+
+            // Pack this frame into the current batch slot of the native handle.
+            int index = stream->mCurBufIndex;
+            int fd_cnt = pme->mParameters.getVideoBatchSize();
+            nsecs_t frame_ts = nsecs_t(frame->ts.tv_sec) * 1000000000LL
+                    + frame->ts.tv_nsec;
+            if (index == 0) {
+                stream->mFirstTimeStamp = frame_ts;
+            }
+
+            stream->mStreamMetaMemory[stream->mCurMetaIndex].buf_index[index]
+                    = (uint8_t)frame->buf_idx;
+            stream->mStreamMetaMemory[stream->mCurMetaIndex].numBuffers++;
+            stream->mStreamMetaMemory[stream->mCurMetaIndex].consumerOwned
+                    = TRUE;
+            /*
+            * data[0] => FD
+            * data[mNumFDs + 1] => OFFSET
+            * data[mNumFDs + 2] => SIZE
+            * data[mNumFDs + 3] => Usage Flag (Color format/Compression)
+            * data[mNumFDs + 4] => TIMESTAMP
+            * data[mNumFDs + 5] => FORMAT
+            */
+            nh->data[index] = videoMemObj->getFd(frame->buf_idx);
+            nh->data[index + fd_cnt] = 0;
+            nh->data[index + (fd_cnt * 2)] = (int)videoMemObj->getSize(frame->buf_idx);
+            nh->data[index + (fd_cnt * 3)] = videoMemObj->getUsage();
+            nh->data[index + (fd_cnt * 4)] = (int)(frame_ts - stream->mFirstTimeStamp);
+            nh->data[index + (fd_cnt * 5)] = videoMemObj->getFormat();
+            stream->mCurBufIndex++;
+            // Only deliver the batch to the encoder once it is full.
+            if (stream->mCurBufIndex == fd_cnt) {
+                timeStamp = stream->mFirstTimeStamp;
+                LOGD("Video frame to encoder TimeStamp : %lld batch = %d",
+                    timeStamp, fd_cnt);
+                stream->mCurBufIndex = -1;
+                stream->mCurMetaIndex = -1;
+                stream->mCurMetaMemory = NULL;
+                triggerTCB = TRUE;
+            }
+        }
+    } else {
+        // USERPTR (pre-batched) case: the frame already aggregates several
+        // plane buffers; pack each one into the native handle.
+        videoMemObj = (QCameraVideoMemory *)frame->mem_info;
+        video_mem = NULL;
+        native_handle_t *nh = NULL;
+        int fd_cnt = frame->user_buf.bufs_used;
+        if (NULL != videoMemObj) {
+            video_mem = videoMemObj->getMemory(frame->buf_idx, true);
+            nh = videoMemObj->updateNativeHandle(frame->buf_idx);
+        } else {
+            LOGE("videoMemObj NULL");
+        }
+
+        if (nh != NULL) {
+            timeStamp = nsecs_t(frame->ts.tv_sec) * 1000000000LL
+                    + frame->ts.tv_nsec;
+            LOGD("Batch buffer TimeStamp : %lld FD = %d index = %d fd_cnt = %d",
+                    timeStamp, frame->fd, frame->buf_idx, fd_cnt);
+
+            for (int i = 0; i < fd_cnt; i++) {
+                if (frame->user_buf.buf_idx[i] >= 0) {
+                    mm_camera_buf_def_t *plane_frame =
+                            &frame->user_buf.plane_buf[frame->user_buf.buf_idx[i]];
+                    QCameraVideoMemory *frameobj =
+                            (QCameraVideoMemory *)plane_frame->mem_info;
+                    int usage = frameobj->getUsage();
+                    nsecs_t frame_ts = nsecs_t(plane_frame->ts.tv_sec) * 1000000000LL
+                            + plane_frame->ts.tv_nsec;
+                    /*
+                       data[0] => FD
+                       data[mNumFDs + 1] => OFFSET
+                       data[mNumFDs + 2] => SIZE
+                       data[mNumFDs + 3] => Usage Flag (Color format/Compression)
+                       data[mNumFDs + 4] => TIMESTAMP
+                       data[mNumFDs + 5] => FORMAT
+                    */
+                    nh->data[i] = frameobj->getFd(plane_frame->buf_idx);
+                    nh->data[fd_cnt + i] = 0;
+                    nh->data[(2 * fd_cnt) + i] = (int)frameobj->getSize(plane_frame->buf_idx);
+                    nh->data[(3 * fd_cnt) + i] = usage;
+                    // Per-plane timestamps are stored as deltas from the batch start.
+                    nh->data[(4 * fd_cnt) + i] = (int)(frame_ts - timeStamp);
+                    nh->data[(5 * fd_cnt) + i] = frameobj->getFormat();
+                    LOGD("Send Video frames to services/encoder delta : %lld FD = %d index = %d",
+                            (frame_ts - timeStamp), plane_frame->fd, plane_frame->buf_idx);
+                    pme->dumpFrameToFile(stream, plane_frame, QCAMERA_DUMP_FRM_VIDEO);
+                }
+            }
+            triggerTCB = TRUE;
+        } else {
+            LOGE("No Video Meta Available. Return Buffer");
+            stream->bufDone(super_frame->bufs[0]->buf_idx);
+        }
+    }
+
+    // Deliver the (possibly batched) buffer to the encoder via the
+    // timestamped data callback, if video frames are enabled.
+    if ((NULL != video_mem) && (triggerTCB == TRUE)) {
+        if ((pme->mDataCbTimestamp != NULL) &&
+            pme->msgTypeEnabledWithLock(CAMERA_MSG_VIDEO_FRAME) > 0) {
+            qcamera_callback_argm_t cbArg;
+            memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+            cbArg.cb_type = QCAMERA_DATA_TIMESTAMP_CALLBACK;
+            cbArg.msg_type = CAMERA_MSG_VIDEO_FRAME;
+            cbArg.data = video_mem;
+
+            // Convert Boottime from camera to Monotime for video if needed.
+            // Otherwise, mBootToMonoTimestampOffset value will be 0.
+            timeStamp = timeStamp - pme->mBootToMonoTimestampOffset;
+            LOGD("Final video buffer TimeStamp : %lld ", timeStamp);
+            cbArg.timestamp = timeStamp;
+            int32_t rc = pme->m_cbNotifier.notifyCallback(cbArg);
+            if (rc != NO_ERROR) {
+                LOGE("fail sending data notify");
+                stream->bufDone(frame->buf_idx);
+            }
+        }
+    }
+
+    free(super_frame);
+    LOGH("[KPI Perf] : END");
+}
+
+/*===========================================================================
+ * FUNCTION   : snapshot_channel_cb_routine
+ *
+ * DESCRIPTION: helper function to handle snapshot frame from snapshot channel
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : recvd_frame will be released after this call by caller, so if
+ *             async operation needed for recvd_frame, it's our responsibility
+ *             to save a copy for this variable to be used later.
+ *==========================================================================*/
+void QCamera2HardwareInterface::snapshot_channel_cb_routine(mm_camera_super_buf_t *super_frame,
+       void *userdata)
+{
+    ATRACE_CALL();
+    char value[PROPERTY_VALUE_MAX];
+    QCameraChannel *pChannel = NULL;
+
+    LOGH("[KPI Perf]: E");
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        LOGE("camera obj not valid");
+        // simply free super frame
+        // NOTE(review): this free contradicts the header note saying the
+        // caller releases recvd_frame — confirm which side owns super_frame.
+        free(super_frame);
+        return;
+    }
+
+    // In low-power mode the snapshot is taken off the video channel.
+    if (pme->isLowPowerMode()) {
+        pChannel = pme->m_channels[QCAMERA_CH_TYPE_VIDEO];
+    } else {
+        pChannel = pme->m_channels[QCAMERA_CH_TYPE_SNAPSHOT];
+    }
+
+    if ((pChannel == NULL) || (pChannel->getMyHandle() != super_frame->ch_id)) {
+        LOGE("Snapshot channel doesn't exist, return here");
+        // NOTE(review): super_frame is neither freed nor returned via
+        // bufDone on this path — possible leak if ownership is ours; verify
+        // against the mm-camera-interface callback contract.
+        return;
+    }
+
+    // Optionally dump tuning metadata when the debug property is set.
+    property_get("persist.camera.dumpmetadata", value, "0");
+    int32_t enabled = atoi(value);
+    if (enabled) {
+        // NOTE(review): this re-check duplicates the condition above and is
+        // unreachable as written; same missing-free concern on its return.
+        if (pChannel == NULL ||
+            pChannel->getMyHandle() != super_frame->ch_id) {
+            LOGE("Capture channel doesn't exist, return here");
+            return;
+        }
+        mm_camera_buf_def_t *pMetaFrame = NULL;
+        QCameraStream *pStream = NULL;
+        for (uint32_t i = 0; i < super_frame->num_bufs; i++) {
+            pStream = pChannel->getStreamByHandle(super_frame->bufs[i]->stream_id);
+            if (pStream != NULL) {
+                if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+                    pMetaFrame = super_frame->bufs[i]; //find the metadata
+                    if (pMetaFrame != NULL &&
+                            ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
+                        pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "Snapshot");
+                    }
+                    break;
+                }
+            }
+        }
+    }
+
+    // save a copy for the superbuf
+    mm_camera_super_buf_t* frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+    if (frame == NULL) {
+        LOGE("Error allocating memory to save received_frame structure.");
+        pChannel->bufDone(super_frame);
+        return;
+    }
+    // Shallow copy: the postprocessor takes ownership of this copy, not of
+    // the caller's super_frame container.
+    *frame = *super_frame;
+
+    if (frame->num_bufs > 0) {
+        LOGI("[KPI Perf]: superbuf frame_idx %d",
+                frame->bufs[0]->frame_idx);
+    }
+
+    // Wait for deferred reprocess setup, then hand the copy to the
+    // postprocessor; on failure return the buffers and free the copy.
+    if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
+            (NO_ERROR != pme->m_postprocessor.processData(frame))) {
+        LOGE("Failed to trigger process data");
+        pChannel->bufDone(super_frame);
+        free(frame);
+        frame = NULL;
+        return;
+    }
+
+    LOGH("[KPI Perf]: X");
+}
+
+/*===========================================================================
+ * FUNCTION   : raw_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle raw dump frame from raw stream
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done. For raw
+ *             frame, there is no need to send to postprocessor for jpeg
+ *             encoding. this function will play shutter and send the data
+ *             callback to upper layer. Raw frame buffer will be returned
+ *             back to kernel, and frame will be free after use.
+ *==========================================================================*/
+void QCamera2HardwareInterface::raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
+                                                      QCameraStream * /*stream*/,
+                                                      void * userdata)
+{
+    ATRACE_CALL();
+    LOGH("[KPI Perf] : BEGIN");
+
+    QCamera2HardwareInterface *hwi =
+            reinterpret_cast<QCamera2HardwareInterface *>(userdata);
+    // Validate that the callback cookie still refers to the live camera object
+    bool objValid = (hwi != NULL) && (hwi->mCameraHandle != NULL) &&
+            (hwi->mCameraHandle->camera_handle == super_frame->camera_handle);
+    if (!objValid) {
+        LOGE("camera obj not valid");
+        // simply free super frame
+        free(super_frame);
+        return;
+    }
+
+    // RAW dump frames skip JPEG encoding; hand straight to the postprocessor,
+    // which takes ownership of super_frame
+    hwi->m_postprocessor.processRawData(super_frame);
+    LOGH("[KPI Perf] : END");
+}
+
+/*===========================================================================
+ * FUNCTION   : raw_channel_cb_routine
+ *
+ * DESCRIPTION: helper function to handle RAW  superbuf callback directly from
+ *              mm-camera-interface
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : recvd_frame will be released after this call by caller, so if
+ *             async operation needed for recvd_frame, it's our responsibility
+ *             to save a copy for this variable to be used later.
+ *==========================================================================*/
+void QCamera2HardwareInterface::raw_channel_cb_routine(mm_camera_super_buf_t *super_frame,
+        void *userdata)
+
+{
+    ATRACE_CALL();
+    char value[PROPERTY_VALUE_MAX];
+
+    LOGH("[KPI Perf]: E");
+    // Recover the HAL object from the callback cookie and reject stale frames
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        LOGE("camera obj not valid");
+        // simply free super frame
+        free(super_frame);
+        return;
+    }
+
+    QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_RAW];
+    if (pChannel == NULL) {
+        LOGE("RAW channel doesn't exist, return here");
+        return;
+    }
+
+    // Superbuf from a different channel: return its buffers and bail
+    if (pChannel->getMyHandle() != super_frame->ch_id) {
+        LOGE("Invalid Input super buffer");
+        pChannel->bufDone(super_frame);
+        return;
+    }
+
+    // Optionally dump tuning metadata when persist.camera.dumpmetadata is set
+    property_get("persist.camera.dumpmetadata", value, "0");
+    int32_t enabled = atoi(value);
+    if (enabled) {
+        mm_camera_buf_def_t *pMetaFrame = NULL;
+        QCameraStream *pStream = NULL;
+        for (uint32_t i = 0; i < super_frame->num_bufs; i++) {
+            pStream = pChannel->getStreamByHandle(super_frame->bufs[i]->stream_id);
+            if (pStream != NULL) {
+                if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+                    pMetaFrame = super_frame->bufs[i]; //find the metadata
+                    if (pMetaFrame != NULL &&
+                            ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
+                        pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "raw");
+                    }
+                    break;
+                }
+            }
+        }
+    }
+
+    // save a copy for the superbuf: the caller releases super_frame after this
+    // callback returns, so the postprocessor needs its own heap copy
+    mm_camera_super_buf_t* frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+    if (frame == NULL) {
+        LOGE("Error allocating memory to save received_frame structure.");
+        pChannel->bufDone(super_frame);
+        return;
+    }
+    *frame = *super_frame;
+
+    if (frame->num_bufs > 0) {
+        LOGI("[KPI Perf]: superbuf frame_idx %d",
+                frame->bufs[0]->frame_idx);
+    }
+
+    // Wait on Postproc initialization if needed
+    // then send to postprocessor
+    if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
+            (NO_ERROR != pme->m_postprocessor.processData(frame))) {
+        LOGE("Failed to trigger process data");
+        // On failure: give the buffers back to the channel, then drop the copy
+        pChannel->bufDone(super_frame);
+        free(frame);
+        frame = NULL;
+        return;
+    }
+
+    LOGH("[KPI Perf]: X");
+
+}
+
+/*===========================================================================
+ * FUNCTION   : preview_raw_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle raw frame during standard preview
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done.
+ *==========================================================================*/
+void QCamera2HardwareInterface::preview_raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
+                                                              QCameraStream * stream,
+                                                              void * userdata)
+{
+    ATRACE_CALL();
+    LOGH("[KPI Perf] : BEGIN");
+    char value[PROPERTY_VALUE_MAX];
+    bool dump_preview_raw = false, dump_video_raw = false;
+
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        LOGE("camera obj not valid");
+        // simply free super frame
+        free(super_frame);
+        return;
+    }
+
+    // Guard against an empty super buffer before touching bufs[0]
+    mm_camera_buf_def_t *raw_frame = NULL;
+    if (super_frame->num_bufs > 0) {
+        raw_frame = super_frame->bufs[0];
+    }
+
+    if (raw_frame != NULL) {
+        // Dump the raw frame when enabled for preview, or for video while
+        // the recording hint is set
+        property_get("persist.camera.preview_raw", value, "0");
+        dump_preview_raw = atoi(value) > 0 ? true : false;
+        property_get("persist.camera.video_raw", value, "0");
+        dump_video_raw = atoi(value) > 0 ? true : false;
+        if (dump_preview_raw || (pme->mParameters.getRecordingHintValue()
+                && dump_video_raw)) {
+            pme->dumpFrameToFile(stream, raw_frame, QCAMERA_DUMP_FRM_RAW);
+        }
+        // Return the buffer to the stream for reuse
+        stream->bufDone(raw_frame->buf_idx);
+    }
+    free(super_frame);
+
+    LOGH("[KPI Perf] : END");
+}
+
+/*===========================================================================
+ * FUNCTION   : snapshot_raw_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle raw frame during standard capture
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done.
+ *==========================================================================*/
+void QCamera2HardwareInterface::snapshot_raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
+                                                               QCameraStream * stream,
+                                                               void * userdata)
+{
+    ATRACE_CALL();
+    LOGH("[KPI Perf] : BEGIN");
+    char prop[PROPERTY_VALUE_MAX];
+
+    QCamera2HardwareInterface *hwi = (QCamera2HardwareInterface *)userdata;
+    if ((hwi == NULL) || (hwi->mCameraHandle == NULL) ||
+            (hwi->mCameraHandle->camera_handle != super_frame->camera_handle)) {
+        LOGE("camera obj not valid");
+        // simply free super frame
+        free(super_frame);
+        return;
+    }
+
+    // Dump is controlled by the persist.camera.snapshot_raw property
+    property_get("persist.camera.snapshot_raw", prop, "0");
+    bool dumpRaw = (atoi(prop) > 0);
+
+    // Locate the first RAW buffer in the super buffer, optionally dump it,
+    // and hand the buffer back to its stream
+    for (uint32_t idx = 0; idx < super_frame->num_bufs; idx++) {
+        mm_camera_buf_def_t *rawBuf = super_frame->bufs[idx];
+        if (rawBuf->stream_type != CAM_STREAM_TYPE_RAW) {
+            continue;
+        }
+        if (stream != NULL) {
+            if (dumpRaw) {
+                hwi->dumpFrameToFile(stream, rawBuf, QCAMERA_DUMP_FRM_RAW);
+            }
+            stream->bufDone(rawBuf->buf_idx);
+        }
+        break;
+    }
+
+    free(super_frame);
+
+    LOGH("[KPI Perf] : END");
+}
+
+/*===========================================================================
+ * FUNCTION   : updateMetadata
+ *
+ * DESCRIPTION: Frame related parameter can be updated here
+ *
+ * PARAMETERS :
+ *   @pMetaData : pointer to metadata buffer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::updateMetadata(metadata_buffer_t *pMetaData)
+{
+    int32_t rc = NO_ERROR;
+
+    if (pMetaData == NULL) {
+        LOGE("Null Metadata buffer");
+        // NOTE(review): rc is still NO_ERROR here, so a NULL buffer is
+        // reported as success to the caller -- confirm this is intended
+        return rc;
+    }
+
+    // Sharpness
+    // Edge mode is FAST whenever a non-zero sharpness level is configured
+    cam_edge_application_t edge_application;
+    memset(&edge_application, 0x00, sizeof(cam_edge_application_t));
+    edge_application.sharpness = mParameters.getSharpness();
+    if (edge_application.sharpness != 0) {
+        edge_application.edge_mode = CAM_EDGE_MODE_FAST;
+    } else {
+        edge_application.edge_mode = CAM_EDGE_MODE_OFF;
+    }
+    ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
+            CAM_INTF_META_EDGE_MODE, edge_application);
+
+    //Effect
+    int32_t prmEffect = mParameters.getEffect();
+    ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_PARM_EFFECT, prmEffect);
+
+    //flip (snapshot stream flip setting)
+    int32_t prmFlip = mParameters.getFlipMode(CAM_STREAM_TYPE_SNAPSHOT);
+    ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_PARM_FLIP, prmFlip);
+
+    //denoise (wavelet noise reduction on/off)
+    uint8_t prmDenoise = (uint8_t)mParameters.isWNREnabled();
+    ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
+            CAM_INTF_META_NOISE_REDUCTION_MODE, prmDenoise);
+
+    //rotation & device rotation
+    // Map degree values {0,90,180,270} to the rotation enums; any other JPEG
+    // rotation value leaves rotation at ROTATE_0 from the memset above
+    uint32_t prmRotation = mParameters.getJpegRotation();
+    cam_rotation_info_t rotation_info;
+    memset(&rotation_info, 0, sizeof(cam_rotation_info_t));
+    if (prmRotation == 0) {
+       rotation_info.rotation = ROTATE_0;
+    } else if (prmRotation == 90) {
+       rotation_info.rotation = ROTATE_90;
+    } else if (prmRotation == 180) {
+       rotation_info.rotation = ROTATE_180;
+    } else if (prmRotation == 270) {
+       rotation_info.rotation = ROTATE_270;
+    }
+
+    uint32_t device_rotation = mParameters.getDeviceRotation();
+    if (device_rotation == 0) {
+        rotation_info.device_rotation = ROTATE_0;
+    } else if (device_rotation == 90) {
+        rotation_info.device_rotation = ROTATE_90;
+    } else if (device_rotation == 180) {
+        rotation_info.device_rotation = ROTATE_180;
+    } else if (device_rotation == 270) {
+        rotation_info.device_rotation = ROTATE_270;
+    } else {
+        rotation_info.device_rotation = ROTATE_0;
+    }
+
+    ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_PARM_ROTATION, rotation_info);
+
+    // Imglib Dynamic Scene Data
+    // For StillMore, the input count is overridden by the burst count
+    cam_dyn_img_data_t dyn_img_data = mParameters.getDynamicImgData();
+    if (mParameters.isStillMoreEnabled()) {
+        cam_still_more_t stillmore_cap = mParameters.getStillMoreSettings();
+        dyn_img_data.input_count = stillmore_cap.burst_count;
+    }
+    ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
+            CAM_INTF_META_IMG_DYN_FEAT, dyn_img_data);
+
+    //CPP CDS (chroma drop/downsample mode)
+    int32_t prmCDSMode = mParameters.getCDSMode();
+    ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
+            CAM_INTF_PARM_CDS_MODE, prmCDSMode);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : metadata_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle metadata frame from metadata stream
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done. Metadata
+ *             could have valid entries for face detection result or
+ *             histogram statistics information.
+ *==========================================================================*/
+void QCamera2HardwareInterface::metadata_stream_cb_routine(mm_camera_super_buf_t * super_frame,
+                                                           QCameraStream * stream,
+                                                           void * userdata)
+{
+    ATRACE_CALL();
+    LOGD("[KPI Perf] : BEGIN");
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        LOGE("camera obj not valid");
+        // simply free super frame
+        free(super_frame);
+        return;
+    }
+
+    mm_camera_buf_def_t *frame = super_frame->bufs[0];
+    metadata_buffer_t *pMetaData = (metadata_buffer_t *)frame->buffer;
+    if(pme->m_stateMachine.isNonZSLCaptureRunning()&&
+       !pme->mLongshotEnabled) {
+       //Make shutter call back in non ZSL mode once raw frame is received from VFE.
+       pme->playShutter();
+    }
+
+    if (pMetaData->is_tuning_params_valid && pme->mParameters.getRecordingHintValue() == true) {
+        //Dump Tuning data for video
+        pme->dumpMetadataToFile(stream,frame,(char *)"Video");
+    }
+
+    IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, pMetaData) {
+        // process histogram statistics info
+        qcamera_sm_internal_evt_payload_t *payload =
+            (qcamera_sm_internal_evt_payload_t *)
+                malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+        if (NULL != payload) {
+            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+            payload->evt_type = QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS;
+            payload->stats_data = *stats_data;
+            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+            if (rc != NO_ERROR) {
+                LOGW("processEvt histogram failed");
+                free(payload);
+                payload = NULL;
+
+            }
+        } else {
+            LOGE("No memory for histogram qcamera_sm_internal_evt_payload_t");
+        }
+    }
+
+    IF_META_AVAILABLE(cam_face_detection_data_t, detection_data,
+            CAM_INTF_META_FACE_DETECTION, pMetaData) {
+
+        cam_faces_data_t faces_data;
+        pme->fillFacesData(faces_data, pMetaData);
+        faces_data.detection_data.fd_type = QCAMERA_FD_PREVIEW; //HARD CODE here before MCT can support
+
+        qcamera_sm_internal_evt_payload_t *payload = (qcamera_sm_internal_evt_payload_t *)
+            malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+        if (NULL != payload) {
+            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+            payload->evt_type = QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT;
+            payload->faces_data = faces_data;
+            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+            if (rc != NO_ERROR) {
+                LOGW("processEvt face detection failed");
+                free(payload);
+                payload = NULL;
+            }
+        } else {
+            LOGE("No memory for face detect qcamera_sm_internal_evt_payload_t");
+        }
+    }
+
+    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, pMetaData) {
+        uint8_t forceAFUpdate = FALSE;
+        //1. Earlier HAL used to rely on AF done flags set in metadata to generate callbacks to
+        //upper layers. But in scenarios where metadata drops especially which contain important
+        //AF information, APP will wait indefinitely for focus result resulting in capture hang.
+        //2. HAL can check for AF state transitions to generate AF state callbacks to upper layers.
+        //This will help overcome metadata drop issue with the earlier approach.
+        //3. But sometimes AF state transitions can happen so fast within same metadata due to
+        //which HAL will receive only the final AF state. HAL may perceive this as no change in AF
+        //state depending on the state transitions happened (for example state A -> B -> A).
+        //4. To overcome the drawbacks of both the approaches, we go for a hybrid model in which
+        //we check state transition at both HAL level and AF module level. We rely on
+        //'state transition' meta field set by AF module for the state transition detected by it.
+        IF_META_AVAILABLE(uint8_t, stateChange, CAM_INTF_AF_STATE_TRANSITION, pMetaData) {
+            forceAFUpdate = *stateChange;
+        }
+        //This is a special scenario in which when scene modes like landscape are selected, AF mode
+        //gets changed to INFINITY at backend, but HAL will not be aware of it. Also, AF state in
+        //such cases will be set to CAM_AF_STATE_INACTIVE by backend. So, detect the AF mode
+        //change here and trigger AF callback @ processAutoFocusEvent().
+        IF_META_AVAILABLE(uint32_t, afFocusMode, CAM_INTF_PARM_FOCUS_MODE, pMetaData) {
+            if (((cam_focus_mode_type)(*afFocusMode) == CAM_FOCUS_MODE_INFINITY) &&
+                    pme->mActiveAF){
+                forceAFUpdate = TRUE;
+            }
+        }
+        if ((pme->m_currentFocusState != (*afState)) || forceAFUpdate) {
+            cam_af_state_t prevFocusState = pme->m_currentFocusState;
+            pme->m_currentFocusState = (cam_af_state_t)(*afState);
+            qcamera_sm_internal_evt_payload_t *payload = (qcamera_sm_internal_evt_payload_t *)
+                    malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+            if (NULL != payload) {
+                memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+                payload->evt_type = QCAMERA_INTERNAL_EVT_FOCUS_UPDATE;
+                payload->focus_data.focus_state = (cam_af_state_t)(*afState);
+                //Need to flush ZSL Q only if we are transitioning from scanning state
+                //to focused/not focused state.
+                payload->focus_data.flush_info.needFlush =
+                        ((prevFocusState == CAM_AF_STATE_PASSIVE_SCAN) ||
+                        (prevFocusState == CAM_AF_STATE_ACTIVE_SCAN)) &&
+                        ((pme->m_currentFocusState == CAM_AF_STATE_FOCUSED_LOCKED) ||
+                        (pme->m_currentFocusState == CAM_AF_STATE_NOT_FOCUSED_LOCKED));
+                payload->focus_data.flush_info.focused_frame_idx = frame->frame_idx;
+
+                IF_META_AVAILABLE(float, focusDistance,
+                        CAM_INTF_META_LENS_FOCUS_DISTANCE, pMetaData) {
+                    payload->focus_data.focus_dist.
+                    focus_distance[CAM_FOCUS_DISTANCE_OPTIMAL_INDEX] = *focusDistance;
+                }
+                IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, pMetaData) {
+                    payload->focus_data.focus_dist.
+                            focus_distance[CAM_FOCUS_DISTANCE_NEAR_INDEX] = focusRange[0];
+                    payload->focus_data.focus_dist.
+                            focus_distance[CAM_FOCUS_DISTANCE_FAR_INDEX] = focusRange[1];
+                }
+                IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, pMetaData) {
+                    payload->focus_data.focus_mode = (cam_focus_mode_type)(*focusMode);
+                }
+                int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+                if (rc != NO_ERROR) {
+                    LOGW("processEvt focus failed");
+                    free(payload);
+                    payload = NULL;
+                }
+            } else {
+                LOGE("No memory for focus qcamera_sm_internal_evt_payload_t");
+            }
+        }
+    }
+
+    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, pMetaData) {
+        if (crop_data->num_of_streams > MAX_NUM_STREAMS) {
+            LOGE("Invalid num_of_streams %d in crop_data",
+                crop_data->num_of_streams);
+        } else {
+            qcamera_sm_internal_evt_payload_t *payload =
+                (qcamera_sm_internal_evt_payload_t *)
+                    malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+            if (NULL != payload) {
+                memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+                payload->evt_type = QCAMERA_INTERNAL_EVT_CROP_INFO;
+                payload->crop_data = *crop_data;
+                int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+                if (rc != NO_ERROR) {
+                    LOGE("processEvt crop info failed");
+                    free(payload);
+                    payload = NULL;
+                }
+            } else {
+                LOGE("No memory for prep_snapshot qcamera_sm_internal_evt_payload_t");
+            }
+        }
+    }
+
+    IF_META_AVAILABLE(int32_t, prep_snapshot_done_state,
+            CAM_INTF_META_PREP_SNAPSHOT_DONE, pMetaData) {
+        qcamera_sm_internal_evt_payload_t *payload =
+        (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+        if (NULL != payload) {
+            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+            payload->evt_type = QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE;
+            payload->prep_snapshot_state = (cam_prep_snapshot_state_t)*prep_snapshot_done_state;
+            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+            if (rc != NO_ERROR) {
+                LOGW("processEvt prep_snapshot failed");
+                free(payload);
+                payload = NULL;
+            }
+        } else {
+            LOGE("No memory for prep_snapshot qcamera_sm_internal_evt_payload_t");
+        }
+    }
+
+    IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
+            CAM_INTF_META_ASD_HDR_SCENE_DATA, pMetaData) {
+        LOGH("hdr_scene_data: %d %f\n",
+                hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
+        //Handle this HDR meta data only if capture is not in process
+        if (!pme->m_stateMachine.isCaptureRunning()) {
+            qcamera_sm_internal_evt_payload_t *payload =
+                    (qcamera_sm_internal_evt_payload_t *)
+                    malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+            if (NULL != payload) {
+                memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+                payload->evt_type = QCAMERA_INTERNAL_EVT_HDR_UPDATE;
+                payload->hdr_data = *hdr_scene_data;
+                int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+                if (rc != NO_ERROR) {
+                    LOGW("processEvt hdr update failed");
+                    free(payload);
+                    payload = NULL;
+                }
+            } else {
+                LOGE("No memory for hdr update qcamera_sm_internal_evt_payload_t");
+            }
+        }
+    }
+
+    IF_META_AVAILABLE(cam_asd_decision_t, cam_asd_info,
+            CAM_INTF_META_ASD_SCENE_INFO, pMetaData) {
+        qcamera_sm_internal_evt_payload_t *payload =
+            (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+        if (NULL != payload) {
+            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+            payload->evt_type = QCAMERA_INTERNAL_EVT_ASD_UPDATE;
+            payload->asd_data = (cam_asd_decision_t)*cam_asd_info;
+            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+            if (rc != NO_ERROR) {
+                LOGW("processEvt asd_update failed");
+                free(payload);
+                payload = NULL;
+            }
+        } else {
+            LOGE("No memory for asd_update qcamera_sm_internal_evt_payload_t");
+        }
+    }
+
+    IF_META_AVAILABLE(cam_awb_params_t, awb_params, CAM_INTF_META_AWB_INFO, pMetaData) {
+        LOGH(", metadata for awb params.");
+        qcamera_sm_internal_evt_payload_t *payload =
+                (qcamera_sm_internal_evt_payload_t *)
+                malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+        if (NULL != payload) {
+            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+            payload->evt_type = QCAMERA_INTERNAL_EVT_AWB_UPDATE;
+            payload->awb_data = *awb_params;
+            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+            if (rc != NO_ERROR) {
+                LOGW("processEvt awb_update failed");
+                free(payload);
+                payload = NULL;
+            }
+        } else {
+            LOGE("No memory for awb_update qcamera_sm_internal_evt_payload_t");
+        }
+    }
+
+    IF_META_AVAILABLE(uint32_t, flash_mode, CAM_INTF_META_FLASH_MODE, pMetaData) {
+        pme->mExifParams.sensor_params.flash_mode = (cam_flash_mode_t)*flash_mode;
+    }
+
+    IF_META_AVAILABLE(int32_t, flash_state, CAM_INTF_META_FLASH_STATE, pMetaData) {
+        pme->mExifParams.sensor_params.flash_state = (cam_flash_state_t) *flash_state;
+    }
+
+    IF_META_AVAILABLE(float, aperture_value, CAM_INTF_META_LENS_APERTURE, pMetaData) {
+        pme->mExifParams.sensor_params.aperture_value = *aperture_value;
+    }
+
+    IF_META_AVAILABLE(cam_3a_params_t, ae_params, CAM_INTF_META_AEC_INFO, pMetaData) {
+        pme->mExifParams.cam_3a_params = *ae_params;
+        pme->mExifParams.cam_3a_params_valid = TRUE;
+        pme->mFlashNeeded = ae_params->flash_needed;
+        pme->mExifParams.cam_3a_params.brightness = (float) pme->mParameters.getBrightness();
+        qcamera_sm_internal_evt_payload_t *payload =
+                (qcamera_sm_internal_evt_payload_t *)
+                malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+        if (NULL != payload) {
+            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+            payload->evt_type = QCAMERA_INTERNAL_EVT_AE_UPDATE;
+            payload->ae_data = *ae_params;
+            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+            if (rc != NO_ERROR) {
+                LOGW("processEvt ae_update failed");
+                free(payload);
+                payload = NULL;
+            }
+        } else {
+            LOGE("No memory for ae_update qcamera_sm_internal_evt_payload_t");
+        }
+    }
+
+    IF_META_AVAILABLE(int32_t, wb_mode, CAM_INTF_PARM_WHITE_BALANCE, pMetaData) {
+        pme->mExifParams.cam_3a_params.wb_mode = (cam_wb_mode_type) *wb_mode;
+    }
+
+    IF_META_AVAILABLE(cam_sensor_params_t, sensor_params, CAM_INTF_META_SENSOR_INFO, pMetaData) {
+        pme->mExifParams.sensor_params = *sensor_params;
+    }
+
+    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
+            CAM_INTF_META_EXIF_DEBUG_AE, pMetaData) {
+        if (pme->mExifParams.debug_params) {
+            pme->mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
+            pme->mExifParams.debug_params->ae_debug_params_valid = TRUE;
+        }
+    }
+
+    IF_META_AVAILABLE(cam_awb_exif_debug_t, awb_exif_debug_params,
+            CAM_INTF_META_EXIF_DEBUG_AWB, pMetaData) {
+        if (pme->mExifParams.debug_params) {
+            pme->mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
+            pme->mExifParams.debug_params->awb_debug_params_valid = TRUE;
+        }
+    }
+
+    IF_META_AVAILABLE(cam_af_exif_debug_t, af_exif_debug_params,
+            CAM_INTF_META_EXIF_DEBUG_AF, pMetaData) {
+        if (pme->mExifParams.debug_params) {
+            pme->mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
+            pme->mExifParams.debug_params->af_debug_params_valid = TRUE;
+        }
+    }
+
+    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
+            CAM_INTF_META_EXIF_DEBUG_ASD, pMetaData) {
+        if (pme->mExifParams.debug_params) {
+            pme->mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
+            pme->mExifParams.debug_params->asd_debug_params_valid = TRUE;
+        }
+    }
+
+    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t, stats_exif_debug_params,
+            CAM_INTF_META_EXIF_DEBUG_STATS, pMetaData) {
+        if (pme->mExifParams.debug_params) {
+            pme->mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
+            pme->mExifParams.debug_params->stats_debug_params_valid = TRUE;
+        }
+    }
+
+    IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t, bestats_exif_debug_params,
+            CAM_INTF_META_EXIF_DEBUG_BESTATS, pMetaData) {
+        if (pme->mExifParams.debug_params) {
+            pme->mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
+            pme->mExifParams.debug_params->bestats_debug_params_valid = TRUE;
+        }
+    }
+
+    IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
+            CAM_INTF_META_EXIF_DEBUG_BHIST, pMetaData) {
+        if (pme->mExifParams.debug_params) {
+            pme->mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
+            pme->mExifParams.debug_params->bhist_debug_params_valid = TRUE;
+        }
+    }
+
+    IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
+            CAM_INTF_META_EXIF_DEBUG_3A_TUNING, pMetaData) {
+        if (pme->mExifParams.debug_params) {
+            pme->mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
+            pme->mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
+        }
+    }
+
+    IF_META_AVAILABLE(uint32_t, led_mode, CAM_INTF_META_LED_MODE_OVERRIDE, pMetaData) {
+        qcamera_sm_internal_evt_payload_t *payload =
+                (qcamera_sm_internal_evt_payload_t *)
+                malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+        if (NULL != payload) {
+            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+            payload->evt_type = QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE;
+            payload->led_data = (cam_flash_mode_t)*led_mode;
+            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+            if (rc != NO_ERROR) {
+                LOGW("processEvt led mode override failed");
+                free(payload);
+                payload = NULL;
+            }
+        } else {
+            LOGE("No memory for focus qcamera_sm_internal_evt_payload_t");
+        }
+    }
+
+    cam_edge_application_t edge_application;
+    memset(&edge_application, 0x00, sizeof(cam_edge_application_t));
+    edge_application.sharpness = pme->mParameters.getSharpness();
+    if (edge_application.sharpness != 0) {
+        edge_application.edge_mode = CAM_EDGE_MODE_FAST;
+    } else {
+        edge_application.edge_mode = CAM_EDGE_MODE_OFF;
+    }
+    ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_META_EDGE_MODE, edge_application);
+
+    IF_META_AVAILABLE(cam_focus_pos_info_t, cur_pos_info,
+            CAM_INTF_META_FOCUS_POSITION, pMetaData) {
+        qcamera_sm_internal_evt_payload_t *payload =
+            (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+        if (NULL != payload) {
+            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+            payload->evt_type = QCAMERA_INTERNAL_EVT_FOCUS_POS_UPDATE;
+            payload->focus_pos = *cur_pos_info;
+            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+            if (rc != NO_ERROR) {
+                LOGW("processEvt focus_pos_update failed");
+                free(payload);
+                payload = NULL;
+            }
+        } else {
+            LOGE("No memory for focus_pos_update qcamera_sm_internal_evt_payload_t");
+        }
+    }
+
+    if (pme->mParameters.getLowLightCapture()) {
+        IF_META_AVAILABLE(cam_low_light_mode_t, low_light_level,
+                CAM_INTF_META_LOW_LIGHT, pMetaData) {
+            pme->mParameters.setLowLightLevel(*low_light_level);
+        }
+    }
+
+    IF_META_AVAILABLE(cam_dyn_img_data_t, dyn_img_data,
+            CAM_INTF_META_IMG_DYN_FEAT, pMetaData) {
+        pme->mParameters.setDynamicImgData(*dyn_img_data);
+    }
+
+    IF_META_AVAILABLE(int32_t, touch_ae_status, CAM_INTF_META_TOUCH_AE_RESULT, pMetaData) {
+      LOGD("touch_ae_status: %d", *touch_ae_status);
+    }
+
+    stream->bufDone(frame->buf_idx);
+    free(super_frame);
+
+    LOGD("[KPI Perf] : END");
+}
+
+/*===========================================================================
+ * FUNCTION   : reprocess_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle reprocess frame from reprocess stream
+                (after reprocess, e.g., ZSL snapshot frame after WNR if
+ *              WNR is enabled)
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done. In this
+ *             case, reprocessed frame need to be passed to postprocessor
+ *             for jpeg encoding.
+ *==========================================================================*/
+void QCamera2HardwareInterface::reprocess_stream_cb_routine(mm_camera_super_buf_t * super_frame,
+                                                            QCameraStream * /*stream*/,
+                                                            void * userdata)
+{
+    ATRACE_CALL();
+    LOGH("[KPI Perf]: E");
+    // Guard super_frame before it is dereferenced in the camera-handle
+    // comparison below; previously a NULL super_frame would crash here.
+    if (super_frame == NULL) {
+        LOGE("super_frame is NULL");
+        return;
+    }
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        LOGE("camera obj not valid");
+        // simply free super frame
+        free(super_frame);
+        return;
+    }
+
+    // Ownership of super_frame transfers to the postprocessor, which is
+    // responsible for releasing it once JPEG encoding is done.
+    pme->m_postprocessor.processPPData(super_frame);
+
+    LOGH("[KPI Perf]: X");
+}
+
+/*===========================================================================
+ * FUNCTION   : callback_stream_cb_routine
+ *
+ * DESCRIPTION: processes frames arriving on the CALLBACK stream and
+ *              forwards them to the framework via the preview data callback
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::callback_stream_cb_routine(mm_camera_super_buf_t *super_frame,
+        QCameraStream *stream, void *userdata)
+{
+    ATRACE_CALL();
+    LOGH("[KPI Perf]: E");
+    QCamera2HardwareInterface *hwi = (QCamera2HardwareInterface *)userdata;
+
+    bool objValid = (hwi != NULL) &&
+            (hwi->mCameraHandle != NULL) &&
+            (hwi->mCameraHandle->camera_handle == super_frame->camera_handle);
+    if (!objValid) {
+        LOGE("camera obj not valid");
+        // simply free super frame
+        free(super_frame);
+        return;
+    }
+
+    mm_camera_buf_def_t *cbFrame = super_frame->bufs[0];
+    if (NULL == cbFrame) {
+        LOGE("preview callback frame is NULL");
+        free(super_frame);
+        return;
+    }
+
+    if (!hwi->needProcessPreviewFrame(cbFrame->frame_idx)) {
+        LOGH("preview is not running, no need to process");
+        stream->bufDone(cbFrame->buf_idx);
+        free(super_frame);
+        return;
+    }
+
+    QCameraMemory *memObj = (QCameraMemory *)cbFrame->mem_info;
+    // Deliver the frame only when a data callback is registered, preview
+    // frame messages are enabled, and scene selection is not active.
+    bool wantCallback = (hwi->mDataCb != NULL) &&
+            (hwi->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) &&
+            (!hwi->mParameters.isSceneSelectionEnabled());
+    if (wantCallback) {
+        int32_t cbRet = hwi->sendPreviewCallback(stream, memObj, cbFrame->buf_idx);
+        if (NO_ERROR != cbRet) {
+            LOGE("Preview callback was not sent succesfully");
+        }
+    }
+    stream->bufDone(cbFrame->buf_idx);
+    free(super_frame);
+    LOGH("[KPI Perf]: X");
+}
+
+/*===========================================================================
+ * FUNCTION   : dumpJpegToFile
+ *
+ * DESCRIPTION: helper function to dump jpeg into file for debug purpose.
+ *              Controlled by the persist.camera.dumpimg property:
+ *              bits 16-31 = number of frames to dump, bits 8-15 = skip
+ *              interval, low bits = dump-type mask (QCAMERA_DUMP_FRM_JPEG).
+ *
+ * PARAMETERS :
+ *    @data : data ptr
+ *    @size : length of data buffer
+ *    @index : identifier for data
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::dumpJpegToFile(const void *data,
+        size_t size, uint32_t index)
+{
+    char value[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.dumpimg", value, "0");
+    uint32_t enabled = (uint32_t) atoi(value);
+    uint32_t frm_num = 0;
+    uint32_t skip_mode = 0;
+
+    char buf[32];
+    cam_dimension_t dim;
+    memset(buf, 0, sizeof(buf));
+    memset(&dim, 0, sizeof(dim));
+
+    // Dump when JPEG dumping is enabled via property, or when an internal
+    // (backend-triggered) JPEG event is pending.
+    if(((enabled & QCAMERA_DUMP_FRM_JPEG) && data) ||
+        ((true == m_bIntJpegEvtPending) && data)) {
+        frm_num = ((enabled & 0xffff0000) >> 16);
+        if(frm_num == 0) {
+            frm_num = 10; //default 10 frames
+        }
+        if(frm_num > 256) {
+            frm_num = 256; //256 buffers cycle around
+        }
+        skip_mode = ((enabled & 0x0000ff00) >> 8);
+        if(skip_mode == 0) {
+            skip_mode = 1; //no-skip
+        }
+
+        if( mDumpSkipCnt % skip_mode == 0) {
+            if((frm_num == 256) && (mDumpFrmCnt >= frm_num)) {
+                // reset frame count if cycling
+                mDumpFrmCnt = 0;
+            }
+            // NOTE(review): '<=' dumps frm_num + 1 frames; dumpMetadataToFile
+            // uses '<' for the equivalent check -- confirm which is intended.
+            if (mDumpFrmCnt <= frm_num) {
+                snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION "%d_%d.jpg",
+                        mDumpFrmCnt, index);
+                if (true == m_bIntJpegEvtPending) {
+                    // Record file name/size so the pending internal JPEG
+                    // event can report where the image was stored.
+                    strlcpy(m_BackendFileName, buf, QCAMERA_MAX_FILEPATH_LENGTH);
+                    mBackendFileSize = size;
+                }
+
+                int file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+                if (file_fd >= 0) {
+                    ssize_t written_len = write(file_fd, data, size);
+                    fchmod(file_fd, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
+                    LOGH("written number of bytes %zd\n",
+                             written_len);
+                    close(file_fd);
+                } else {
+                    LOGE("fail to open file for image dumping");
+                }
+                // The internal-event path intentionally does not advance the
+                // frame counter; only property-driven dumps count frames.
+                if (false == m_bIntJpegEvtPending) {
+                    mDumpFrmCnt++;
+                }
+            }
+        }
+        mDumpSkipCnt++;
+    }
+}
+
+
+/*===========================================================================
+ * FUNCTION   : dumpMetadataToFile
+ *
+ * DESCRIPTION: helper function to dump tuning metadata into a file for
+ *              debug purpose. Controlled by the persist.camera.dumpmetadata
+ *              property (bits 16-31 select how many frames to dump).
+ *
+ * PARAMETERS :
+ *    @stream : stream object the metadata frame belongs to
+ *    @frame  : metadata frame buffer
+ *    @type   : tag string embedded into the dump file name
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::dumpMetadataToFile(QCameraStream *stream,
+                                                   mm_camera_buf_def_t *frame,char *type)
+{
+    char value[PROPERTY_VALUE_MAX];
+    uint32_t frm_num = 0;
+    // Validate all inputs before use: the previous version dereferenced
+    // frame->buffer before any NULL check and never validated frame itself.
+    if ((stream == NULL) || (frame == NULL) || (frame->buffer == NULL)) {
+        LOGH("No op");
+        return;
+    }
+    metadata_buffer_t *metadata = (metadata_buffer_t *)frame->buffer;
+    property_get("persist.camera.dumpmetadata", value, "0");
+    uint32_t enabled = (uint32_t) atoi(value);
+
+    uint32_t dumpFrmCnt = stream->mDumpMetaFrame;
+    if(enabled){
+        frm_num = ((enabled & 0xffff0000) >> 16);
+        if (frm_num == 0) {
+            frm_num = 10; //default 10 frames
+        }
+        if (frm_num > 256) {
+            frm_num = 256; //256 buffers cycle around
+        }
+        if ((frm_num == 256) && (dumpFrmCnt >= frm_num)) {
+            // reset frame count if cycling
+            dumpFrmCnt = 0;
+        }
+        LOGH("dumpFrmCnt= %u, frm_num = %u", dumpFrmCnt, frm_num);
+        if (dumpFrmCnt < frm_num) {
+            char timeBuf[128];
+            char buf[32];
+            memset(buf, 0, sizeof(buf));
+            memset(timeBuf, 0, sizeof(timeBuf));
+            time_t current_time;
+            struct tm * timeinfo;
+            time (&current_time);
+            timeinfo = localtime (&current_time);
+            if (NULL != timeinfo) {
+                strftime(timeBuf, sizeof(timeBuf),
+                        QCAMERA_DUMP_FRM_LOCATION "%Y%m%d%H%M%S", timeinfo);
+            }
+            String8 filePath(timeBuf);
+            snprintf(buf, sizeof(buf), "%um_%s_%d.bin", dumpFrmCnt, type, frame->frame_idx);
+            filePath.append(buf);
+            int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
+            if (file_fd >= 0) {
+                ssize_t written_len = 0;
+                metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
+                // Header: data version followed by the five section sizes.
+                void *data = (void *)((uint8_t *)&metadata->tuning_params.tuning_data_version);
+                written_len += write(file_fd, data, sizeof(uint32_t));
+                data = (void *)((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size);
+                LOGH("tuning_sensor_data_size %d",(int)(*(int *)data));
+                written_len += write(file_fd, data, sizeof(uint32_t));
+                data = (void *)((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size);
+                LOGH("tuning_vfe_data_size %d",(int)(*(int *)data));
+                written_len += write(file_fd, data, sizeof(uint32_t));
+                data = (void *)((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size);
+                LOGH("tuning_cpp_data_size %d",(int)(*(int *)data));
+                written_len += write(file_fd, data, sizeof(uint32_t));
+                data = (void *)((uint8_t *)&metadata->tuning_params.tuning_cac_data_size);
+                LOGH("tuning_cac_data_size %d",(int)(*(int *)data));
+                written_len += write(file_fd, data, sizeof(uint32_t));
+                data = (void *)((uint8_t *)&metadata->tuning_params.tuning_cac_data_size2);
+                LOGH("tuning_cac_data_size2 %d",(int)(*(int *)data));
+                written_len += write(file_fd, data, sizeof(uint32_t));
+                // Payload: sensor, VFE, CPP and CAC sections at their fixed
+                // offsets within the tuning data blob.
+                size_t total_size = metadata->tuning_params.tuning_sensor_data_size;
+                data = (void *)((uint8_t *)&metadata->tuning_params.data);
+                written_len += write(file_fd, data, total_size);
+                total_size = metadata->tuning_params.tuning_vfe_data_size;
+                data = (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]);
+                written_len += write(file_fd, data, total_size);
+                total_size = metadata->tuning_params.tuning_cpp_data_size;
+                data = (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]);
+                written_len += write(file_fd, data, total_size);
+                total_size = metadata->tuning_params.tuning_cac_data_size;
+                data = (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]);
+                written_len += write(file_fd, data, total_size);
+                close(file_fd);
+            }else {
+                LOGE("fail to open file for image dumping");
+            }
+            dumpFrmCnt++;
+        }
+    }
+    stream->mDumpMetaFrame = dumpFrmCnt;
+}
+/*===========================================================================
+ * FUNCTION   : dumpFrameToFile
+ *
+ * DESCRIPTION: helper function to dump frame into file for debug purpose.
+ *              Controlled by persist.camera.dumpimg: bits 16-31 = number of
+ *              frames, bits 8-15 = skip interval, low bits = dump-type mask.
+ *
+ * PARAMETERS :
+ *    @data : data ptr
+ *    @size : length of data buffer
+ *    @index : identifier for data
+ *    @dump_type : type of the frame to be dumped. Only such
+ *                 dump type is enabled, the frame will be
+ *                 dumped into a file.
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::dumpFrameToFile(QCameraStream *stream,
+        mm_camera_buf_def_t *frame, uint32_t dump_type, const char *misc)
+{
+    char value[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.dumpimg", value, "0");
+    uint32_t enabled = (uint32_t) atoi(value);
+    uint32_t frm_num = 0;
+    uint32_t skip_mode = 0;
+
+    if (NULL == stream) {
+        LOGE("stream object is null");
+        return;
+    }
+
+    uint32_t dumpFrmCnt = stream->mDumpFrame;
+
+    // A pending internal raw capture event forces RAW dumping regardless of
+    // the property setting.
+    if (true == m_bIntRawEvtPending) {
+        enabled = QCAMERA_DUMP_FRM_RAW;
+    }
+
+    if((enabled & QCAMERA_DUMP_FRM_MASK_ALL)) {
+        if((enabled & dump_type) && stream && frame) {
+            frm_num = ((enabled & 0xffff0000) >> 16);
+            if(frm_num == 0) {
+                frm_num = 10; //default 10 frames
+            }
+            if(frm_num > 256) {
+                frm_num = 256; //256 buffers cycle around
+            }
+            skip_mode = ((enabled & 0x0000ff00) >> 8);
+            if(skip_mode == 0) {
+                skip_mode = 1; //no-skip
+            }
+            if(stream->mDumpSkipCnt == 0)
+                stream->mDumpSkipCnt = 1;
+
+            if( stream->mDumpSkipCnt % skip_mode == 0) {
+                if((frm_num == 256) && (dumpFrmCnt >= frm_num)) {
+                    // reset frame count if cycling
+                    dumpFrmCnt = 0;
+                }
+                if (dumpFrmCnt <= frm_num) {
+                    char buf[32];
+                    char timeBuf[128];
+                    time_t current_time;
+                    struct tm * timeinfo;
+
+                    memset(timeBuf, 0, sizeof(timeBuf));
+
+                    time (&current_time);
+                    timeinfo = localtime (&current_time);
+                    memset(buf, 0, sizeof(buf));
+
+                    cam_dimension_t dim;
+                    memset(&dim, 0, sizeof(dim));
+                    stream->getFrameDimension(dim);
+
+                    cam_frame_len_offset_t offset;
+                    memset(&offset, 0, sizeof(cam_frame_len_offset_t));
+                    stream->getFrameOffset(offset);
+
+                    if (NULL != timeinfo) {
+                        strftime(timeBuf, sizeof(timeBuf),
+                                QCAMERA_DUMP_FRM_LOCATION "%Y%m%d%H%M%S", timeinfo);
+                    }
+                    String8 filePath(timeBuf);
+                    // Build a per-type file name encoding frame count,
+                    // dimensions and frame index.
+                    switch (dump_type) {
+                    case QCAMERA_DUMP_FRM_PREVIEW:
+                        {
+                            snprintf(buf, sizeof(buf), "%dp_%dx%d_%d.yuv",
+                                    dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
+                        }
+                        break;
+                    case QCAMERA_DUMP_FRM_THUMBNAIL:
+                        {
+                            snprintf(buf, sizeof(buf), "%dt_%dx%d_%d.yuv",
+                                    dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
+                        }
+                        break;
+                    case QCAMERA_DUMP_FRM_SNAPSHOT:
+                        {
+                            if (!mParameters.isPostProcScaling()) {
+                                mParameters.getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT, dim);
+                            } else {
+                                stream->getFrameDimension(dim);
+                            }
+                            if (misc != NULL) {
+                                snprintf(buf, sizeof(buf), "%ds_%dx%d_%d_%s.yuv",
+                                        dumpFrmCnt, dim.width, dim.height, frame->frame_idx, misc);
+                            } else {
+                                snprintf(buf, sizeof(buf), "%ds_%dx%d_%d.yuv",
+                                        dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
+                            }
+                        }
+                        break;
+                    case QCAMERA_DUMP_FRM_INPUT_REPROCESS:
+                        {
+                            stream->getFrameDimension(dim);
+                            if (misc != NULL) {
+                                snprintf(buf, sizeof(buf), "%dir_%dx%d_%d_%s.yuv",
+                                        dumpFrmCnt, dim.width, dim.height, frame->frame_idx, misc);
+                            } else {
+                                snprintf(buf, sizeof(buf), "%dir_%dx%d_%d.yuv",
+                                        dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
+                            }
+                        }
+                        break;
+                    case QCAMERA_DUMP_FRM_VIDEO:
+                        {
+                            snprintf(buf, sizeof(buf), "%dv_%dx%d_%d.yuv",
+                                    dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
+                        }
+                        break;
+                    case QCAMERA_DUMP_FRM_RAW:
+                        {
+                            mParameters.getStreamDimension(CAM_STREAM_TYPE_RAW, dim);
+                            snprintf(buf, sizeof(buf), "%dr_%dx%d_%d.raw",
+                                    dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
+                        }
+                        break;
+                    case QCAMERA_DUMP_FRM_JPEG:
+                        {
+                            mParameters.getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT, dim);
+                            snprintf(buf, sizeof(buf), "%dj_%dx%d_%d.yuv",
+                                    dumpFrmCnt, dim.width, dim.height, frame->frame_idx);
+                        }
+                        break;
+                    default:
+                        LOGE("Not supported for dumping stream type %d",
+                               dump_type);
+                        return;
+                    }
+
+                    filePath.append(buf);
+                    int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
+                    ssize_t written_len = 0;
+                    if (file_fd >= 0) {
+                        void *data = NULL;
+
+                        fchmod(file_fd, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
+                        // Write plane by plane; each plane may carry a meta
+                        // prefix, and rows are copied width bytes at a time
+                        // to strip stride padding.
+                        for (uint32_t i = 0; i < offset.num_planes; i++) {
+                            uint32_t index = offset.mp[i].offset;
+                            if (i > 0) {
+                                index += offset.mp[i-1].len;
+                            }
+
+                            if (offset.mp[i].meta_len != 0) {
+                                data = (void *)((uint8_t *)frame->buffer + index);
+                                written_len += write(file_fd, data,
+                                        (size_t)offset.mp[i].meta_len);
+                                index += (uint32_t)offset.mp[i].meta_len;
+                            }
+
+                            for (int j = 0; j < offset.mp[i].height; j++) {
+                                data = (void *)((uint8_t *)frame->buffer + index);
+                                written_len += write(file_fd, data,
+                                        (size_t)offset.mp[i].width);
+                                index += (uint32_t)offset.mp[i].stride;
+                            }
+                        }
+
+                        // NOTE(review): written_len is ssize_t; %zd would be
+                        // the exact format specifier -- confirm before change.
+                        LOGH("written number of bytes %ld\n",
+                             written_len);
+                        close(file_fd);
+                    } else {
+                        LOGE("fail to open file for image dumping");
+                    }
+                    if (true == m_bIntRawEvtPending) {
+                        // Internal raw capture: record name/size for the
+                        // backend event instead of counting the frame.
+                        strlcpy(m_BackendFileName, filePath.string(), QCAMERA_MAX_FILEPATH_LENGTH);
+                        mBackendFileSize = (size_t)written_len;
+                    } else {
+                        dumpFrmCnt++;
+                    }
+                }
+            }
+            stream->mDumpSkipCnt++;
+        }
+    } else {
+        dumpFrmCnt = 0;
+    }
+    stream->mDumpFrame = dumpFrmCnt;
+}
+
+/*===========================================================================
+ * FUNCTION   : debugShowVideoFPS
+ *
+ * DESCRIPTION: logs the measured video frame rate roughly every 250 ms
+ *              for profiling purposes.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::debugShowVideoFPS()
+{
+    mVFrameCount++;
+    nsecs_t currentTs = systemTime();
+    nsecs_t elapsed = currentTs - mVLastFpsTime;
+    // Only report once the sampling window (250 ms) has elapsed.
+    if (elapsed > ms2ns(250)) {
+        double framesInWindow = (double)(mVFrameCount - mVLastFrameCount);
+        mVFps = (framesInWindow * (double)(s2ns(1))) / (double)elapsed;
+        LOGI("[KPI Perf]: PROFILE_VIDEO_FRAMES_PER_SECOND: %.4f Cam ID = %d",
+                mVFps, mCameraId);
+        mVLastFpsTime = currentTs;
+        mVLastFrameCount = mVFrameCount;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : debugShowPreviewFPS
+ *
+ * DESCRIPTION: logs the measured preview frame rate roughly every 250 ms
+ *              for profiling purposes.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::debugShowPreviewFPS()
+{
+    mPFrameCount++;
+    nsecs_t currentTs = systemTime();
+    nsecs_t elapsed = currentTs - mPLastFpsTime;
+    // Only report once the sampling window (250 ms) has elapsed.
+    if (elapsed > ms2ns(250)) {
+        double framesInWindow = (double)(mPFrameCount - mPLastFrameCount);
+        mPFps = (framesInWindow * (double)(s2ns(1))) / (double)elapsed;
+        LOGI("[KPI Perf]: PROFILE_PREVIEW_FRAMES_PER_SECOND : %.4f Cam ID = %d",
+                 mPFps, mCameraId);
+        mPLastFpsTime = currentTs;
+        mPLastFrameCount = mPFrameCount;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : fillFacesData
+ *
+ * DESCRIPTION: helper function to fill in face related metadata into a struct.
+ *
+ * PARAMETERS :
+ *   @faces_data : face features data to be filled
+ *   @metadata   : metadata structure to read face features from
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::fillFacesData(cam_faces_data_t &faces_data,
+        metadata_buffer_t *metadata)
+{
+    // Start from a clean slate so any feature absent from the metadata is
+    // left zeroed / marked invalid.
+    memset(&faces_data, 0, sizeof(cam_faces_data_t));
+
+    IF_META_AVAILABLE(cam_face_detection_data_t, p_detection_data,
+            CAM_INTF_META_FACE_DETECTION, metadata) {
+        faces_data.detection_data = *p_detection_data;
+        // Clamp to the maximum number of ROIs the HAL supports.
+        if (faces_data.detection_data.num_faces_detected > MAX_ROI) {
+            faces_data.detection_data.num_faces_detected = MAX_ROI;
+        }
+
+        LOGH("[KPI Perf] PROFILE_NUMBER_OF_FACES_DETECTED %d",
+                faces_data.detection_data.num_faces_detected);
+
+        // The per-face sub-features below are copied (and flagged valid)
+        // only when base face detection data is present.
+        IF_META_AVAILABLE(cam_face_recog_data_t, p_recog_data,
+                CAM_INTF_META_FACE_RECOG, metadata) {
+            faces_data.recog_valid = true;
+            faces_data.recog_data = *p_recog_data;
+        }
+
+        IF_META_AVAILABLE(cam_face_blink_data_t, p_blink_data,
+                CAM_INTF_META_FACE_BLINK, metadata) {
+            faces_data.blink_valid = true;
+            faces_data.blink_data = *p_blink_data;
+        }
+
+        IF_META_AVAILABLE(cam_face_gaze_data_t, p_gaze_data,
+                CAM_INTF_META_FACE_GAZE, metadata) {
+            faces_data.gaze_valid = true;
+            faces_data.gaze_data = *p_gaze_data;
+        }
+
+        IF_META_AVAILABLE(cam_face_smile_data_t, p_smile_data,
+                CAM_INTF_META_FACE_SMILE, metadata) {
+            faces_data.smile_valid = true;
+            faces_data.smile_data = *p_smile_data;
+        }
+
+        IF_META_AVAILABLE(cam_face_landmarks_data_t, p_landmarks,
+                CAM_INTF_META_FACE_LANDMARK, metadata) {
+            faces_data.landmark_valid = true;
+            faces_data.landmark_data = *p_landmarks;
+        }
+
+        IF_META_AVAILABLE(cam_face_contour_data_t, p_contour,
+                CAM_INTF_META_FACE_CONTOUR, metadata) {
+            faces_data.contour_valid = true;
+            faces_data.contour_data = *p_contour;
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraCbNotifier
+ *
+ * DESCRIPTION: Destructor for exiting the callback context.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraCbNotifier::~QCameraCbNotifier()
+{
+    // Intentionally empty: thread teardown is performed explicitly via exit().
+}
+
+/*===========================================================================
+ * FUNCTION   : exit
+ *
+ * DESCRIPTION: exit notify thread.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraCbNotifier::exit()
+{
+    // Mark the notifier inactive before terminating the processing thread.
+    mActive = false;
+    mProcTh.exit();
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseNotifications
+ *
+ * DESCRIPTION: callback for releasing data stored in the callback queue.
+ *
+ * PARAMETERS :
+ *   @data      : data to be released
+ *   @user_data : context data
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraCbNotifier::releaseNotifications(void *data, void *user_data)
+{
+    qcamera_callback_argm_t *cb = (qcamera_callback_argm_t *)data;
+
+    // Nothing to release without both a queued argument and a valid context.
+    if ((NULL == cb) || (NULL == user_data)) {
+        return;
+    }
+    if (cb->release_cb) {
+        cb->release_cb(cb->user_data, cb->cookie, FAILED_TRANSACTION);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : matchSnapshotNotifications
+ *
+ * DESCRIPTION: matches snapshot data callbacks
+ *
+ * PARAMETERS :
+ *   @data      : data to match
+ *   @user_data : context data
+ *
+ * RETURN     : bool match
+ *              true - match found
+ *              false- match not found
+ *==========================================================================*/
+bool QCameraCbNotifier::matchSnapshotNotifications(void *data,
+                                                   void */*user_data*/)
+{
+    qcamera_callback_argm_t *cb = (qcamera_callback_argm_t *)data;
+    // A queue entry matches when it carries a snapshot data callback.
+    return (NULL != cb) && (QCAMERA_DATA_SNAPSHOT_CALLBACK == cb->cb_type);
+}
+
+/*===========================================================================
+ * FUNCTION   : matchPreviewNotifications
+ *
+ * DESCRIPTION: matches preview data callbacks
+ *
+ * PARAMETERS :
+ *   @data      : data to match
+ *   @user_data : context data
+ *
+ * RETURN     : bool match
+ *              true - match found
+ *              false- match not found
+ *==========================================================================*/
+bool QCameraCbNotifier::matchPreviewNotifications(void *data,
+        void */*user_data*/)
+{
+    qcamera_callback_argm_t *cb = (qcamera_callback_argm_t *)data;
+    // A queue entry matches when it is a data callback carrying a
+    // preview frame message.
+    return (NULL != cb) &&
+            (QCAMERA_DATA_CALLBACK == cb->cb_type) &&
+            (CAMERA_MSG_PREVIEW_FRAME == cb->msg_type);
+}
+
+/*===========================================================================
+ * FUNCTION   : matchTimestampNotifications
+ *
+ * DESCRIPTION: matches timestamp data callbacks
+ *
+ * PARAMETERS :
+ *   @data      : data to match
+ *   @user_data : context data
+ *
+ * RETURN     : bool match
+ *              true - match found
+ *              false- match not found
+ *==========================================================================*/
+bool QCameraCbNotifier::matchTimestampNotifications(void *data,
+        void */*user_data*/)
+{
+    qcamera_callback_argm_t *cb = (qcamera_callback_argm_t *)data;
+    // A queue entry matches when it is a timestamped data callback carrying
+    // a video frame message.
+    return (NULL != cb) &&
+            (QCAMERA_DATA_TIMESTAMP_CALLBACK == cb->cb_type) &&
+            (CAMERA_MSG_VIDEO_FRAME == cb->msg_type);
+}
+
+/*===========================================================================
+ * FUNCTION   : cbNotifyRoutine
+ *
+ * DESCRIPTION: callback thread which interfaces with the upper layers
+ *              given input commands.
+ *
+ * PARAMETERS :
+ *   @data    : context data
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void * QCameraCbNotifier::cbNotifyRoutine(void * data)
+{
+    int running = 1;
+    int ret;
+    QCameraCbNotifier *pme = (QCameraCbNotifier *)data;
+    QCameraCmdThread *cmdThread = &pme->mProcTh;
+    cmdThread->setName("CAM_cbNotify");
+    uint8_t isSnapshotActive = FALSE;
+    bool longShotEnabled = false;
+    uint32_t numOfSnapshotExpected = 0;
+    uint32_t numOfSnapshotRcvd = 0;
+    int32_t cbStatus = NO_ERROR;
+
+    LOGD("E");
+    do {
+        do {
+            ret = cam_sem_wait(&cmdThread->cmd_sem);
+            if (ret != 0 && errno != EINVAL) {
+                LOGD("cam_sem_wait error (%s)",
+                            strerror(errno));
+                return NULL;
+            }
+        } while (ret != 0);
+
+        camera_cmd_type_t cmd = cmdThread->getCmd();
+        LOGD("get cmd %d", cmd);
+        switch (cmd) {
+        case CAMERA_CMD_TYPE_START_DATA_PROC:
+            {
+                // Enter snapshot mode: latch how many snapshot callbacks to
+                // expect before signaling QCAMERA_SM_EVT_SNAPSHOT_DONE.
+                isSnapshotActive = TRUE;
+                numOfSnapshotExpected = pme->mParent->numOfSnapshotsExpected();
+                longShotEnabled = pme->mParent->isLongshotEnabled();
+                LOGD("Num Snapshots Expected = %d",
+                       numOfSnapshotExpected);
+                numOfSnapshotRcvd = 0;
+            }
+            break;
+        case CAMERA_CMD_TYPE_STOP_DATA_PROC:
+            {
+                // Leave snapshot mode and drop any queued snapshot callbacks.
+                pme->mDataQ.flushNodes(matchSnapshotNotifications);
+                isSnapshotActive = FALSE;
+
+                numOfSnapshotExpected = 0;
+                numOfSnapshotRcvd = 0;
+            }
+            break;
+        case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+            {
+                qcamera_callback_argm_t *cb =
+                    (qcamera_callback_argm_t *)pme->mDataQ.dequeue();
+                cbStatus = NO_ERROR;
+                if (NULL != cb) {
+                    LOGD("cb type %d received",
+                              cb->cb_type);
+
+                    if (pme->mParent->msgTypeEnabledWithLock(cb->msg_type)) {
+                        switch (cb->cb_type) {
+                        case QCAMERA_NOTIFY_CALLBACK:
+                            {
+                                if (cb->msg_type == CAMERA_MSG_FOCUS) {
+                                    KPI_ATRACE_INT("Camera:AutoFocus", 0);
+                                    LOGH("[KPI Perf] : PROFILE_SENDING_FOCUS_EVT_TO APP");
+                                }
+                                if (pme->mNotifyCb) {
+                                    pme->mNotifyCb(cb->msg_type,
+                                                  cb->ext1,
+                                                  cb->ext2,
+                                                  pme->mCallbackCookie);
+                                } else {
+                                    LOGW("notify callback not set!");
+                                }
+                                if (cb->release_cb) {
+                                    cb->release_cb(cb->user_data, cb->cookie,
+                                            cbStatus);
+                                }
+                            }
+                            break;
+                        case QCAMERA_DATA_CALLBACK:
+                            {
+                                if (pme->mDataCb) {
+                                    pme->mDataCb(cb->msg_type,
+                                                 cb->data,
+                                                 cb->index,
+                                                 cb->metadata,
+                                                 pme->mCallbackCookie);
+                                } else {
+                                    LOGW("data callback not set!");
+                                }
+                                if (cb->release_cb) {
+                                    cb->release_cb(cb->user_data, cb->cookie,
+                                            cbStatus);
+                                }
+                            }
+                            break;
+                        case QCAMERA_DATA_TIMESTAMP_CALLBACK:
+                            {
+                                if(pme->mDataCbTimestamp) {
+                                    pme->mDataCbTimestamp(cb->timestamp,
+                                                          cb->msg_type,
+                                                          cb->data,
+                                                          cb->index,
+                                                          pme->mCallbackCookie);
+                                } else {
+                                    LOGE("Timestamp data callback not set!");
+                                }
+                                if (cb->release_cb) {
+                                    cb->release_cb(cb->user_data, cb->cookie,
+                                            cbStatus);
+                                }
+                            }
+                            break;
+                        case QCAMERA_DATA_SNAPSHOT_CALLBACK:
+                            {
+                                if (TRUE == isSnapshotActive && pme->mDataCb ) {
+                                    if (!longShotEnabled) {
+                                        numOfSnapshotRcvd++;
+                                        LOGI("Num Snapshots Received = %d Expected = %d",
+                                                numOfSnapshotRcvd, numOfSnapshotExpected);
+                                        if (numOfSnapshotExpected > 0 &&
+                                           (numOfSnapshotExpected == numOfSnapshotRcvd)) {
+                                            LOGI("Received all snapshots");
+                                            // notify HWI that snapshot is done
+                                            pme->mParent->processSyncEvt(QCAMERA_SM_EVT_SNAPSHOT_DONE,
+                                                                         NULL);
+                                        }
+                                    }
+                                    if (pme->mJpegCb) {
+                                        // BUGFIX: the three literals must form ONE
+                                        // concatenated format string. Previously a stray
+                                        // comma after "release_data %p" passed
+                                        // "frame_idx %d" as a vararg, mismatching the
+                                        // conversion specifiers (undefined behavior).
+                                        LOGI("Calling JPEG Callback!! for camera %d "
+                                                "release_data %p "
+                                                "frame_idx %d",
+                                                 pme->mParent->getCameraId(),
+                                                cb->user_data,
+                                                cb->frame_index);
+                                        pme->mJpegCb(cb->msg_type, cb->data,
+                                                cb->index, cb->metadata,
+                                                pme->mJpegCallbackCookie,
+                                                cb->frame_index, cb->release_cb,
+                                                cb->cookie, cb->user_data);
+                                        // incase of non-null Jpeg cb we transfer
+                                        // ownership of buffer to muxer. hence
+                                        // release_cb should not be called
+                                        // muxer will release after its done with
+                                        // processing the buffer
+                                    } else if(pme->mDataCb){
+                                        pme->mDataCb(cb->msg_type, cb->data, cb->index,
+                                                cb->metadata, pme->mCallbackCookie);
+                                        if (cb->release_cb) {
+                                            cb->release_cb(cb->user_data, cb->cookie,
+                                                    cbStatus);
+                                        }
+                                    }
+                                }
+                            }
+                            break;
+                        default:
+                            {
+                                LOGE("invalid cb type %d",
+                                          cb->cb_type);
+                                cbStatus = BAD_VALUE;
+                                if (cb->release_cb) {
+                                    cb->release_cb(cb->user_data, cb->cookie,
+                                            cbStatus);
+                                }
+                            }
+                            break;
+                        };
+                    } else {
+                        LOGW("cb message type %d not enabled!",
+                                  cb->msg_type);
+                        cbStatus = INVALID_OPERATION;
+                        if (cb->release_cb) {
+                            cb->release_cb(cb->user_data, cb->cookie, cbStatus);
+                        }
+                    }
+                    delete cb;
+                } else {
+                    LOGW("invalid cb type passed");
+                }
+            }
+            break;
+        case CAMERA_CMD_TYPE_EXIT:
+            {
+                running = 0;
+                pme->mDataQ.flush();
+            }
+            break;
+        default:
+            break;
+        }
+    } while (running);
+    LOGD("X");
+
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : notifyCallback
+ *
+ * DESCRIPTION: Enqueus pending callback notifications for the upper layers.
+ *
+ * PARAMETERS :
+ *   @cbArgs  : callback arguments
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraCbNotifier::notifyCallback(qcamera_callback_argm_t &cbArgs)
+{
+    if (!mActive) {
+        LOGE("notify thread is not active");
+        return UNKNOWN_ERROR;
+    }
+
+    // Take a private copy of the callback arguments; once enqueued, the
+    // notify thread owns it and deletes it after dispatch.
+    qcamera_callback_argm_t *pArgCopy = new qcamera_callback_argm_t();
+    if (NULL == pArgCopy) {
+        LOGE("no mem for qcamera_callback_argm_t");
+        return NO_MEMORY;
+    }
+    memset(pArgCopy, 0, sizeof(qcamera_callback_argm_t));
+    *pArgCopy = cbArgs;
+
+    if (!mDataQ.enqueue((void *)pArgCopy)) {
+        LOGE("Error adding cb data into queue");
+        delete pArgCopy;
+        return UNKNOWN_ERROR;
+    }
+    return mProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+}
+
+/*===========================================================================
+ * FUNCTION   : setCallbacks
+ *
+ * DESCRIPTION: Initializes the callback functions, which would be used for
+ *              communication with the upper layers and launches the callback
+ *              context in which the callbacks will occur.
+ *
+ * PARAMETERS :
+ *   @notifyCb          : notification callback
+ *   @dataCb            : data callback
+ *   @dataCbTimestamp   : data with timestamp callback
+ *   @callbackCookie    : callback context data
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraCbNotifier::setCallbacks(camera_notify_callback notifyCb,
+                                     camera_data_callback dataCb,
+                                     camera_data_timestamp_callback dataCbTimestamp,
+                                     void *callbackCookie)
+{
+    // One-shot initialization: refuse if any slot is already populated.
+    bool alreadyInitialized = (NULL != mNotifyCb) ||
+                              (NULL != mDataCb) ||
+                              (NULL != mDataCbTimestamp) ||
+                              (NULL != mCallbackCookie);
+    if (alreadyInitialized) {
+        LOGE("Camera callback notifier already initialized!");
+        return;
+    }
+    mNotifyCb = notifyCb;
+    mDataCb = dataCb;
+    mDataCbTimestamp = dataCbTimestamp;
+    mCallbackCookie = callbackCookie;
+    mActive = true;
+    // Spin up the dispatch thread only after the callbacks are in place.
+    mProcTh.launch(cbNotifyRoutine, this);
+}
+
+/*===========================================================================
+ * FUNCTION   : setJpegCallBacks
+ *
+ * DESCRIPTION: Initializes the JPEG callback function, which would be used for
+ *              communication with the upper layers and launches the callback
+ *              context in which the callbacks will occur.
+ *
+ * PARAMETERS :
+ *   @jpegCb          : notification callback
+ *   @callbackCookie    : callback context data
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraCbNotifier::setJpegCallBacks(
+        jpeg_data_callback jpegCb, void *callbackCookie)
+{
+    LOGH("Setting JPEG Callback notifier");
+    // Unlike setCallbacks(), this setter may be invoked again to replace
+    // the JPEG consumer (no already-set guard here).
+    mJpegCb = jpegCb;
+    mJpegCallbackCookie = callbackCookie;
+}
+
+/*===========================================================================
+ * FUNCTION   : flushPreviewNotifications
+ *
+ * DESCRIPTION: flush all pending preview notifications
+ *              from the notifier queue
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraCbNotifier::flushPreviewNotifications()
+{
+    if (mActive) {
+        // Drop every queued preview-frame callback still awaiting dispatch.
+        mDataQ.flushNodes(matchPreviewNotifications);
+        return NO_ERROR;
+    }
+    LOGE("notify thread is not active");
+    return UNKNOWN_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : flushVideoNotifications
+ *
+ * DESCRIPTION: flush all pending video notifications
+ *              from the notifier queue
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraCbNotifier::flushVideoNotifications()
+{
+    if (mActive) {
+        // Drop every queued video timestamp callback still awaiting dispatch.
+        mDataQ.flushNodes(matchTimestampNotifications);
+        return NO_ERROR;
+    }
+    LOGE("notify thread is not active");
+    return UNKNOWN_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : startSnapshots
+ *
+ * DESCRIPTION: Enables snapshot mode
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraCbNotifier::startSnapshots()
+{
+    // Synchronous command (last arg TRUE): returns after the notify thread
+    // has processed CAMERA_CMD_TYPE_START_DATA_PROC.
+    return mProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, FALSE, TRUE);
+}
+
+/*===========================================================================
+ * FUNCTION   : stopSnapshots
+ *
+ * DESCRIPTION: Disables snapshot processing mode
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraCbNotifier::stopSnapshots()
+{
+    // Synchronous command (last arg TRUE): the notify thread flushes pending
+    // snapshot callbacks before this call returns.
+    mProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, FALSE, TRUE);
+}
+
+}; // namespace qcamera
diff --git a/msmcobalt/QCamera2/HAL/QCameraAllocator.h b/msmcobalt/QCamera2/HAL/QCameraAllocator.h
new file mode 100644
index 0000000..ca15a6a
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/QCameraAllocator.h
@@ -0,0 +1,63 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_ALLOCATOR__
+#define __QCAMERA_ALLOCATOR__
+
+extern "C" {
+#include "mm_camera_interface.h"
+}
+
+namespace qcamera {
+
+class QCameraMemory;
+class QCameraHeapMemory;
+
+/* A deferrable unit of work handed to scheduleBackgroundTask(). */
+typedef struct {
+    int32_t (*bgFunction) (void *); /* task entry point; returns a status code */
+    void* bgArgs;                   /* opaque argument forwarded to bgFunction */
+} BackgroundTask;
+
+/* Abstract allocator interface: streams and channels obtain buffers through
+ * it without knowing the concrete allocation policy. */
+class QCameraAllocator {
+public:
+    /* Allocate the buffer set for a stream of the given type and geometry.
+     * bufferCnt appears to be in/out (count requested vs. allocated) —
+     * NOTE(review): confirm against the implementing class. */
+    virtual QCameraMemory *allocateStreamBuf(cam_stream_type_t stream_type,
+            size_t size, int stride, int scanline, uint8_t &bufferCnt) = 0;
+    /* Grow an existing buffer set to bufferCnt buffers of the given size. */
+    virtual int32_t allocateMoreStreamBuf(QCameraMemory *mem_obj,
+            size_t size, uint8_t &bufferCnt) = 0;
+    /* Allocate the heap buffer holding a stream's cam_stream_info. */
+    virtual QCameraHeapMemory *allocateStreamInfoBuf(cam_stream_type_t stream_type) = 0;
+    /* Allocate auxiliary (misc) heap memory for the given stream info. */
+    virtual QCameraHeapMemory *allocateMiscBuf(cam_stream_info_t *streamInfo) = 0;
+    /* Allocate user-owned stream buffers described by streamInfo. */
+    virtual QCameraMemory *allocateStreamUserBuf(cam_stream_info_t *streamInfo) = 0;
+    /* Block until any deferred allocation for stream_type has completed. */
+    virtual void waitForDeferredAlloc(cam_stream_type_t stream_type) = 0;
+    /* Queue a BackgroundTask; returns a task id usable with
+     * waitForBackgroundTask(). */
+    virtual uint32_t scheduleBackgroundTask(BackgroundTask* bgTask) = 0;
+    /* Block until the task identified by taskId finishes. */
+    virtual int32_t waitForBackgroundTask(uint32_t &taskId) = 0;
+    virtual ~QCameraAllocator() {}
+};
+
+}; /* namespace qcamera */
+#endif /* __QCAMERA_ALLOCATOR__ */
diff --git a/msmcobalt/QCamera2/HAL/QCameraChannel.cpp b/msmcobalt/QCamera2/HAL/QCameraChannel.cpp
new file mode 100644
index 0000000..b5a59ef
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/QCameraChannel.cpp
@@ -0,0 +1,1600 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCameraChannel"
+
+// System dependencies
+#include <utils/Errors.h>
+
+// Camera dependencies
+#include "QCamera2HWI.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+using namespace android;
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : QCameraChannel
+ *
+ * DESCRIPTION: constrcutor of QCameraChannel
+ *
+ * PARAMETERS :
+ *   @cam_handle : camera handle
+ *   @cam_ops    : ptr to camera ops table
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraChannel::QCameraChannel(uint32_t cam_handle,
+                               mm_camera_ops_t *cam_ops) :
+        m_camHandle(cam_handle),
+        m_camOps(cam_ops),
+        m_bIsActive(false),
+        m_bAllowDynBufAlloc(false),
+        m_handle(0)
+{
+    // Channel handle (m_handle) is assigned later by init().
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraChannel
+ *
+ * DESCRIPTION: default constrcutor of QCameraChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraChannel::QCameraChannel()
+{
+    m_camHandle = 0;
+    m_camOps = NULL;
+    m_bIsActive = false;
+    // BUGFIX: the parameterized constructor clears this flag but the default
+    // constructor left it uninitialized; reading an uninitialized member is
+    // undefined behavior.
+    m_bAllowDynBufAlloc = false;
+
+    m_handle = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraChannel
+ *
+ * DESCRIPTION: destructor of QCameraChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraChannel::~QCameraChannel()
+{
+    if (m_bIsActive) {
+        stop();
+    }
+    // Only delete streams this channel owns (same channel handle); linked
+    // streams are owned and freed by their original channel.
+    for (size_t i = 0; i < mStreams.size(); i++) {
+        if (mStreams[i] != NULL) {
+                if (m_handle == mStreams[i]->getChannelHandle()) {
+                    delete mStreams[i];
+                }
+        }
+    }
+    mStreams.clear();
+    // BUGFIX: m_camOps is NULL for a default-constructed channel; guard the
+    // dereference to avoid crashing in the destructor.
+    if (m_camOps != NULL) {
+        m_camOps->delete_channel(m_camHandle, m_handle);
+    }
+    m_handle = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : deleteChannel
+ *
+ * DESCRIPTION: deletes a camera channel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraChannel::deleteChannel()
+{
+    if (m_bIsActive) {
+        stop();
+    }
+    // Delete only streams owned by this channel (matching channel handle).
+    for (size_t i = 0; i < mStreams.size(); i++) {
+        if ((mStreams[i] != NULL) && (m_handle == mStreams[i]->getChannelHandle())) {
+            mStreams[i]->deleteStream();
+        }
+    }
+    // BUGFIX: guard against a default-constructed channel whose m_camOps is
+    // NULL (mirrors the destructor's teardown path).
+    if (m_camOps != NULL) {
+        m_camOps->delete_channel(m_camHandle, m_handle);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setStreamSyncCB
+ *
+ * DESCRIPTION: reg callback function to stream of stream type
+ *
+ * PARAMETERS :
+ *    @stream_type : Stream type for which callback needs to be registered.
+ *    @stream_cb   : Callback function
+
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::setStreamSyncCB (cam_stream_type_t stream_type,
+        stream_cb_routine stream_cb)
+{
+    // Register the callback on the first stream of the requested type;
+    // report UNKNOWN_ERROR when no such stream exists in this channel.
+    for (size_t idx = 0; idx < mStreams.size(); idx++) {
+        QCameraStream *pStream = mStreams[idx];
+        if ((pStream != NULL) && (stream_type == pStream->getMyType())) {
+            return pStream->setSyncDataCB(stream_cb);
+        }
+    }
+    return UNKNOWN_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : init
+ *
+ * DESCRIPTION: initialization of channel
+ *
+ * PARAMETERS :
+ *   @attr    : channel bundle attribute setting
+ *   @dataCB  : data notify callback
+ *   @userData: user data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::init(mm_camera_channel_attr_t *attr,
+                             mm_camera_buf_notify_t dataCB,
+                             void *userData)
+{
+    // A zero handle from the interface layer signals failure.
+    m_handle = m_camOps->add_channel(m_camHandle, attr, dataCB, userData);
+    if (0 == m_handle) {
+        LOGE("Add channel failed");
+        return UNKNOWN_ERROR;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : addStream
+ *
+ * DESCRIPTION: add a stream into channel
+ *
+ * PARAMETERS :
+ *   @allocator      : stream related buffer allocator
+ *   @streamInfoBuf  : ptr to buf that contains stream info
+ *   @miscBuf        : ptr to buf that contains misc buffers
+ *   @minStreamBufNum: number of stream buffers needed
+ *   @paddingInfo    : padding information
+ *   @stream_cb      : stream data notify callback
+ *   @userdata       : user data ptr
+ *   @bDynAllocBuf   : flag indicating if allow allocate buffers in 2 steps
+ *   @online_rotation: rotation applied online
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::addStream(QCameraAllocator &allocator,
+        QCameraHeapMemory *streamInfoBuf, QCameraHeapMemory *miscBuf,
+        uint8_t minStreamBufNum, cam_padding_info_t *paddingInfo,
+        stream_cb_routine stream_cb, void *userdata, bool bDynAllocBuf,
+        bool bDeffAlloc, cam_rotation_t online_rotation)
+{
+    // On every failure path this method is responsible for releasing
+    // streamInfoBuf, since the stream never takes it over.
+    if (mStreams.size() >= MAX_STREAM_NUM_IN_BUNDLE) {
+        LOGE("stream number (%zu) exceeds max limit (%d)",
+               mStreams.size(), MAX_STREAM_NUM_IN_BUNDLE);
+        if (streamInfoBuf != NULL) {
+            streamInfoBuf->deallocate();
+            delete streamInfoBuf;
+            streamInfoBuf = NULL;
+        }
+        return BAD_VALUE;
+    }
+
+    QCameraStream *pNewStream = new QCameraStream(allocator,
+            m_camHandle, m_handle, m_camOps, paddingInfo, bDeffAlloc,
+            online_rotation);
+    if (pNewStream == NULL) {
+        LOGE("No mem for Stream");
+        if (streamInfoBuf != NULL) {
+            streamInfoBuf->deallocate();
+            delete streamInfoBuf;
+            streamInfoBuf = NULL;
+        }
+        return NO_MEMORY;
+    }
+
+    // streamInfoBuf/miscBuf are handed to the stream here; ownership is
+    // assumed to transfer via init() — confirm in QCameraStream.
+    int32_t rc = pNewStream->init(streamInfoBuf, miscBuf, minStreamBufNum,
+                                  stream_cb, userdata, bDynAllocBuf);
+    if (rc == 0) {
+        mStreams.add(pNewStream);
+    } else {
+        delete pNewStream;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : linkStream
+ *
+ * DESCRIPTION: link a stream into channel
+ *
+ * PARAMETERS :
+ *   @ch      : Channel which the stream belongs to
+ *   @stream  : Stream which needs to be linked
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::linkStream(QCameraChannel *ch, QCameraStream *stream)
+{
+    if ((0 == m_handle) || (NULL == ch) || (NULL == stream)) {
+        return NO_INIT;
+    }
+
+    // Ask the interface layer to attach the foreign channel's stream to
+    // this channel; a zero result means the link failed.
+    int32_t linkedHandle = m_camOps->link_stream(m_camHandle,
+            ch->getMyHandle(),
+            stream->getMyHandle(),
+            m_handle);
+    if (0 == linkedHandle) {
+        LOGE("Linking of stream failed");
+        return INVALID_OPERATION;
+    }
+
+    // Track the linked stream; note this channel does NOT own it (its
+    // channel handle still points at the owning channel).
+    mStreams.add(stream);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : start
+ *
+ * DESCRIPTION: start channel, which will start all streams belong to this channel
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::start()
+{
+    int32_t rc = NO_ERROR;
+
+    // Starting an already-active channel is treated as a benign no-op.
+    if(m_bIsActive) {
+        LOGW("Attempt to start active channel");
+        return rc;
+    }
+    if (mStreams.size() > 1) {
+        // there is more than one stream in the channel
+        // we need to notify mctl that all streams in this channel need to be bundled
+        cam_bundle_config_t bundleInfo;
+        memset(&bundleInfo, 0, sizeof(bundleInfo));
+        rc = m_camOps->get_bundle_info(m_camHandle, m_handle, &bundleInfo);
+        if (rc != NO_ERROR) {
+            LOGE("get_bundle_info failed");
+            return rc;
+        }
+        if (bundleInfo.num_of_streams > 1) {
+            for (int i = 0; i < bundleInfo.num_of_streams; i++) {
+                // Resolve each server-side stream id back to our stream object.
+                QCameraStream *pStream = getStreamByServerID(bundleInfo.stream_ids[i]);
+                if (pStream != NULL) {
+                    if ((pStream->isTypeOf(CAM_STREAM_TYPE_METADATA))
+                            || (pStream->isTypeOf(CAM_STREAM_TYPE_OFFLINE_PROC))) {
+                        // Skip metadata for reprocess now because PP module cannot handle meta data
+                        // May need furthur discussion if Imaginglib need meta data
+                        continue;
+                    }
+
+                    // Push the bundle configuration down to each member stream
+                    // before any of them is started.
+                    cam_stream_parm_buffer_t param;
+                    memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+                    param.type = CAM_STREAM_PARAM_TYPE_SET_BUNDLE_INFO;
+                    param.bundleInfo = bundleInfo;
+                    rc = pStream->setParameter(param);
+                    if (rc != NO_ERROR) {
+                        LOGE("stream setParameter for set bundle failed");
+                        return rc;
+                    }
+                }
+            }
+        }
+    }
+
+    // Start owned streams first, then the channel itself.
+    for (size_t i = 0; i < mStreams.size(); i++) {
+        if ((mStreams[i] != NULL) &&
+                (m_handle == mStreams[i]->getChannelHandle())) {
+            mStreams[i]->start();
+        }
+    }
+    rc = m_camOps->start_channel(m_camHandle, m_handle);
+
+    if (rc != NO_ERROR) {
+        // Roll back: stop every owned stream if the channel failed to start.
+        for (size_t i = 0; i < mStreams.size(); i++) {
+            if ((mStreams[i] != NULL) &&
+                    (m_handle == mStreams[i]->getChannelHandle())) {
+                mStreams[i]->stop();
+            }
+        }
+    } else {
+        m_bIsActive = true;
+        // Wake any streams waiting on the channel-active condition.
+        for (size_t i = 0; i < mStreams.size(); i++) {
+            if (mStreams[i] != NULL) {
+                mStreams[i]->cond_signal();
+            }
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop
+ *
+ * DESCRIPTION: stop a channel, which will stop all streams belong to this channel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::stop()
+{
+    int32_t rc = NO_ERROR;
+    size_t i = 0;
+
+    if (!m_bIsActive) {
+        return NO_INIT;
+    }
+
+    // Walk the stream list: stop owned streams, unlink foreign ones.
+    while(i < mStreams.size()) {
+        if (mStreams[i] == NULL) {
+            // BUGFIX: a NULL entry previously advanced neither the index nor
+            // removed the node, spinning this loop forever. Skip it.
+            i++;
+        } else if (m_handle == mStreams[i]->getChannelHandle()) {
+            mStreams[i]->stop();
+            i++;
+        } else {
+            // Remove linked stream from stream list; its owning channel is
+            // responsible for stopping it.
+            mStreams.removeAt(i);
+        }
+    }
+
+    rc = m_camOps->stop_channel(m_camHandle, m_handle);
+
+    m_bIsActive = false;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : bufDone
+ *
+ * DESCRIPTION: return a stream buf back to kernel
+ *
+ * PARAMETERS :
+ *   @recvd_frame  : stream buf frame to be returned
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::bufDone(mm_camera_super_buf_t *recvd_frame)
+{
+    int32_t rc = NO_ERROR;
+    // Hand each buffer back to the stream it came from, matching on the
+    // buffer's stream id.
+    for (uint32_t bufIdx = 0; bufIdx < recvd_frame->num_bufs; bufIdx++) {
+        if (recvd_frame->bufs[bufIdx] == NULL) {
+            continue;
+        }
+        for (size_t sIdx = 0; sIdx < mStreams.size(); sIdx++) {
+            QCameraStream *pStream = mStreams[sIdx];
+            if ((pStream != NULL) &&
+                    (pStream->getMyHandle() == recvd_frame->bufs[bufIdx]->stream_id)) {
+                rc = pStream->bufDone(recvd_frame->bufs[bufIdx]->buf_idx);
+                break; // owning stream found; next buffer
+            }
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : bufDone
+ *
+ * DESCRIPTION: return specified buffer from super buffer to kernel
+ *
+ * PARAMETERS :
+ *   @recvd_frame  : stream buf frame to be returned
+ *   @stream_id      : stream ID of the buffer to be released
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::bufDone(mm_camera_super_buf_t *recvd_frame, uint32_t stream_id)
+{
+    int32_t rc = NO_ERROR;
+    int32_t index;
+    // Walk the super buffer, returning (and removing) every buffer that
+    // belongs to the given stream. Removal compacts bufs[] in place, so the
+    // loop index is rewound after each removal (see i-- below).
+    for (int32_t i = 0; i < (int32_t)recvd_frame->num_bufs; i++) {
+        index = -1;
+        if ((recvd_frame->bufs[i] != NULL) &&
+                (recvd_frame->bufs[i]->stream_id == stream_id)) {
+            // Find the owning stream and hand the buffer back to it
+            for (size_t j = 0; j < mStreams.size(); j++) {
+                if ((mStreams[j] != NULL) &&
+                        (mStreams[j]->getMyHandle() == stream_id)) {
+                    rc = mStreams[j]->bufDone(recvd_frame->bufs[i]->buf_idx);
+                    index = i;
+                    break; // break loop j
+                }
+            }
+            // If the buffer was returned, compact the super buffer: shift the
+            // remaining entries left and shrink num_bufs
+            if ((index >= 0) && (index < (int32_t)recvd_frame->num_bufs)) {
+                for (int32_t j = index; j < (int32_t)(recvd_frame->num_bufs - 1); j++) {
+                    recvd_frame->bufs[j] = recvd_frame->bufs[j + 1];
+                }
+                recvd_frame->num_bufs--;
+                i--; // re-examine slot i, which now holds the shifted-in entry
+            }
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : processZoomDone
+ *
+ * DESCRIPTION: process zoom done event
+ *
+ * PARAMETERS :
+ *   @previewWindow : ptr to preview window ops table, needed to set preview
+ *                    crop information
+ *   @crop_info     : crop info as a result of zoom operation
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::processZoomDone(preview_stream_ops_t *previewWindow,
+                                        cam_crop_data_t &crop_info)
+{
+    int32_t rc = NO_ERROR;
+    // Propagate the post-zoom crop info to every stream owned by this channel
+    for (size_t idx = 0; idx < mStreams.size(); idx++) {
+        QCameraStream *stream = mStreams[idx];
+        if ((stream != NULL) &&
+                (stream->getChannelHandle() == m_handle)) {
+            rc = stream->processZoomDone(previewWindow, crop_info);
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamByHandle
+ *
+ * DESCRIPTION: return stream object by stream handle
+ *
+ * PARAMETERS :
+ *   @streamHandle : stream handle
+ *
+ * RETURN     : stream object. NULL if not found
+ *==========================================================================*/
+QCameraStream *QCameraChannel::getStreamByHandle(uint32_t streamHandle)
+{
+    // Linear search is fine: per-channel stream count is small
+    for (size_t idx = 0; idx < mStreams.size(); idx++) {
+        QCameraStream *stream = mStreams[idx];
+        if ((stream != NULL) && (stream->getMyHandle() == streamHandle)) {
+            return stream;
+        }
+    }
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamByServerID
+ *
+ * DESCRIPTION: return stream object by stream server ID from daemon
+ *
+ * PARAMETERS :
+ *   @serverID : stream server ID
+ *
+ * RETURN     : stream object. NULL if not found
+ *==========================================================================*/
+QCameraStream *QCameraChannel::getStreamByServerID(uint32_t serverID)
+{
+    // Look the stream up by the server-side (daemon) identifier
+    for (size_t idx = 0; idx < mStreams.size(); idx++) {
+        QCameraStream *stream = mStreams[idx];
+        if ((stream != NULL) && (stream->getMyServerID() == serverID)) {
+            return stream;
+        }
+    }
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamByIndex
+ *
+ * DESCRIPTION: return stream object by index of streams in the channel
+ *
+ * PARAMETERS :
+ *   @index : index of stream in the channel
+ *
+ * RETURN     : stream object. NULL if not found
+ *==========================================================================*/
+QCameraStream *QCameraChannel::getStreamByIndex(uint32_t index)
+{
+    // Valid only when the index is both within the bundle limit and within
+    // the current stream list; otherwise report "not found"
+    if ((index < MAX_STREAM_NUM_IN_BUNDLE) && (index < mStreams.size())) {
+        return mStreams[index];
+    }
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : UpdateStreamBasedParameters
+ *
+ * DESCRIPTION: update any stream based settings from parameters
+ *
+ * PARAMETERS :
+ *   @param   : reference to parameters object
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::UpdateStreamBasedParameters(QCameraParametersIntf &param)
+{
+    int32_t rc = NO_ERROR;
+
+    if (param.isPreviewFlipChanged()) {
+        // Push the new flip mode to every preview stream in this channel
+        for (size_t idx = 0; idx < mStreams.size(); idx++) {
+            QCameraStream *stream = mStreams[idx];
+            if ((stream == NULL) || (m_handle != stream->getChannelHandle())) {
+                continue;
+            }
+            if (stream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+                    stream->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW)) {
+                cam_stream_parm_buffer_t param_buf;
+                memset(&param_buf, 0, sizeof(cam_stream_parm_buffer_t));
+                param_buf.type = CAM_STREAM_PARAM_TYPE_SET_FLIP;
+                param_buf.flipInfo.flip_mask =
+                        (uint32_t)param.getFlipMode(CAM_STREAM_TYPE_PREVIEW);
+                rc = stream->setParameter(param_buf);
+                if (rc != NO_ERROR) {
+                    LOGW("set preview stream flip failed");
+                }
+            }
+        }
+    }
+
+    if (param.isVideoFlipChanged()) {
+        // Same treatment for video streams
+        for (size_t idx = 0; idx < mStreams.size(); idx++) {
+            QCameraStream *stream = mStreams[idx];
+            if ((stream == NULL) || (m_handle != stream->getChannelHandle())) {
+                continue;
+            }
+            if (stream->isTypeOf(CAM_STREAM_TYPE_VIDEO) ||
+                    stream->isOrignalTypeOf(CAM_STREAM_TYPE_VIDEO)) {
+                cam_stream_parm_buffer_t param_buf;
+                memset(&param_buf, 0, sizeof(cam_stream_parm_buffer_t));
+                param_buf.type = CAM_STREAM_PARAM_TYPE_SET_FLIP;
+                param_buf.flipInfo.flip_mask =
+                        (uint32_t)param.getFlipMode(CAM_STREAM_TYPE_VIDEO);
+                rc = stream->setParameter(param_buf);
+                if (rc != NO_ERROR) {
+                    LOGW("set video stream flip failed");
+                }
+            }
+        }
+    }
+
+    if (param.isSnapshotFlipChanged()) {
+        // Snapshot flip applies to both snapshot and postview streams
+        for (size_t idx = 0; idx < mStreams.size(); idx++) {
+            QCameraStream *stream = mStreams[idx];
+            if ((stream == NULL) || (m_handle != stream->getChannelHandle())) {
+                continue;
+            }
+            if (stream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+                    stream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+                    stream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
+                    stream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW)) {
+                cam_stream_parm_buffer_t param_buf;
+                memset(&param_buf, 0, sizeof(cam_stream_parm_buffer_t));
+                param_buf.type = CAM_STREAM_PARAM_TYPE_SET_FLIP;
+                param_buf.flipInfo.flip_mask =
+                        (uint32_t)param.getFlipMode(CAM_STREAM_TYPE_SNAPSHOT);
+                rc = stream->setParameter(param_buf);
+                if (rc != NO_ERROR) {
+                    LOGW("set snapshot stream flip failed");
+                }
+            }
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraPicChannel
+ *
+ * DESCRIPTION: constructor of QCameraPicChannel
+ *
+ * PARAMETERS :
+ *   @cam_handle : camera handle
+ *   @cam_ops    : ptr to camera ops table
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraPicChannel::QCameraPicChannel(uint32_t cam_handle,
+                                     mm_camera_ops_t *cam_ops) :
+    QCameraChannel(cam_handle, cam_ops)
+{
+    // Picture channels may grow their buffer pool on demand
+    m_bAllowDynBufAlloc = true;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraPicChannel
+ *
+ * DESCRIPTION: default constructor of QCameraPicChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraPicChannel::QCameraPicChannel()
+{
+    // Picture channels may grow their buffer pool on demand
+    m_bAllowDynBufAlloc = true;
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraPicChannel
+ *
+ * DESCRIPTION: destructor of QCameraPicChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraPicChannel::~QCameraPicChannel()
+{
+    // No picture-channel specific resources; base class handles teardown
+}
+
+/*===========================================================================
+ * FUNCTION   : takePicture
+ *
+ * DESCRIPTION: send request for queued snapshot frames
+ *
+ * PARAMETERS :
+ *   @buf : request buf info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraPicChannel::takePicture (mm_camera_req_buf_t *buf)
+{
+    // Ask mm-camera to deliver the queued super buffers described by buf
+    return m_camOps->request_super_buf(m_camHandle, m_handle, buf);
+}
+
+/*===========================================================================
+ * FUNCTION   : cancelPicture
+ *
+ * DESCRIPTION: cancel request for queued snapshot frames
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraPicChannel::cancelPicture()
+{
+    // Withdraw any outstanding super buffer request on this channel
+    return m_camOps->cancel_super_buf_request(m_camHandle, m_handle);
+}
+
+/*===========================================================================
+ * FUNCTION   : stopAdvancedCapture
+ *
+ * DESCRIPTION: stop advanced capture based on advanced capture type.
+ *
+ * PARAMETERS :
+ *   @type : advanced capture type.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraPicChannel::stopAdvancedCapture(mm_camera_advanced_capture_t type)
+{
+    // start_flag = 0 requests a stop; no frame config is needed when stopping
+    return m_camOps->process_advanced_capture(m_camHandle, m_handle,
+            type, 0, NULL);
+}
+
+/*===========================================================================
+ * FUNCTION   : startAdvancedCapture
+ *
+ * DESCRIPTION: start advanced capture based on advanced capture type.
+ *
+ * PARAMETERS :
+ *   @type : advanced capture type.
+ *   @config: advance capture config
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraPicChannel::startAdvancedCapture(mm_camera_advanced_capture_t type,
+        cam_capture_frame_config_t *config)
+{
+    // start_flag = 1 requests a start, using the supplied frame configuration
+    return m_camOps->process_advanced_capture(m_camHandle, m_handle,
+            type, 1, config);
+}
+
+/*===========================================================================
+ * FUNCTION   : flushSuperbuffer
+ *
+ * DESCRIPTION: flush the all superbuffer frames.
+ *
+ * PARAMETERS :
+ *   @frame_idx : frame index of focused frame
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraPicChannel::flushSuperbuffer(uint32_t frame_idx)
+{
+    // Drop the pending super buffers in the queue relative to frame_idx
+    return m_camOps->flush_super_buf_queue(m_camHandle, m_handle, frame_idx);
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraVideoChannel
+ *
+ * DESCRIPTION: constructor of QCameraVideoChannel
+ *
+ * PARAMETERS :
+ *   @cam_handle : camera handle
+ *   @cam_ops    : ptr to camera ops table
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraVideoChannel::QCameraVideoChannel(uint32_t cam_handle,
+                                         mm_camera_ops_t *cam_ops) :
+    QCameraChannel(cam_handle, cam_ops)
+{
+    // No video-channel specific state to initialize
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraVideoChannel
+ *
+ * DESCRIPTION: default constructor of QCameraVideoChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraVideoChannel::QCameraVideoChannel()
+{
+    // No video-channel specific state to initialize
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraVideoChannel
+ *
+ * DESCRIPTION: destructor of QCameraVideoChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraVideoChannel::~QCameraVideoChannel()
+{
+    // No video-channel specific resources; base class handles teardown
+}
+
+/*===========================================================================
+ * FUNCTION   : takePicture
+ *
+ * DESCRIPTION: send request for queued snapshot frames
+ *
+ * PARAMETERS :
+ *   @buf : request buf info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraVideoChannel::takePicture(mm_camera_req_buf_t *buf)
+{
+    // Request queued snapshot super buffers for this video channel
+    return m_camOps->request_super_buf(m_camHandle, m_handle, buf);
+}
+
+/*===========================================================================
+ * FUNCTION   : cancelPicture
+ *
+ * DESCRIPTION: cancel request for queued snapshot frames
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraVideoChannel::cancelPicture()
+{
+    // Withdraw any outstanding super buffer request on this channel
+    return m_camOps->cancel_super_buf_request(m_camHandle, m_handle);
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseFrame
+ *
+ * DESCRIPTION: return video frame from app
+ *
+ * PARAMETERS :
+ *   @opaque     : ptr to video frame to be returned
+ *   @isMetaData : if frame is a metadata or real frame
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraVideoChannel::releaseFrame(const void * opaque, bool isMetaData)
+{
+    // Locate the video stream within this channel
+    QCameraStream *pVideoStream = NULL;
+    for (size_t idx = 0; idx < mStreams.size(); idx++) {
+        QCameraStream *stream = mStreams[idx];
+        if ((stream != NULL) && stream->isTypeOf(CAM_STREAM_TYPE_VIDEO)) {
+            pVideoStream = stream;
+            break;
+        }
+    }
+
+    if (pVideoStream == NULL) {
+        LOGE("No video stream in the channel");
+        return BAD_VALUE;
+    }
+
+    // Hand the app-returned frame (or its metadata handle) back to the stream
+    return pVideoStream->bufDone(opaque, isMetaData);
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraReprocessChannel
+ *
+ * DESCRIPTION: constructor of QCameraReprocessChannel
+ *
+ * PARAMETERS :
+ *   @cam_handle : camera handle
+ *   @cam_ops    : ptr to camera ops table
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraReprocessChannel::QCameraReprocessChannel(uint32_t cam_handle,
+                                                 mm_camera_ops_t *cam_ops) :
+    QCameraChannel(cam_handle, cam_ops),
+    m_pSrcChannel(NULL),
+    mPassCount(0)
+{
+    // No source streams mapped yet
+    memset(mSrcStreamHandles, 0, sizeof(mSrcStreamHandles));
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraReprocessChannel
+ *
+ * DESCRIPTION: default constructor of QCameraReprocessChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraReprocessChannel::QCameraReprocessChannel() :
+    m_pSrcChannel(NULL),
+    mPassCount(0)
+{
+    // NOTE(review): unlike the two-argument constructor, mSrcStreamHandles is
+    // not zeroed here; addReprocStreamsFromSource() re-initializes it before
+    // use — confirm no other path reads it first.
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraReprocessChannel
+ *
+ * DESCRIPTION: destructor of QCameraReprocessChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraReprocessChannel::~QCameraReprocessChannel()
+{
+    // Offline buffer unmapping is handled in stop(); nothing extra here
+}
+
+/*===========================================================================
+ * FUNCTION   : addReprocStreamsFromSource
+ *
+ * DESCRIPTION: add reprocess streams from input source channel
+ *
+ * PARAMETERS :
+ *   @allocator      : stream related buffer allocator
+ *   @featureConfig  : pp feature configuration
+ *   @pSrcChannel    : ptr to input source channel that needs reprocess
+ *   @minStreamBufNum: number of stream buffers needed
+ *   @burstNum       : number of burst captures needed
+ *   @paddingInfo    : padding information
+ *   @param          : reference to parameters
+ *   @contStream     : continuous streaming mode or burst
+ *   @offline        : configure for offline reprocessing
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocessChannel::addReprocStreamsFromSource(
+        QCameraAllocator& allocator, cam_pp_feature_config_t &featureConfig,
+        QCameraChannel *pSrcChannel, uint8_t minStreamBufNum, uint8_t burstNum,
+        cam_padding_info_t *paddingInfo, QCameraParametersIntf &param, bool contStream,
+        bool offline)
+{
+    int32_t rc = 0;
+    QCameraStream *pStream = NULL;
+    QCameraHeapMemory *pStreamInfoBuf = NULL;
+    QCameraHeapMemory *pMiscBuf = NULL;
+    cam_stream_info_t *streamInfo = NULL;
+    cam_padding_info_t padding;
+
+    // Forget any previous source-stream mapping before rebuilding it below
+    memset(mSrcStreamHandles, 0, sizeof(mSrcStreamHandles));
+    if (NULL == paddingInfo) {
+        return BAD_VALUE;
+    }
+    padding = *paddingInfo;
+    //Use maximum padding so that the buffer
+    //can be rotated
+    padding.width_padding = MAX(padding.width_padding, padding.height_padding);
+    padding.height_padding = padding.width_padding;
+    padding.offset_info.offset_x = 0;
+    padding.offset_info.offset_y = 0;
+
+    LOGD("num of src stream = %d", pSrcChannel->getNumOfStreams());
+
+    // For each eligible source stream, create a mirroring offline-proc stream
+    for (uint32_t i = 0; i < pSrcChannel->getNumOfStreams(); i++) {
+        cam_pp_feature_config_t pp_featuremask = featureConfig;
+        pStream = pSrcChannel->getStreamByIndex(i);
+        if (pStream != NULL) {
+            if (param.getofflineRAW() && !((pStream->isTypeOf(CAM_STREAM_TYPE_RAW))
+                    || (pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW))
+                    || (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA))
+                    || (pStream->isOrignalTypeOf(CAM_STREAM_TYPE_RAW)))) {
+                //Skip all the stream other than RAW and POSTVIEW in case of offline RAW
+                continue;
+            }
+
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_RAW)
+                    && (!param.getofflineRAW())) {
+                // Skip raw for reprocess now because PP module cannot handle
+                // meta data&raw. May need further discussion if Imaginglib need meta data
+                continue;
+            }
+
+            if (((pStream->isTypeOf(CAM_STREAM_TYPE_METADATA))
+                    && !(param.getManualCaptureMode() >=
+                    CAM_MANUAL_CAPTURE_TYPE_3))
+                    || (pStream->isTypeOf(CAM_STREAM_TYPE_ANALYSIS))) {
+                // Skip metadata
+                continue;
+            }
+
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+                    pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
+                    pStream->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+                    pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW)) {
+                cam_feature_mask_t feature_mask = featureConfig.feature_mask;
+
+                // skip thumbnail reprocessing if not needed
+                if (!param.needThumbnailReprocess(&feature_mask)) {
+                    continue;
+                }
+                // CAC, SHARPNESS, FLIP and WNR would have been already applied -
+                // on preview/postview stream in realtime.
+                // So, need not apply again.
+                feature_mask &= ~(CAM_QCOM_FEATURE_DENOISE2D |
+                        CAM_QCOM_FEATURE_CAC |
+                        CAM_QCOM_FEATURE_SHARPNESS |
+                        CAM_QCOM_FEATURE_FLIP |
+                        CAM_QCOM_FEATURE_RAW_PROCESSING);
+                if (!feature_mask) {
+                    // Skip thumbnail stream reprocessing since no other
+                    //reprocessing is enabled.
+                    continue;
+                }
+            }
+
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+                // Metadata streams only undergo metadata processing
+                pp_featuremask.feature_mask = 0;
+                pp_featuremask.feature_mask |= CAM_QCOM_FEATURE_METADATA_PROCESSING;
+            }
+
+            // Allocate and populate the stream info for the new reprocess stream
+            pStreamInfoBuf = allocator.allocateStreamInfoBuf(CAM_STREAM_TYPE_OFFLINE_PROC);
+            if (pStreamInfoBuf == NULL) {
+                LOGE("no mem for stream info buf");
+                rc = NO_MEMORY;
+                break;
+            }
+
+            streamInfo = (cam_stream_info_t *)pStreamInfoBuf->getPtr(0);
+            memset(streamInfo, 0, sizeof(cam_stream_info_t));
+            streamInfo->stream_type = CAM_STREAM_TYPE_OFFLINE_PROC;
+            // Enable CPP high performance mode to put it in turbo frequency mode for
+            // burst/longshot/HDR snapshot cases
+            streamInfo->perf_mode = CAM_PERF_HIGH_PERFORMANCE;
+            if (param.getofflineRAW() && pStream->isTypeOf(CAM_STREAM_TYPE_RAW)) {
+                streamInfo->fmt = CAM_FORMAT_YUV_420_NV21;
+            } else {
+                rc = pStream->getFormat(streamInfo->fmt);
+            }
+
+            // Pick the output dimension: thumbnail size for preview/postview
+            // when scaling, otherwise derived from the source stream/params
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+                    pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
+                    pStream->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+                    pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW)) {
+                if (pp_featuremask.feature_mask & CAM_QCOM_FEATURE_SCALE) {
+                    param.getThumbnailSize(&(streamInfo->dim.width),
+                            &(streamInfo->dim.height));
+                } else {
+                    pStream->getFrameDimension(streamInfo->dim);
+                }
+            } else {
+                if ((param.isPostProcScaling()) &&
+                        (pp_featuremask.feature_mask & CAM_QCOM_FEATURE_SCALE)) {
+                    rc = param.getStreamDimension(CAM_STREAM_TYPE_OFFLINE_PROC,
+                            streamInfo->dim);
+                } else if ((param.getofflineRAW()) &&
+                        (pStream->isTypeOf(CAM_STREAM_TYPE_RAW))) {
+                    param.getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT,streamInfo->dim);
+                } else {
+                    rc = pStream->getFrameDimension(streamInfo->dim);
+                }
+            }
+
+            if ( contStream ) {
+                streamInfo->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+                streamInfo->num_of_burst = 0;
+            } else {
+                streamInfo->streaming_mode = CAM_STREAMING_MODE_BURST;
+                streamInfo->num_of_burst = burstNum;
+            }
+            streamInfo->num_bufs = minStreamBufNum;
+
+            // Build the reprocess config: offline consumes explicitly mapped
+            // buffers; online is fed directly from the source stream
+            cam_stream_reproc_config_t rp_cfg;
+            memset(&rp_cfg, 0, sizeof(cam_stream_reproc_config_t));
+            if (offline) {
+                cam_frame_len_offset_t offset;
+                memset(&offset, 0, sizeof(cam_frame_len_offset_t));
+
+                rp_cfg.pp_type = CAM_OFFLINE_REPROCESS_TYPE;
+                pStream->getFormat(rp_cfg.offline.input_fmt);
+                pStream->getFrameDimension(rp_cfg.offline.input_dim);
+                pStream->getFrameOffset(offset);
+                rp_cfg.offline.input_buf_planes.plane_info = offset;
+                rp_cfg.offline.input_type = pStream->getMyOriginalType();
+                //For input metadata + input buffer
+                rp_cfg.offline.num_of_bufs = 2;
+            } else {
+                rp_cfg.pp_type = CAM_ONLINE_REPROCESS_TYPE;
+                rp_cfg.online.input_stream_id = pStream->getMyServerID();
+                rp_cfg.online.input_stream_type = pStream->getMyOriginalType();
+            }
+            param.getStreamRotation(streamInfo->stream_type,
+                    streamInfo->pp_config, streamInfo->dim);
+            streamInfo->reprocess_config = rp_cfg;
+            streamInfo->reprocess_config.pp_feature_config = pp_featuremask;
+
+            if (!(pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT)
+                || pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)
+                || pStream->isTypeOf(CAM_STREAM_TYPE_RAW)
+                || pStream->isOrignalTypeOf(CAM_STREAM_TYPE_RAW))) {
+                // CAC, SHARPNESS, FLIP and WNR would have been already applied -
+                // on preview/postview stream in realtime. Need not apply again.
+                streamInfo->reprocess_config.pp_feature_config.feature_mask &=
+                        ~CAM_QCOM_FEATURE_CAC;
+                streamInfo->reprocess_config.pp_feature_config.feature_mask &=
+                        ~CAM_QCOM_FEATURE_SHARPNESS;
+                streamInfo->reprocess_config.pp_feature_config.feature_mask &=
+                        ~CAM_QCOM_FEATURE_FLIP;
+                //Don't do WNR for thumbnail
+                streamInfo->reprocess_config.pp_feature_config.feature_mask &=
+                        ~CAM_QCOM_FEATURE_DENOISE2D;
+                streamInfo->reprocess_config.pp_feature_config.feature_mask &=
+                        ~CAM_QCOM_FEATURE_CDS;
+                streamInfo->reprocess_config.pp_feature_config.feature_mask &=
+                        ~CAM_QCOM_FEATURE_DSDN;
+                //No need of RAW processing for other than RAW streams
+                streamInfo->reprocess_config.pp_feature_config.feature_mask &=
+                        ~CAM_QCOM_FEATURE_RAW_PROCESSING;
+
+                if (param.isHDREnabled()
+                  && !param.isHDRThumbnailProcessNeeded()){
+                    streamInfo->reprocess_config.pp_feature_config.feature_mask
+                      &= ~CAM_QCOM_FEATURE_HDR;
+                }
+            }
+
+            // Snapshot inputs may additionally require a flip in reprocess
+            cam_stream_type_t type = CAM_STREAM_TYPE_DEFAULT;
+            if (offline) {
+                type = streamInfo->reprocess_config.offline.input_type;
+            } else {
+                type = streamInfo->reprocess_config.online.input_stream_type;
+            }
+            if (type == CAM_STREAM_TYPE_SNAPSHOT) {
+                int flipMode = param.getFlipMode(type);
+                if (flipMode > 0) {
+                    streamInfo->reprocess_config.pp_feature_config.feature_mask |=
+                            CAM_QCOM_FEATURE_FLIP;
+                    streamInfo->reprocess_config.pp_feature_config.flip = (uint32_t)flipMode;
+                }
+            }
+
+            if ((streamInfo->reprocess_config.pp_feature_config.feature_mask
+                    & CAM_QCOM_FEATURE_SCALE)
+                    && param.isReprocScaleEnabled()
+                    && param.isUnderReprocScaling()) {
+                //we only Scale Snapshot frame
+                if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
+                    streamInfo->dim.width =
+                            streamInfo->reprocess_config.pp_feature_config.scale_param.output_width;
+                    streamInfo->dim.height =
+                            streamInfo->reprocess_config.pp_feature_config.scale_param.output_height;
+                }
+                LOGH("stream width=%d, height=%d.",
+                         streamInfo->dim.width, streamInfo->dim.height);
+            }
+
+            // save source stream handler (parallel to the mStreams entry that
+            // addStream() below will append)
+            mSrcStreamHandles[mStreams.size()] = pStream->getMyHandle();
+
+            pMiscBuf = allocator.allocateMiscBuf(streamInfo);
+
+            LOGH("Configure Reprocessing: stream = %d, res = %dX%d, fmt = %d, type = %d",
+                    pStream->getMyOriginalType(), streamInfo->dim.width,
+                    streamInfo->dim.height, streamInfo->fmt, type);
+
+            // add reprocess stream
+            if (streamInfo->reprocess_config.pp_feature_config.feature_mask
+                    & CAM_QCOM_FEATURE_ROTATION) {
+                rc = addStream(allocator, pStreamInfoBuf, pMiscBuf,
+                        minStreamBufNum, &padding, NULL, NULL, false, false,
+                        streamInfo->reprocess_config.pp_feature_config.rotation);
+            } else {
+                rc = addStream(allocator, pStreamInfoBuf, pMiscBuf,
+                        minStreamBufNum, &padding, NULL, NULL, false, false);
+            }
+            if (rc != NO_ERROR) {
+                LOGE("add reprocess stream failed, ret = %d", rc);
+                break;
+            }
+        }
+    }
+
+    // Remember the source channel only when every stream was added successfully
+    if (rc == NO_ERROR) {
+        m_pSrcChannel = pSrcChannel;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamBySrouceHandle
+ *
+ * DESCRIPTION: find reprocess stream by its source stream handle
+ *
+ * PARAMETERS :
+ *   @srcHandle : source stream handle
+ *
+ * RETURN     : ptr to reprocess stream if found. NULL if not found
+ *==========================================================================*/
+QCameraStream * QCameraReprocessChannel::getStreamBySrouceHandle(uint32_t srcHandle)
+{
+    // mSrcStreamHandles[i] records the source handle for mStreams[i], so a
+    // parallel-array lookup recovers the matching reprocess stream
+    for (size_t idx = 0; idx < mStreams.size(); idx++) {
+        if (mSrcStreamHandles[idx] == srcHandle) {
+            return mStreams[idx];
+        }
+    }
+
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop
+ *
+ * DESCRIPTION: stop channel and unmap offline buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocessChannel::stop()
+{
+    // Stop the underlying channel first
+    int32_t rc = QCameraChannel::stop();
+
+    // Unmap every offline buffer that was registered on our streams
+    // (loop is a no-op when the list is empty)
+    for (List<OfflineBuffer>::iterator it = mOfflineBuffers.begin();
+            it != mOfflineBuffers.end(); it++) {
+        QCameraStream *stream = (*it).stream;
+        if (stream != NULL) {
+            int error = stream->unmapBuf((*it).type, (*it).index, -1);
+            if (error != NO_ERROR) {
+                LOGE("Error during offline buffer unmap %d",
+                       error);
+            }
+        }
+    }
+    mOfflineBuffers.clear();
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : doReprocessOffline
+ *
+ * DESCRIPTION: request to do offline reprocess on the frame. Maps the input
+ *              frame and its metadata buffer onto the reprocess stream,
+ *              records the mappings in mOfflineBuffers (unmapped later in
+ *              stop()), then issues a DO_REPROCESS stream parameter.
+ *
+ * PARAMETERS :
+ *   @frame   : frame to be performed a reprocess
+ *   @meta_buf : Metadata buffer for reprocessing
+ *   @pStream  : Actual reprocess stream
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocessChannel::doReprocessOffline(mm_camera_buf_def_t *frame,
+        mm_camera_buf_def_t *meta_buf, QCameraStream *pStream)
+{
+    int32_t rc = 0;
+    OfflineBuffer mappedBuffer;
+    uint32_t buf_index = 0;
+    uint32_t meta_buf_index = 0;
+
+    if ((frame == NULL) || (meta_buf == NULL)) {
+        LOGE("Invalid Input Paramters");
+        return INVALID_OPERATION;
+    }
+
+    // When no explicit reprocess stream is supplied, resolve it from the
+    // frame's source stream handle.
+    if (pStream == NULL) {
+        pStream = getStreamBySrouceHandle(frame->stream_id);
+        if (pStream == NULL) {
+            LOGE("Input validation failed.");
+            return INVALID_OPERATION;
+        }
+    }
+
+    // Choose a fresh mapping index: one past the highest index currently
+    // registered, so repeated offline requests never collide.
+    if (!mOfflineBuffers.empty()) {
+        List<OfflineBuffer>::iterator it = mOfflineBuffers.begin();
+        for( ; it != mOfflineBuffers.end(); it++) {
+            buf_index = (buf_index < ((*it).index)) ? ((*it).index) : buf_index;
+        }
+        buf_index += 1;
+    }
+
+    meta_buf_index = buf_index;
+    // NOTE(review): meta_buf was already NULL-checked above, so this branch
+    // always executes; the guard is kept for safety/readability.
+    if (meta_buf != NULL) {
+        rc = pStream->mapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF,
+                meta_buf_index,
+                -1,
+                meta_buf->fd,
+                meta_buf->buffer,
+                meta_buf->frame_len);
+        if (NO_ERROR != rc ) {
+            // Original mapBuf error code is deliberately collapsed to -1.
+            LOGE("Error during metadata buffer mapping");
+            rc = -1;
+            return rc;
+        }
+
+        // Remember the mapping so stop() can unmap it.
+        mappedBuffer.index = meta_buf_index;
+        mappedBuffer.stream = pStream;
+        mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF;
+        mOfflineBuffers.push_back(mappedBuffer);
+        buf_index += 1;
+    }
+
+    rc = pStream->mapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
+             buf_index,
+             -1,
+             frame->fd,
+             frame->buffer,
+             frame->frame_len);
+    if (NO_ERROR != rc ) {
+        // NOTE(review): if this fails after the meta buffer was mapped, the
+        // meta mapping is left in mOfflineBuffers for stop() to clean up.
+        LOGE("Error during reprocess input buffer mapping");
+        rc = -1;
+        return rc;
+    }
+    mappedBuffer.index = buf_index;
+    mappedBuffer.stream = pStream;
+    mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF;
+    mOfflineBuffers.push_back(mappedBuffer);
+
+    // Kick off the actual reprocess via a stream parameter.
+    cam_stream_parm_buffer_t param;
+    memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+
+    param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
+    param.reprocess.buf_index = buf_index;
+    param.reprocess.frame_idx = frame->frame_idx;
+
+    if (meta_buf != NULL) {
+        param.reprocess.meta_present = 1;
+        param.reprocess.meta_buf_index = meta_buf_index;
+    }
+
+    LOGI("Offline reprocessing id = %d buf Id = %d meta index = %d type = %d",
+             param.reprocess.frame_idx, param.reprocess.buf_index,
+            param.reprocess.meta_buf_index, pStream->getMyOriginalType());
+
+    rc = pStream->setParameter(param);
+    if (rc != NO_ERROR) {
+        LOGE("stream setParameter for reprocess failed");
+        return rc;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : doReprocessOffline
+ *
+ * DESCRIPTION: request to do offline reprocess on the frame. Walks every
+ *              buffer in the super-buf, patches the metadata crop table so
+ *              the reprocess stream inherits its source stream's crop/ROI,
+ *              then delegates each eligible buffer to the single-buffer
+ *              doReprocessOffline() overload.
+ *
+ * PARAMETERS :
+ *   @frame   : frame to be performed a reprocess
+ *   @meta_buf : Metadata buffer for reprocessing
+ *   @mParameter : camera parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocessChannel::doReprocessOffline(mm_camera_super_buf_t *frame,
+        mm_camera_buf_def_t *meta_buf, QCameraParametersIntf &mParameter)
+{
+    int32_t rc = 0;
+    QCameraStream *pStream = NULL;
+
+    if (mStreams.size() < 1) {
+        LOGE("No reprocess streams");
+        return -1;
+    }
+    if (m_pSrcChannel == NULL) {
+        LOGE("No source channel for reprocess");
+        return -1;
+    }
+
+    if (frame == NULL) {
+        LOGE("Invalid source frame");
+        return BAD_VALUE;
+    }
+
+    for (uint32_t i = 0; i < frame->num_bufs; i++) {
+        pStream = getStreamBySrouceHandle(frame->bufs[i]->stream_id);
+        // Only process buffers whose reprocess stream belongs to this channel.
+        if ((pStream != NULL) &&
+                (m_handle == pStream->getChannelHandle())) {
+            // In offline-RAW mode only RAW and metadata streams are reprocessed.
+            if (mParameter.getofflineRAW() &&
+                    !((pStream->isOrignalTypeOf(CAM_STREAM_TYPE_RAW))
+                    || (pStream->isOrignalTypeOf(CAM_STREAM_TYPE_METADATA)))) {
+                continue;
+            }
+
+            if ((pStream->isOrignalTypeOf(CAM_STREAM_TYPE_METADATA)
+                     && (mParameter.getManualCaptureMode()
+                     < CAM_MANUAL_CAPTURE_TYPE_3))
+                     || (pStream->isTypeOf(CAM_STREAM_TYPE_ANALYSIS))) {
+                // Skip metadata for reprocess now because PP module cannot handle meta data
+                // May need furthur discussion if Imaginglib need meta data
+                continue;
+            }
+
+            // Update Metadata: duplicate the source stream's crop/ROI entry
+            // into a new slot keyed by the matching reprocess stream's
+            // server ID, so downstream PP sees correct crop for this stream.
+            if (meta_buf != NULL) {
+                uint32_t stream_id = frame->bufs[i]->stream_id;
+                QCameraStream *srcStream =
+                        m_pSrcChannel->getStreamByHandle(stream_id);
+                metadata_buffer_t *pMetaData =
+                        (metadata_buffer_t *)meta_buf->buffer;
+                if ((NULL != pMetaData) && (NULL != srcStream)) {
+                    IF_META_AVAILABLE(cam_crop_data_t, crop,
+                            CAM_INTF_META_CROP_DATA, pMetaData) {
+                        // Guard: there must be a free slot to append into.
+                        if (MAX_NUM_STREAMS > crop->num_of_streams) {
+                            for (int j = 0; j < MAX_NUM_STREAMS; j++) {
+                                if (crop->crop_info[j].stream_id ==
+                                            srcStream->getMyServerID()) {
+                                    // Store crop/roi information for offline reprocess
+                                    // in the reprocess stream slot
+                                    crop->crop_info[crop->num_of_streams].crop =
+                                            crop->crop_info[j].crop;
+                                    crop->crop_info[crop->num_of_streams].roi_map =
+                                            crop->crop_info[j].roi_map;
+                                    // Re-tag the copied entry with the server ID
+                                    // of the reprocess stream of the same type.
+                                    for (uint8_t k = 0; k < mStreams.size(); k++) {
+                                        if (srcStream->getMyType() ==
+                                                mStreams[k]->getMyOriginalType()) {
+                                            crop->crop_info[crop->num_of_streams].stream_id =
+                                                    mStreams[k]->getMyServerID();
+                                            break;
+                                        }
+                                    }
+                                    crop->num_of_streams++;
+                                    break;
+                                }
+                            }
+                        } else {
+                            LOGE("No space to add reprocess stream crop/roi information");
+                        }
+                    }
+                }
+            }
+
+            // NOTE(review): rc keeps only the result of the LAST buffer;
+            // earlier per-buffer failures are overwritten — confirm intended.
+            rc = doReprocessOffline (frame->bufs[i], meta_buf, pStream);
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : doReprocess
+ *
+ * DESCRIPTION: request to do a reprocess on the frame (online path: buffers
+ *              are already mapped, so only a DO_REPROCESS parameter is sent
+ *              per eligible buffer in the super-buf).
+ *
+ * PARAMETERS :
+ *   @frame   : frame to be performed a reprocess
+ *   @mParameter : camera parameters
+ *   @pMetaStream: Metadata stream handle
+ *   @meta_buf_index : Metadata buffer index
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocessChannel::doReprocess(mm_camera_super_buf_t *frame,
+        QCameraParametersIntf &mParameter, QCameraStream *pMetaStream,
+        uint8_t meta_buf_index)
+{
+    int32_t rc = 0;
+    if (mStreams.size() < 1) {
+        LOGE("No reprocess streams");
+        return -1;
+    }
+    if (m_pSrcChannel == NULL) {
+        LOGE("No source channel for reprocess");
+        return -1;
+    }
+
+    // A missing metadata stream is tolerated: reprocess proceeds without
+    // bundled metadata (meta_present stays 0).
+    if (pMetaStream == NULL) {
+        LOGW("Null Metadata buffer for processing");
+    }
+
+    for (uint32_t i = 0; i < frame->num_bufs; i++) {
+        QCameraStream *pStream = getStreamBySrouceHandle(frame->bufs[i]->stream_id);
+        if ((pStream != NULL) && (m_handle == pStream->getChannelHandle())) {
+            if (mParameter.getofflineRAW() && !((pStream->isOrignalTypeOf(CAM_STREAM_TYPE_RAW))
+                    || (pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW))
+                    || (pStream->isOrignalTypeOf(CAM_STREAM_TYPE_METADATA)))) {
+                //Skip all the stream other than RAW and POSTVIEW incase of offline of RAW
+                continue;
+            }
+            if ((pStream->isOrignalTypeOf(CAM_STREAM_TYPE_METADATA)
+                     && (mParameter.getManualCaptureMode()
+                     < CAM_MANUAL_CAPTURE_TYPE_3))
+                     || (pStream->isTypeOf(CAM_STREAM_TYPE_ANALYSIS))) {
+                // Skip metadata for reprocess now because PP module cannot handle meta data
+                // May need furthur discussion if Imaginglib need meta data
+                continue;
+            }
+
+            cam_stream_parm_buffer_t param;
+            memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+            param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
+            param.reprocess.buf_index = frame->bufs[i]->buf_idx;
+            param.reprocess.frame_idx = frame->bufs[i]->frame_idx;
+            if (pMetaStream != NULL) {
+                // we have meta data frame bundled, sent together with reprocess frame
+                param.reprocess.meta_present = 1;
+                param.reprocess.meta_stream_handle = pMetaStream->getMyServerID();
+                param.reprocess.meta_buf_index = meta_buf_index;
+            }
+
+            LOGI("Online reprocessing id = %d buf Id = %d meta index = %d type = %d",
+                     param.reprocess.frame_idx, param.reprocess.buf_index,
+                    param.reprocess.meta_buf_index, pStream->getMyOriginalType());
+
+            // Abort the whole super-buf on the first setParameter failure.
+            rc = pStream->setParameter(param);
+            if (rc != NO_ERROR) {
+                LOGE("stream setParameter for reprocess failed");
+                break;
+            }
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : doReprocess
+ *
+ * DESCRIPTION: request to do a reprocess on the frame. Synchronous per
+ *              stream: map input buffer, issue DO_REPROCESS, read back
+ *              ret_val, unmap.
+ *
+ * PARAMETERS :
+ *   @buf_fd     : fd to the input buffer that needs reprocess
+ *   @buffer     : buffer pointer of actual buffer
+ *   @buf_lenght : length of the input buffer
+ *   @ret_val    : result of reprocess.
+ *                 Example: Could be faceID in case of register face image.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocessChannel::doReprocess(int buf_fd, void *buffer,
+        size_t buf_length, int32_t &ret_val)
+{
+    int32_t rc = 0;
+    if (mStreams.size() < 1) {
+        LOGE("No reprocess streams");
+        return -1;
+    }
+
+    // Mapping index is always 0: buffers are mapped and unmapped within
+    // each loop iteration, so the slot is free again for the next stream.
+    uint32_t buf_idx = 0;
+    for (size_t i = 0; i < mStreams.size(); i++) {
+        // Skip streams that belong to a different channel.
+        // NOTE(review): a NULL entry makes this condition false and would be
+        // dereferenced below — confirm mStreams never holds NULL here.
+        if ((mStreams[i] != NULL) &&
+                (m_handle != mStreams[i]->getChannelHandle())) {
+            continue;
+        }
+        rc = mStreams[i]->mapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
+                                 buf_idx, -1,
+                                 buf_fd, buffer, buf_length);
+
+        if (rc == NO_ERROR) {
+            cam_stream_parm_buffer_t param;
+            memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+            param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
+            param.reprocess.buf_index = buf_idx;
+            rc = mStreams[i]->setParameter(param);
+            if (rc == NO_ERROR) {
+                // setParameter is synchronous here; backend fills ret_val.
+                ret_val = param.reprocess.ret_val;
+            }
+            mStreams[i]->unmapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
+                                  buf_idx, -1);
+        }
+    }
+    return rc;
+}
+
+}; // namespace qcamera
diff --git a/msmcobalt/QCamera2/HAL/QCameraChannel.h b/msmcobalt/QCamera2/HAL/QCameraChannel.h
new file mode 100644
index 0000000..65112e0
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/QCameraChannel.h
@@ -0,0 +1,171 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_CHANNEL_H__
+#define __QCAMERA_CHANNEL_H__
+
+#include "camera.h"
+#include "QCameraMem.h"
+#include "QCameraParameters.h"
+#include "QCameraStream.h"
+
+extern "C" {
+#include "mm_camera_interface.h"
+}
+
+namespace qcamera {
+
+// Base class for a camera channel: owns the mm-camera channel handle and
+// the set of streams bundled on it, and dispatches channel-level operations
+// (init/start/stop, buffer return, zoom crop updates) to those streams.
+class QCameraChannel
+{
+public:
+    QCameraChannel(uint32_t cam_handle,
+                   mm_camera_ops_t *cam_ops);
+    QCameraChannel();
+    virtual ~QCameraChannel();
+    virtual int32_t init(mm_camera_channel_attr_t *attr,
+                         mm_camera_buf_notify_t dataCB, // data CB for channel data
+                         void *userData);
+    // Owner of memory is transferred from the caller to the caller with this call.
+    virtual int32_t addStream(QCameraAllocator& allocator,
+            QCameraHeapMemory *streamInfoBuf, QCameraHeapMemory *miscBuf,
+            uint8_t minStreamBufnum, cam_padding_info_t *paddingInfo,
+            stream_cb_routine stream_cb, void *userdata, bool bDynAllocBuf,
+            bool bDeffAlloc = false, cam_rotation_t online_rotation = ROTATE_0);
+    virtual int32_t linkStream(QCameraChannel *ch, QCameraStream *stream);
+    virtual int32_t start();
+    virtual int32_t stop();
+    virtual int32_t bufDone(mm_camera_super_buf_t *recvd_frame);
+    virtual int32_t bufDone(mm_camera_super_buf_t *recvd_frame, uint32_t stream_id);
+    virtual int32_t processZoomDone(preview_stream_ops_t *previewWindow,
+                                    cam_crop_data_t &crop_info);
+    // Stream lookups by mm-camera handle, vector index, or server-side ID.
+    QCameraStream *getStreamByHandle(uint32_t streamHandle);
+    uint32_t getMyHandle() const {return m_handle;};
+    uint32_t getNumOfStreams() const {return (uint32_t) mStreams.size();};
+    QCameraStream *getStreamByIndex(uint32_t index);
+    QCameraStream *getStreamByServerID(uint32_t serverID);
+    int32_t UpdateStreamBasedParameters(QCameraParametersIntf &param);
+    void deleteChannel();
+    int32_t setStreamSyncCB (cam_stream_type_t stream_type,
+            stream_cb_routine stream_cb);
+    bool isActive() { return m_bIsActive; }
+protected:
+    uint32_t m_camHandle;
+    mm_camera_ops_t *m_camOps;
+    bool m_bIsActive;          // true between start() and stop()
+    bool m_bAllowDynBufAlloc; // if buf allocation can be in two steps
+
+    uint32_t m_handle;        // mm-camera channel handle
+    Vector<QCameraStream *> mStreams;
+    mm_camera_buf_notify_t mDataCB;
+    void *mUserData;
+};
+
+// burst pic channel: i.e. zsl burst mode
+// Adds snapshot-specific controls on top of QCameraChannel: take/cancel
+// picture requests, advanced-capture start/stop, and super-buf flushing.
+class QCameraPicChannel : public QCameraChannel
+{
+public:
+    QCameraPicChannel(uint32_t cam_handle,
+                      mm_camera_ops_t *cam_ops);
+    QCameraPicChannel();
+    virtual ~QCameraPicChannel();
+    int32_t takePicture(mm_camera_req_buf_t *buf);
+    int32_t cancelPicture();
+    int32_t stopAdvancedCapture(mm_camera_advanced_capture_t type);
+    int32_t startAdvancedCapture(mm_camera_advanced_capture_t type,
+            cam_capture_frame_config_t *config = NULL);
+    // Drop queued super-buffers up to (and matching) the given frame index.
+    int32_t flushSuperbuffer(uint32_t frame_idx);
+};
+
+// video channel class
+// Channel used for video recording; supports live-snapshot picture
+// requests and returning recording frames back to the stream.
+class QCameraVideoChannel : public QCameraChannel
+{
+public:
+    QCameraVideoChannel(uint32_t cam_handle,
+                        mm_camera_ops_t *cam_ops);
+    QCameraVideoChannel();
+    virtual ~QCameraVideoChannel();
+    int32_t takePicture(mm_camera_req_buf_t *buf);
+    int32_t cancelPicture();
+    // Return a video frame (identified by its opaque handle) to the stream.
+    int32_t releaseFrame(const void *opaque, bool isMetaData);
+};
+
+// reprocess channel class
+// Channel whose streams are derived from another channel's streams and used
+// to run post-processing (reprocess) passes on captured frames, either
+// online (buffers already mapped) or offline (map/unmap per request).
+class QCameraReprocessChannel : public QCameraChannel
+{
+public:
+    QCameraReprocessChannel(uint32_t cam_handle,
+                            mm_camera_ops_t *cam_ops);
+    QCameraReprocessChannel();
+    virtual ~QCameraReprocessChannel();
+    // Create reprocess streams mirroring the streams of pSrcChannel.
+    int32_t addReprocStreamsFromSource(QCameraAllocator& allocator,
+                                       cam_pp_feature_config_t &config,
+                                       QCameraChannel *pSrcChannel,
+                                       uint8_t minStreamBufNum,
+                                       uint8_t burstNum,
+                                       cam_padding_info_t *paddingInfo,
+                                       QCameraParametersIntf &param,
+                                       bool contStream,
+                                       bool offline);
+    // online reprocess
+    int32_t doReprocess(mm_camera_super_buf_t *frame,
+            QCameraParametersIntf &param, QCameraStream *pMetaStream,
+            uint8_t meta_buf_index);
+
+    // offline reprocess
+    int32_t doReprocess(int buf_fd, void *buffer, size_t buf_length, int32_t &ret_val);
+
+    int32_t doReprocessOffline(mm_camera_super_buf_t *frame,
+             mm_camera_buf_def_t *meta_buf, QCameraParametersIntf &param);
+
+    int32_t doReprocessOffline(mm_camera_buf_def_t *frame,
+             mm_camera_buf_def_t *meta_buf, QCameraStream *pStream = NULL);
+
+    // Stops the channel and unmaps any buffers left in mOfflineBuffers.
+    int32_t stop();
+    QCameraChannel *getSrcChannel(){return m_pSrcChannel;};
+    int8_t getReprocCount(){return mPassCount;};
+    void setReprocCount(int8_t count) {mPassCount = count;};
+
+private:
+    // Lookup by source stream handle ("Srouce" typo is historical).
+    QCameraStream *getStreamBySrouceHandle(uint32_t srcHandle);
+
+    // Bookkeeping for a buffer mapped for offline reprocess, so stop()
+    // can unmap it on the owning stream.
+    typedef struct {
+        QCameraStream *stream;
+        cam_mapping_buf_type type;
+        uint32_t index;
+    } OfflineBuffer;
+
+    // Parallel to mStreams: handle of the source stream each entry mirrors.
+    uint32_t mSrcStreamHandles[MAX_STREAM_NUM_IN_BUNDLE];
+    QCameraChannel *m_pSrcChannel; // ptr to source channel for reprocess
+    android::List<OfflineBuffer> mOfflineBuffers;
+    int8_t mPassCount; // reprocess pass counter (multi-pass PP)
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_CHANNEL_H__ */
diff --git a/msmcobalt/QCamera2/HAL/QCameraMem.cpp b/msmcobalt/QCamera2/HAL/QCameraMem.cpp
new file mode 100755
index 0000000..2e2a3a3
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/QCameraMem.cpp
@@ -0,0 +1,2476 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+#define LOG_TAG "QCameraHWI_Mem"
+
+// System dependencies
+#include <fcntl.h>
+#include <stdio.h>
+#include <utils/Errors.h>
+#define MMAN_H <SYSTEM_HEADER_PREFIX/mman.h>
+#include MMAN_H
+#include "gralloc.h"
+#include "gralloc_priv.h"
+
+// Camera dependencies
+#include "QCamera2HWI.h"
+#include "QCameraMem.h"
+#include "QCameraParameters.h"
+#include "QCameraTrace.h"
+
+// Media dependencies
+#include "OMX_QCOMExtns.h"
+#ifdef USE_MEDIA_EXTENSIONS
+#include <media/hardware/HardwareAPI.h>
+typedef struct VideoNativeHandleMetadata media_metadata_buffer;
+#else
+#include "QComOMXMetadata.h"
+typedef struct encoder_media_buffer_type media_metadata_buffer;
+#endif
+
+extern "C" {
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+}
+
+using namespace android;
+
+namespace qcamera {
+
+// QCameraMemory base class
+
+/*===========================================================================
+ * FUNCTION   : QCameraMemory
+ *
+ * DESCRIPTION: default constructor of QCameraMemory
+ *
+ * PARAMETERS :
+ *   @cached  : flag indicates if using cached memory
+ *   @pool    : optional memory pool buffers are drawn from (may be NULL)
+ *   @streamType : stream type this memory serves
+ *   @bufType : buffer type classification
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraMemory::QCameraMemory(bool cached,
+        QCameraMemoryPool *pool,
+        cam_stream_type_t streamType, QCameraMemType bufType)
+    :m_bCached(cached),
+     mMemoryPool(pool),
+     mStreamType(streamType),
+     mBufType(bufType)
+{
+    // Start with no buffers; reset() puts every mMemInfo fd to -1 so an
+    // unallocated slot is never mistaken for a valid file descriptor.
+    mBufferCount = 0;
+    reset();
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraMemory
+ *
+ * DESCRIPTION: deconstructor of QCameraMemory. Intentionally empty:
+ *              buffer deallocation is handled by derived classes.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraMemory::~QCameraMemory()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : cacheOpsInternal
+ *
+ * DESCRIPTION: ion related memory cache operations. Issues an
+ *              ION_IOC_CUSTOM ioctl carrying an ion_flush_data descriptor
+ *              for the whole buffer at @index.
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *   @cmd     : cache ops command
+ *   @vaddr   : ptr to the virtual address
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraMemory::cacheOpsInternal(uint32_t index, unsigned int cmd, void *vaddr)
+{
+    if (!m_bCached) {
+        // Memory is not cached, no need for cache ops
+        LOGD("No cache ops here for uncached memory");
+        return OK;
+    }
+
+    struct ion_flush_data cache_inv_data;
+    struct ion_custom_data custom_data;
+    int ret = OK;
+
+    if (index >= mBufferCount) {
+        LOGE("index %d out of bound [0, %d)", index, mBufferCount);
+        return BAD_INDEX;
+    }
+
+    // Describe the region to flush/invalidate: entire buffer.
+    memset(&cache_inv_data, 0, sizeof(cache_inv_data));
+    memset(&custom_data, 0, sizeof(custom_data));
+    cache_inv_data.vaddr = vaddr;
+    cache_inv_data.fd = mMemInfo[index].fd;
+    cache_inv_data.handle = mMemInfo[index].handle;
+    cache_inv_data.length =
+            ( /* FIXME: Should remove this after ION interface changes */ unsigned int)
+            mMemInfo[index].size;
+    custom_data.cmd = cmd;
+    custom_data.arg = (unsigned long)&cache_inv_data;
+
+    LOGH("addr = %p, fd = %d, handle = %lx length = %d, ION Fd = %d",
+          cache_inv_data.vaddr, cache_inv_data.fd,
+         (unsigned long)cache_inv_data.handle, cache_inv_data.length,
+         mMemInfo[index].main_ion_fd);
+    ret = ioctl(mMemInfo[index].main_ion_fd, ION_IOC_CUSTOM, &custom_data);
+    if (ret < 0) {
+        // Error is returned to the caller; buffer contents are unchanged.
+        LOGE("Cache Invalidate failed: %s\n", strerror(errno));
+    }
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFd
+ *
+ * DESCRIPTION: return file descriptor of the indexed buffer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : file descriptor, or BAD_INDEX (negative) if index is out
+ *              of range
+ *==========================================================================*/
+int QCameraMemory::getFd(uint32_t index) const
+{
+    if (index >= mBufferCount)
+        return BAD_INDEX;
+
+    return mMemInfo[index].fd;
+}
+
+/*===========================================================================
+ * FUNCTION   : getSize
+ *
+ * DESCRIPTION: return buffer size of the indexed buffer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : buffer size in bytes, or BAD_INDEX (negative) if index is
+ *              out of range
+ *==========================================================================*/
+ssize_t QCameraMemory::getSize(uint32_t index) const
+{
+    if (index >= mBufferCount)
+        return BAD_INDEX;
+
+    return (ssize_t)mMemInfo[index].size;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCnt
+ *
+ * DESCRIPTION: query number of buffers allocated
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of buffers allocated (0 if none)
+ *==========================================================================*/
+uint8_t QCameraMemory::getCnt() const
+{
+    return mBufferCount;
+}
+
+/*===========================================================================
+ * FUNCTION   : reset
+ *
+ * DESCRIPTION: reset member variables. Does NOT free memory; callers must
+ *              have deallocated buffers before resetting.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraMemory::reset()
+{
+    size_t i, count;
+
+    memset(mMemInfo, 0, sizeof(mMemInfo));
+
+    // fds must be -1, not 0, so an empty slot is never mistaken for a
+    // valid file descriptor (0 is a legal fd).
+    count = sizeof(mMemInfo) / sizeof(mMemInfo[0]);
+    for (i = 0; i < count; i++) {
+        mMemInfo[i].fd = -1;
+        mMemInfo[i].main_ion_fd = -1;
+    }
+
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMappable
+ *
+ * DESCRIPTION: query number of buffers available to map. In the base class
+ *              every allocated buffer is mappable; subclasses may override.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of buffers available to map
+ *==========================================================================*/
+uint8_t QCameraMemory::getMappable() const
+{
+    return mBufferCount;
+}
+
+/*===========================================================================
+ * FUNCTION   : checkIfAllBuffersMapped
+ *
+ * DESCRIPTION: query if all buffers are mapped. Base class always reports
+ *              true since buffer count equals mappable count here.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : 1 as buffer count is always equal to mappable count
+ *==========================================================================*/
+uint8_t QCameraMemory::checkIfAllBuffersMapped() const
+{
+    return 1;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : getBufDef
+ *
+ * DESCRIPTION: query detailed buffer information. Fills a multi-plane
+ *              (MPLANE) buffer definition for the buffer at @index; plane
+ *              data offsets are accumulated from the preceding planes.
+ *
+ * PARAMETERS :
+ *   @offset  : [input] frame buffer offset
+ *   @bufDef  : [output] reference to struct to store buffer definition
+ *   @index   : [input] index of the buffer
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraMemory::getBufDef(const cam_frame_len_offset_t &offset,
+        mm_camera_buf_def_t &bufDef, uint32_t index) const
+{
+    if (!mBufferCount) {
+        LOGE("Memory not allocated");
+        return;
+    }
+    bufDef.fd = mMemInfo[index].fd;
+    bufDef.frame_len = mMemInfo[index].size;
+    bufDef.buf_type = CAM_STREAM_BUF_TYPE_MPLANE;
+    bufDef.mem_info = (void *)this;
+    bufDef.planes_buf.num_planes = (int8_t)offset.num_planes;
+    bufDef.buffer = getPtr(index);
+    bufDef.buf_idx = index;
+
+    /* Plane 0 needs to be set separately. Set other planes in a loop */
+    bufDef.planes_buf.planes[0].length = offset.mp[0].len;
+    bufDef.planes_buf.planes[0].m.userptr = (long unsigned int)mMemInfo[index].fd;
+    bufDef.planes_buf.planes[0].data_offset = offset.mp[0].offset;
+    bufDef.planes_buf.planes[0].reserved[0] = 0;
+    for (int i = 1; i < bufDef.planes_buf.num_planes; i++) {
+         bufDef.planes_buf.planes[i].length = offset.mp[i].len;
+         // NOTE(review): uses mMemInfo[i].fd (plane index) whereas plane 0
+         // uses mMemInfo[index].fd (buffer index) — confirm this asymmetry
+         // is intentional.
+         bufDef.planes_buf.planes[i].m.userptr = (long unsigned int)mMemInfo[i].fd;
+         bufDef.planes_buf.planes[i].data_offset = offset.mp[i].offset;
+         // reserved[0] carries the running byte offset of this plane within
+         // the buffer: previous plane's offset plus its length.
+         bufDef.planes_buf.planes[i].reserved[0] =
+                 bufDef.planes_buf.planes[i-1].reserved[0] +
+                 bufDef.planes_buf.planes[i-1].length;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getUserBufDef
+ *
+ * DESCRIPTION: Fill Buffer structure with user buffer information.
+ *              This also fills individual stream buffers inside the batch
+ *              buffer structure.
+ *
+ * PARAMETERS :
+ *   @buf_info : user buffer information
+ *   @bufDef  : Buffer structure to fill user buf info
+ *   @index   : index of the buffer
+ *   @plane_offset : plane buffer information
+ *   @planeBufDef  : [input] frame buffer offset
+ *   @bufs    : Stream Buffer object
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraMemory::getUserBufDef(const cam_stream_user_buf_info_t &buf_info,
+        mm_camera_buf_def_t &bufDef,
+        uint32_t index,
+        const cam_frame_len_offset_t &plane_offset,
+        mm_camera_buf_def_t *planeBufDef,
+        QCameraMemory *bufs) const
+{
+    struct msm_camera_user_buf_cont_t *cont_buf = NULL;
+    // Each batch buffer @index covers frame_buf_cnt consecutive plane bufs.
+    uint32_t plane_idx = (index * buf_info.frame_buf_cnt);
+
+    if (!mBufferCount) {
+        LOGE("Memory not allocated");
+        return INVALID_OPERATION;
+    }
+
+    // NOTE(review): bufDef is a single out-param rewritten on every loop
+    // iteration, and plane_idx keeps advancing across iterations — confirm
+    // iterating all mBufferCount buffers here is intentional.
+    for (int count = 0; count < mBufferCount; count++) {
+        bufDef.fd = mMemInfo[count].fd;
+        bufDef.buf_type = CAM_STREAM_BUF_TYPE_USERPTR;
+        bufDef.frame_len = buf_info.size;
+        bufDef.mem_info = (void *)this;
+        bufDef.buffer = (void *)((uint8_t *)getPtr(count)
+                + (index * buf_info.size));
+        bufDef.buf_idx = index;
+        bufDef.user_buf.num_buffers = (int8_t)buf_info.frame_buf_cnt;
+        bufDef.user_buf.bufs_used = (int8_t)buf_info.frame_buf_cnt;
+
+        //Individual plane buffer structure to be filled
+        cont_buf = (struct msm_camera_user_buf_cont_t *)bufDef.buffer;
+        cont_buf->buf_cnt = bufDef.user_buf.num_buffers;
+
+        // Fill per-frame plane definitions and record their indices in the
+        // container; user_buf.buf_idx starts unassigned (-1).
+        for (int i = 0; i < bufDef.user_buf.num_buffers; i++) {
+            bufs->getBufDef(plane_offset, planeBufDef[plane_idx], plane_idx);
+            bufDef.user_buf.buf_idx[i] = -1;
+            cont_buf->buf_idx[i] = planeBufDef[plane_idx].buf_idx;
+            plane_idx++;
+        }
+        bufDef.user_buf.plane_buf = planeBufDef;
+
+        LOGD("num_buf = %d index = %d plane_idx = %d",
+                 bufDef.user_buf.num_buffers, index, plane_idx);
+    }
+    return NO_ERROR;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : alloc
+ *
+ * DESCRIPTION: allocate requested number of buffers of certain size.
+ *              New buffers are appended after the mBufferCount buffers that
+ *              are already allocated; the caller updates mBufferCount on
+ *              success.
+ *
+ * PARAMETERS :
+ *   @count   : number of buffers to be allocated
+ *   @size    : length of the buffer to be allocated
+ *   @heap_id : heap id to indicate where the buffers will be allocated from
+ *   @secure_mode : SECURE or NON_SECURE allocation
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraMemory::alloc(int count, size_t size, unsigned int heap_id,
+        uint32_t secure_mode)
+{
+    int rc = OK;
+
+    int new_bufCnt = mBufferCount + count;
+    ATRACE_BEGIN_SNPRINTF("%s %zu %d", "Memsize", size, count);
+
+    if (new_bufCnt > MM_CAMERA_MAX_NUM_FRAMES) {
+        LOGE("Buffer count %d out of bound. Max is %d",
+               new_bufCnt, MM_CAMERA_MAX_NUM_FRAMES);
+        ATRACE_END();
+        return BAD_INDEX;
+    }
+
+    for (int i = mBufferCount; i < new_bufCnt; i ++) {
+        if ( NULL == mMemoryPool ) {
+            LOGH("No memory pool available, allocating now");
+            rc = allocOneBuffer(mMemInfo[i], heap_id, size, m_bCached,
+                     secure_mode);
+            if (rc < 0) {
+                LOGE("AllocateIonMemory failed");
+                // Roll back only the buffers allocated by THIS call.
+                // Buffers below mBufferCount belong to a previous
+                // allocation; freeing them here would make the later
+                // dealloc() a double free.
+                for (int j = i-1; j >= mBufferCount; j--)
+                    deallocOneBuffer(mMemInfo[j]);
+                break;
+            }
+        } else {
+            rc = mMemoryPool->allocateBuffer(mMemInfo[i],
+                                             heap_id,
+                                             size,
+                                             m_bCached,
+                                             mStreamType,
+                                             secure_mode);
+            if (rc < 0) {
+                LOGE("Memory pool allocation failed");
+                // Same rollback rule as above: return only this call's
+                // buffers to the pool.
+                for (int j = i-1; j >= mBufferCount; j--)
+                    mMemoryPool->releaseBuffer(mMemInfo[j],
+                                               mStreamType);
+                break;
+            }
+        }
+
+    }
+    ATRACE_END();
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : dealloc
+ *
+ * DESCRIPTION: release every tracked buffer, either back to the attached
+ *              memory pool or directly to ion when no pool is in use
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraMemory::dealloc()
+{
+    for (int idx = 0; idx < mBufferCount; idx++) {
+        if (mMemoryPool != NULL) {
+            mMemoryPool->releaseBuffer(mMemInfo[idx], mStreamType);
+        } else {
+            deallocOneBuffer(mMemInfo[idx]);
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : allocOneBuffer
+ *
+ * DESCRIPTION: impl of allocating one buffer of certain size from /dev/ion
+ *
+ * PARAMETERS :
+ *   @memInfo : [output] reference to struct to store additional memory allocation info
+ *   @heap_id : [input] heap id to indicate where the buffers will be allocated from
+ *   @size    : [input] length of the buffer to be allocated
+ *   @cached  : [input] flag whether buffer needs to be cached
+ *   @secure_mode : [input] SECURE forces the CP_MM heap with 1 MiB alignment
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraMemory::allocOneBuffer(QCameraMemInfo &memInfo,
+        unsigned int heap_id, size_t size, bool cached, uint32_t secure_mode)
+{
+    int rc = OK;
+    struct ion_handle_data handle_data;
+    struct ion_allocation_data alloc;
+    struct ion_fd_data ion_info_fd;
+    int main_ion_fd = -1;
+
+    main_ion_fd = open("/dev/ion", O_RDONLY);
+    if (main_ion_fd < 0) {
+        LOGE("Ion dev open failed: %s\n", strerror(errno));
+        goto ION_OPEN_FAILED;
+    }
+
+    memset(&alloc, 0, sizeof(alloc));
+    alloc.len = size;
+    /* to make it page size aligned */
+    alloc.len = (alloc.len + 4095U) & (~4095U);
+    alloc.align = 4096;
+    if (cached) {
+        alloc.flags = ION_FLAG_CACHED;
+    }
+    alloc.heap_id_mask = heap_id;
+    if (secure_mode == SECURE) {
+        LOGD("Allocate secure buffer\n");
+        // Note: this intentionally overwrites ION_FLAG_CACHED -- secure
+        // buffers are always uncached.
+        alloc.flags = ION_SECURE;
+        alloc.heap_id_mask = ION_HEAP(ION_CP_MM_HEAP_ID);
+        alloc.align = 1048576; // 1 MiB alignment to be able to protect later
+        alloc.len = (alloc.len + 1048575U) & (~1048575U);
+    }
+
+    rc = ioctl(main_ion_fd, ION_IOC_ALLOC, &alloc);
+    if (rc < 0) {
+        LOGE("ION allocation failed: %s\n", strerror(errno));
+        goto ION_ALLOC_FAILED;
+    }
+
+    memset(&ion_info_fd, 0, sizeof(ion_info_fd));
+    ion_info_fd.handle = alloc.handle;
+    rc = ioctl(main_ion_fd, ION_IOC_SHARE, &ion_info_fd);
+    if (rc < 0) {
+        LOGE("ION map failed %s\n", strerror(errno));
+        goto ION_MAP_FAILED;
+    }
+
+    // The device fd is kept open per buffer so ION_IOC_FREE can be issued
+    // against the same client in deallocOneBuffer().
+    memInfo.main_ion_fd = main_ion_fd;
+    memInfo.fd = ion_info_fd.fd;
+    memInfo.handle = ion_info_fd.handle;
+    memInfo.size = alloc.len;
+    memInfo.cached = cached;
+    memInfo.heap_id = heap_id;
+
+    // alloc.len is size_t: use %zu (was %d, a format/argument mismatch on LP64).
+    LOGD("ION buffer %lx with size %zu allocated",
+             (unsigned long)memInfo.handle, alloc.len);
+    return OK;
+
+ION_MAP_FAILED:
+    memset(&handle_data, 0, sizeof(handle_data));
+    handle_data.handle = ion_info_fd.handle;
+    ioctl(main_ion_fd, ION_IOC_FREE, &handle_data);
+ION_ALLOC_FAILED:
+    close(main_ion_fd);
+ION_OPEN_FAILED:
+    return NO_MEMORY;
+}
+
+/*===========================================================================
+ * FUNCTION   : deallocOneBuffer
+ *
+ * DESCRIPTION: impl of deallocating one buffer: closes the shared fd,
+ *              frees the ion handle, closes the ion client fd, and resets
+ *              the bookkeeping fields
+ *
+ * PARAMETERS :
+ *   @memInfo : reference to struct that stores additional memory allocation info
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraMemory::deallocOneBuffer(QCameraMemInfo &memInfo)
+{
+    if (memInfo.fd >= 0) {
+        close(memInfo.fd);
+        memInfo.fd = -1;
+    }
+
+    if (memInfo.main_ion_fd >= 0) {
+        struct ion_handle_data handle_data;
+        memset(&handle_data, 0, sizeof(handle_data));
+        handle_data.handle = memInfo.handle;
+        ioctl(memInfo.main_ion_fd, ION_IOC_FREE, &handle_data);
+        close(memInfo.main_ion_fd);
+        memInfo.main_ion_fd = -1;
+    }
+
+    memInfo.handle = 0;
+    memInfo.size = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraMemoryPool
+ *
+ * DESCRIPTION: default constructor of QCameraMemoryPool
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraMemoryPool::QCameraMemoryPool()
+{
+    // mLock guards mPools against concurrent allocate/release/clear calls.
+    pthread_mutex_init(&mLock, NULL);
+}
+
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraMemoryPool
+ *
+ * DESCRIPTION: destructor of QCameraMemoryPool
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraMemoryPool::~QCameraMemoryPool()
+{
+    // Free all cached ion buffers before destroying the lock they use.
+    clear();
+    pthread_mutex_destroy(&mLock);
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseBuffer
+ *
+ * DESCRIPTION: return one buffer to the pool's per-stream cache so a later
+ *              allocateBuffer() can reuse it instead of hitting ion again
+ *
+ * PARAMETERS :
+ *   @memInfo : reference to struct that stores additional memory allocation info
+ *   @streamType: Type of stream the buffer belongs to
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraMemoryPool::releaseBuffer(
+        struct QCameraMemory::QCameraMemInfo &memInfo,
+        cam_stream_type_t streamType)
+{
+    pthread_mutex_lock(&mLock);
+    mPools[streamType].push_back(memInfo);
+    pthread_mutex_unlock(&mLock);
+}
+
+/*===========================================================================
+ * FUNCTION   : clear
+ *
+ * DESCRIPTION: deallocate and drop every cached buffer in every per-stream
+ *              pool
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraMemoryPool::clear()
+{
+    pthread_mutex_lock(&mLock);
+
+    for (int type = CAM_STREAM_TYPE_DEFAULT; type < CAM_STREAM_TYPE_MAX; type++) {
+        List<struct QCameraMemory::QCameraMemInfo>::iterator iter =
+                mPools[type].begin();
+        while (iter != mPools[type].end()) {
+            QCameraMemory::deallocOneBuffer(*iter);
+            iter++;
+        }
+        mPools[type].clear();
+    }
+
+    pthread_mutex_unlock(&mLock);
+}
+
+/*===========================================================================
+ * FUNCTION   : findBufferLocked
+ *
+ * DESCRIPTION: search for an appropriate cached buffer. Caller must hold
+ *              mLock.
+ *
+ * PARAMETERS :
+ *   @memInfo : reference to struct that stores additional memory allocation info
+ *   @heap_id : type of heap
+ *   @size    : size of the buffer
+ *   @cached  : whether the buffer should be cached
+ *   @streamType: type of stream this buffer belongs to
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraMemoryPool::findBufferLocked(
+        struct QCameraMemory::QCameraMemInfo &memInfo, unsigned int heap_id,
+        size_t size, bool cached, cam_stream_type_t streamType)
+{
+    int rc = NAME_NOT_FOUND;
+
+    if (mPools[streamType].empty()) {
+        return NAME_NOT_FOUND;
+    }
+
+    // Offline-proc streams require an exact size match; all other streams
+    // may reuse any cached buffer that is at least as large as requested.
+    // (Previously expressed as two nearly-identical loops; consolidated.)
+    bool exactMatch = (streamType == CAM_STREAM_TYPE_OFFLINE_PROC);
+
+    List<struct QCameraMemory::QCameraMemInfo>::iterator it =
+            mPools[streamType].begin();
+    for( ; it != mPools[streamType].end() ; it++) {
+        bool sizeOk = exactMatch ? ((*it).size == size) : ((*it).size >= size);
+        if (sizeOk &&
+                ((*it).heap_id == heap_id) &&
+                ((*it).cached == cached) ) {
+            memInfo = *it;
+            LOGD("Found buffer %lx size %d",
+                     (unsigned long)memInfo.handle, memInfo.size);
+            mPools[streamType].erase(it);
+            rc = NO_ERROR;
+            break;
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : allocateBuffer
+ *
+ * DESCRIPTION: allocates a buffer from the memory pool,
+ *              reusing a cached buffer when a compatible one exists
+ *
+ * PARAMETERS :
+ *   @memInfo : reference to struct that stores additional memory allocation info
+ *   @heap_id : type of heap
+ *   @size    : size of the buffer
+ *   @cached  : whether the buffer should be cached
+ *   @streamType: type of stream this buffer belongs to
+ *   @secure_mode : SECURE or NON_SECURE allocation
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraMemoryPool::allocateBuffer(
+        struct QCameraMemory::QCameraMemInfo &memInfo, unsigned int heap_id,
+        size_t size, bool cached, cam_stream_type_t streamType,
+        uint32_t secure_mode)
+{
+    pthread_mutex_lock(&mLock);
+
+    // Try the cache first; fall back to a fresh ion allocation on a miss.
+    int rc = findBufferLocked(memInfo, heap_id, size, cached, streamType);
+    if (NAME_NOT_FOUND == rc) {
+        LOGD("Buffer not found!");
+        rc = QCameraMemory::allocOneBuffer(memInfo, heap_id, size, cached,
+                 secure_mode);
+    }
+
+    pthread_mutex_unlock(&mLock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraHeapMemory
+ *
+ * DESCRIPTION: constructor of QCameraHeapMemory for ion memory used
+ *              internally in HAL
+ *
+ * PARAMETERS :
+ *   @cached  : flag indicates if using cached memory
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraHeapMemory::QCameraHeapMemory(bool cached)
+    : QCameraMemory(cached)
+{
+    // No buffers are mapped yet.
+    for (int idx = 0; idx < MM_CAMERA_MAX_NUM_FRAMES; idx++) {
+        mPtr[idx] = NULL;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraHeapMemory
+ *
+ * DESCRIPTION: destructor of QCameraHeapMemory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraHeapMemory::~QCameraHeapMemory()
+{
+    // Intentionally empty: callers are expected to invoke deallocate()
+    // before destruction (see ~QCameraMetadataStreamMemory for a safety net).
+}
+
+/*===========================================================================
+ * FUNCTION   : getPtr
+ *
+ * DESCRIPTION: return buffer pointer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : buffer ptr, or (void *)BAD_INDEX on out-of-range index
+ *==========================================================================*/
+void *QCameraHeapMemory::getPtr(uint32_t index) const
+{
+    if (index >= mBufferCount) {
+        LOGE("index out of bound");
+        // NOTE(review): returns the error code cast to a pointer, not NULL;
+        // callers must compare against BAD_INDEX and never dereference this.
+        return (void *)BAD_INDEX;
+    }
+    return mPtr[index];
+}
+
+/*===========================================================================
+ * FUNCTION   : allocate
+ *
+ * DESCRIPTION: allocate requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ *   @count   : number of buffers to be allocated
+ *   @size    : length of the buffer to be allocated
+ *   @isSecure: SECURE or NON_SECURE allocation
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraHeapMemory::allocate(uint8_t count, size_t size, uint32_t isSecure)
+{
+    int rc = -1;
+    ATRACE_BEGIN_SNPRINTF("%s %zu %d", "HeapMemsize", size, count);
+    uint32_t heap_id_mask = 0x1 << ION_IOMMU_HEAP_ID;
+    if (isSecure == SECURE) {
+        // Secure buffers cannot be mapped into HAL address space, so only
+        // the ion allocation is done here (mPtr entries stay NULL).
+        rc = alloc(count, size, heap_id_mask, SECURE);
+        if (rc < 0) {
+            ATRACE_END();
+            return rc;
+        }
+    } else {
+        rc = alloc(count, size, heap_id_mask, NON_SECURE);
+        if (rc < 0) {
+            ATRACE_END();
+            return rc;
+        }
+
+        // Map every buffer for CPU access; on any mmap failure unwind
+        // everything allocated so far.
+        for (int i = 0; i < count; i ++) {
+            void *vaddr = mmap(NULL,
+                        mMemInfo[i].size,
+                        PROT_READ | PROT_WRITE,
+                        MAP_SHARED,
+                        mMemInfo[i].fd, 0);
+            if (vaddr == MAP_FAILED) {
+                for (int j = i-1; j >= 0; j --) {
+                    munmap(mPtr[j], mMemInfo[j].size);
+                    mPtr[j] = NULL;
+                    deallocOneBuffer(mMemInfo[j]);
+                }
+                // Deallocate remaining buffers that have already been allocated
+                for (int j = i; j < count; j++) {
+                    deallocOneBuffer(mMemInfo[j]);
+                }
+                ATRACE_END();
+                return NO_MEMORY;
+            } else
+                mPtr[i] = vaddr;
+        }
+    }
+    if (rc == 0) {
+        mBufferCount = count;
+    }
+    ATRACE_END();
+    return OK;
+}
+
+/*===========================================================================
+ * FUNCTION   : allocateMore
+ *
+ * DESCRIPTION: allocate more requested number of buffers of certain size,
+ *              appended after the existing mBufferCount buffers
+ *
+ * PARAMETERS :
+ *   @count   : number of additional buffers to be allocated
+ *   @size    : length of the buffer to be allocated
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraHeapMemory::allocateMore(uint8_t count, size_t size)
+{
+    ATRACE_BEGIN_SNPRINTF("%s %zu %d", "HeapMemsize", size, count);
+    unsigned int heap_id_mask = 0x1 << ION_IOMMU_HEAP_ID;
+    int rc = alloc(count, size, heap_id_mask, NON_SECURE);
+    if (rc < 0) {
+        ATRACE_END();
+        return rc;
+    }
+
+    // Map only the newly added buffers; unwind just those on mmap failure.
+    for (int i = mBufferCount; i < count + mBufferCount; i ++) {
+        void *vaddr = mmap(NULL,
+                    mMemInfo[i].size,
+                    PROT_READ | PROT_WRITE,
+                    MAP_SHARED,
+                    mMemInfo[i].fd, 0);
+        if (vaddr == MAP_FAILED) {
+            for (int j = i-1; j >= mBufferCount; j --) {
+                munmap(mPtr[j], mMemInfo[j].size);
+                mPtr[j] = NULL;
+                deallocOneBuffer(mMemInfo[j]);
+            }
+            ATRACE_END();
+            return NO_MEMORY;
+        } else {
+            mPtr[i] = vaddr;
+        }
+    }
+    mBufferCount = (uint8_t)(mBufferCount + count);
+    ATRACE_END();
+    return OK;
+}
+
+/*===========================================================================
+ * FUNCTION   : deallocate
+ *
+ * DESCRIPTION: unmap and free all heap buffers owned by this object
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraHeapMemory::deallocate()
+{
+    for (int idx = 0; idx < mBufferCount; idx++) {
+        munmap(mPtr[idx], mMemInfo[idx].size);
+        mPtr[idx] = NULL;
+    }
+    dealloc();
+    mBufferCount = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : cacheOps
+ *
+ * DESCRIPTION: ion related memory cache operations
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *   @cmd     : cache ops command
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              BAD_INDEX -- index out of range
+ *==========================================================================*/
+int QCameraHeapMemory::cacheOps(uint32_t index, unsigned int cmd)
+{
+    if (index < mBufferCount) {
+        return cacheOpsInternal(index, cmd, mPtr[index]);
+    }
+    return BAD_INDEX;
+}
+
+/*===========================================================================
+ * FUNCTION   : getRegFlags
+ *
+ * DESCRIPTION: query initial reg flags
+ *
+ * PARAMETERS :
+ *   @regFlags: initial reg flags of the allocated buffers
+ *
+ * RETURN     : INVALID_OPERATION always -- heap memory is HAL-internal and
+ *              is never registered with the kernel stream
+ *==========================================================================*/
+int QCameraHeapMemory::getRegFlags(uint8_t * /*regFlags*/) const
+{
+    return INVALID_OPERATION;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMemory
+ *
+ * DESCRIPTION: get camera memory
+ *
+ * PARAMETERS :
+ *   @index   : buffer index
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : NULL always -- heap buffers are not wrapped in
+ *              camera_memory_t and are never handed to the framework
+ *==========================================================================*/
+camera_memory_t *QCameraHeapMemory::getMemory(uint32_t /*index*/, bool /*metadata*/) const
+{
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMatchBufIndex
+ *
+ * DESCRIPTION: query buffer index by opaque ptr (the mmap'ed address)
+ *
+ * PARAMETERS :
+ *   @opaque  : opaque ptr
+ *   @metadata: flag if it's metadata (never matches for heap memory)
+ *
+ * RETURN     : buffer index if match found,
+ *              -1 if failed
+ *==========================================================================*/
+int QCameraHeapMemory::getMatchBufIndex(const void *opaque,
+                                        bool metadata) const
+{
+    if (metadata) {
+        return -1;
+    }
+    for (int idx = 0; idx < mBufferCount; idx++) {
+        if (opaque == mPtr[idx]) {
+            return idx;
+        }
+    }
+    return -1;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraMetadataStreamMemory
+ *
+ * DESCRIPTION: constructor of QCameraMetadataStreamMemory
+ *              for ion memory used internally in HAL for metadata
+ *
+ * PARAMETERS :
+ *   @cached  : flag indicates if using cached memory
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraMetadataStreamMemory::QCameraMetadataStreamMemory(bool cached)
+    : QCameraHeapMemory(cached)
+{
+    // All allocation state is handled by the QCameraHeapMemory base class.
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraMetadataStreamMemory
+ *
+ * DESCRIPTION: destructor of QCameraMetadataStreamMemory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraMetadataStreamMemory::~QCameraMetadataStreamMemory()
+{
+    // Safety net: free any buffers the owner forgot to deallocate.
+    if (mBufferCount > 0) {
+        LOGH("%s, buf_cnt > 0, deallocate buffers now.\n", __func__);
+        deallocate();
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getRegFlags
+ *
+ * DESCRIPTION: query initial reg flags; every metadata buffer is queued
+ *              to the kernel at stream start, so all flags are 1
+ *
+ * PARAMETERS :
+ *   @regFlags: initial reg flags of the allocated buffers
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraMetadataStreamMemory::getRegFlags(uint8_t *regFlags) const
+{
+    for (int idx = 0; idx < mBufferCount; idx++) {
+        regFlags[idx] = 1;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraStreamMemory
+ *
+ * DESCRIPTION: constructor of QCameraStreamMemory
+ *              ION memory allocated directly from /dev/ion and shared with framework
+ *
+ * PARAMETERS :
+ *   @memory    : camera memory request ops table
+ *   @cached    : flag indicates if using cached memory
+ *   @pool      : optional memory pool for buffer reuse
+ *   @streamType: stream type these buffers belong to (pool bucketing key)
+ *   @bufType   : buffer type (unused here)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraStreamMemory::QCameraStreamMemory(camera_request_memory memory,
+        bool cached,
+        QCameraMemoryPool *pool,
+        cam_stream_type_t streamType, __unused cam_stream_buf_type bufType)
+    :QCameraMemory(cached, pool, streamType),
+     mGetMemory(memory)
+{
+    // No framework wrappers exist until allocate() is called.
+    for (int i = 0; i < MM_CAMERA_MAX_NUM_FRAMES; i ++)
+        mCameraMemory[i] = NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraStreamMemory
+ *
+ * DESCRIPTION: destructor of QCameraStreamMemory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraStreamMemory::~QCameraStreamMemory()
+{
+    // Intentionally empty: owners call deallocate() explicitly.
+}
+
+/*===========================================================================
+ * FUNCTION   : allocate
+ *
+ * DESCRIPTION: allocate requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ *   @count   : number of buffers to be allocated
+ *   @size    : length of the buffer to be allocated
+ *   @isSecure: SECURE or NON_SECURE allocation
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraStreamMemory::allocate(uint8_t count, size_t size, uint32_t isSecure)
+{
+    ATRACE_BEGIN_SNPRINTF("%s %zu %d", "StreamMemsize", size, count);
+    unsigned int heap_id_mask = 0x1 << ION_IOMMU_HEAP_ID;
+    int rc = alloc(count, size, heap_id_mask, isSecure);
+    if (rc < 0) {
+        ATRACE_END();
+        return rc;
+    }
+
+    for (int i = 0; i < count; i ++) {
+        if (isSecure == SECURE) {
+            // Secure buffers are never exposed to the framework.
+            mCameraMemory[i] = 0;
+        } else {
+            // NOTE(review): mGetMemory result is not checked for NULL;
+            // downstream code (e.g. cacheOps, getMatchBufIndex) dereferences
+            // these entries -- confirm failure handling with callers.
+            mCameraMemory[i] = mGetMemory(mMemInfo[i].fd, mMemInfo[i].size, 1, this);
+        }
+    }
+    mBufferCount = count;
+    ATRACE_END();
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : allocateMore
+ *
+ * DESCRIPTION: allocate more requested number of buffers of certain size,
+ *              appended after the existing mBufferCount buffers
+ *
+ * PARAMETERS :
+ *   @count   : number of additional buffers to be allocated
+ *   @size    : length of the buffer to be allocated
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraStreamMemory::allocateMore(uint8_t count, size_t size)
+{
+    ATRACE_BEGIN_SNPRINTF("%s %zu %d", "StreamMemsize", size, count);
+    unsigned int heap_id_mask = 0x1 << ION_IOMMU_HEAP_ID;
+    int rc = alloc(count, size, heap_id_mask, NON_SECURE);
+    if (rc < 0) {
+        ATRACE_END();
+        return rc;
+    }
+
+    // NOTE(review): mGetMemory result is not checked for NULL here either.
+    for (int i = mBufferCount; i < mBufferCount + count; i++) {
+        mCameraMemory[i] = mGetMemory(mMemInfo[i].fd, mMemInfo[i].size, 1, this);
+    }
+    mBufferCount = (uint8_t)(mBufferCount + count);
+    ATRACE_END();
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : deallocate
+ *
+ * DESCRIPTION: release the framework camera_memory_t wrappers, then free
+ *              the underlying ion buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraStreamMemory::deallocate()
+{
+    for (int idx = 0; idx < mBufferCount; idx++) {
+        camera_memory_t *mem = mCameraMemory[idx];
+        if (mem != NULL) {
+            mem->release(mem);
+        }
+        mCameraMemory[idx] = NULL;
+    }
+    dealloc();
+    mBufferCount = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : cacheOps
+ *
+ * DESCRIPTION: ion related memory cache operations
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *   @cmd     : cache ops command
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraStreamMemory::cacheOps(uint32_t index, unsigned int cmd)
+{
+    if (index >= mBufferCount)
+        return BAD_INDEX;
+    // NOTE(review): mCameraMemory[index] is 0 for SECURE allocations (see
+    // allocate()); calling cacheOps on a secure buffer would crash -- confirm
+    // callers never do this.
+    return cacheOpsInternal(index, cmd, mCameraMemory[index]->data);
+}
+
+/*===========================================================================
+ * FUNCTION   : getRegFlags
+ *
+ * DESCRIPTION: query initial reg flags; all stream buffers are initially
+ *              queued to the kernel, so every flag is 1
+ *
+ * PARAMETERS :
+ *   @regFlags: initial reg flags of the allocated buffers
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraStreamMemory::getRegFlags(uint8_t *regFlags) const
+{
+    for (int idx = 0; idx < mBufferCount; idx++) {
+        regFlags[idx] = 1;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMemory
+ *
+ * DESCRIPTION: get camera memory wrapper for a stream buffer
+ *
+ * PARAMETERS :
+ *   @index   : buffer index
+ *   @metadata: flag if it's metadata (stream memory holds no metadata)
+ *
+ * RETURN     : camera memory ptr
+ *              NULL if not supported or failed
+ *==========================================================================*/
+camera_memory_t *QCameraStreamMemory::getMemory(uint32_t index,
+        bool metadata) const
+{
+    if (metadata || index >= mBufferCount) {
+        return NULL;
+    }
+    return mCameraMemory[index];
+}
+
+/*===========================================================================
+ * FUNCTION   : getMatchBufIndex
+ *
+ * DESCRIPTION: query buffer index by opaque ptr (the camera_memory_t data
+ *              pointer handed to the framework)
+ *
+ * PARAMETERS :
+ *   @opaque  : opaque ptr
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : buffer index if match found,
+ *              -1 if failed
+ *==========================================================================*/
+int QCameraStreamMemory::getMatchBufIndex(const void *opaque,
+                                          bool metadata) const
+{
+    int index = -1;
+    if (metadata) {
+        return -1;
+    }
+    // NOTE(review): dereferences mCameraMemory[i] without a NULL check;
+    // entries are 0 for SECURE allocations -- confirm this is never called
+    // on a secure stream.
+    for (int i = 0; i < mBufferCount; i++) {
+        if (mCameraMemory[i]->data == opaque) {
+            index = i;
+            break;
+        }
+    }
+    return index;
+}
+
+/*===========================================================================
+ * FUNCTION   : getPtr
+ *
+ * DESCRIPTION: return buffer pointer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : buffer ptr; NULL for secure buffers (no framework wrapper);
+ *              (void *)BAD_INDEX sentinel on out-of-range index
+ *==========================================================================*/
+void *QCameraStreamMemory::getPtr(uint32_t index) const
+{
+    if (index >= mBufferCount) {
+        LOGE("index out of bound");
+        // NOTE(review): error code cast to a pointer, not NULL -- callers
+        // must compare against BAD_INDEX and never dereference this.
+        return (void *)BAD_INDEX;
+    }
+    if (mCameraMemory[index] == 0) {
+        return NULL;
+    }
+    return mCameraMemory[index]->data;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraVideoMemory
+ *
+ * DESCRIPTION: constructor of QCameraVideoMemory
+ *              VideoStream buffers also include metadata buffers
+ *
+ * PARAMETERS :
+ *   @memory    : camera memory request ops table
+ *   @cached    : flag indicates if using cached ION memory
+ *   @bufType   : memory type (e.g. QCAMERA_MEM_TYPE_BATCH)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraVideoMemory::QCameraVideoMemory(camera_request_memory memory,
+                                       bool cached, QCameraMemType bufType)
+    : QCameraStreamMemory(memory, cached)
+{
+    // No metadata buffers or native handles exist yet.
+    memset(mMetadata, 0, sizeof(mMetadata));
+    memset(mNativeHandle, 0, sizeof(mNativeHandle));
+    mMetaBufCount = 0;
+    mBufType = bufType;
+    //Set Default color conversion format
+    mUsage = private_handle_t::PRIV_FLAGS_ITU_R_601_FR;
+
+    //Set Default frame format
+    mFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraVideoMemory
+ *
+ * DESCRIPTION: destructor of QCameraVideoMemory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraVideoMemory::~QCameraVideoMemory()
+{
+    // Intentionally empty: owners call deallocate()/deallocateMeta().
+}
+
+/*===========================================================================
+ * FUNCTION   : allocate
+ *
+ * DESCRIPTION: allocate requested number of buffers of certain size,
+ *              plus the per-buffer metadata/native handles for non-batch
+ *              mode
+ *
+ * PARAMETERS :
+ *   @count   : number of buffers to be allocated
+ *   @size    : length of the buffer to be allocated
+ *   @isSecure: SECURE or NON_SECURE allocation
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraVideoMemory::allocate(uint8_t count, size_t size, uint32_t isSecure)
+{
+    ATRACE_BEGIN_SNPRINTF("%s %zu %d", "VideoMemsize", size, count);
+    int rc = QCameraStreamMemory::allocate(count, size, isSecure);
+    if (rc < 0) {
+        ATRACE_END();
+        return rc;
+    }
+
+    if (!(mBufType & QCAMERA_MEM_TYPE_BATCH)) {
+        /*
+        *    FDs = 1
+        *    numInts  = 5 //offset, size, usage, timestamp, format
+        */
+        rc = allocateMeta(count, 1, VIDEO_METADATA_NUM_INTS);
+        if (rc != NO_ERROR) {
+            ATRACE_END();
+            return rc;
+        }
+        // Fill each native handle: data[0] is the buffer FD, data[1..5] are
+        // the 5 ints (offset, size, usage, timestamp, format).
+        for (int i = 0; i < count; i ++) {
+            native_handle_t *nh =  mNativeHandle[i];
+            if (!nh) {
+                // NOTE(review): returns without releasing the buffers/meta
+                // allocated above -- confirm the caller tears down on error.
+                LOGE("Error in getting video native handle");
+                ATRACE_END();
+                return NO_MEMORY;
+            }
+            nh->data[0] = mMemInfo[i].fd;
+            nh->data[1] = 0;
+            nh->data[2] = (int)mMemInfo[i].size;
+            nh->data[3] = mUsage;
+            nh->data[4] = 0; //dummy value for timestamp in non-batch mode
+            nh->data[5] = mFormat;
+        }
+    }
+    mBufferCount = count;
+    ATRACE_END();
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : allocateMore
+ *
+ * DESCRIPTION: allocate more requested number of buffers of certain size.
+ *              In non-batch mode each new buffer also gets a metadata
+ *              buffer and a native handle describing its ION fd.
+ *
+ * PARAMETERS :
+ *   @count   : number of additional buffers to be allocated
+ *   @size    : length of the buffer to be allocated
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraVideoMemory::allocateMore(uint8_t count, size_t size)
+{
+    ATRACE_BEGIN_SNPRINTF("%s %zu %d", "VideoMemsize", size, count);
+    int rc = QCameraStreamMemory::allocateMore(count, size);
+    if (rc < 0) {
+        ATRACE_END();
+        return rc;
+    }
+
+    if (!(mBufType & QCAMERA_MEM_TYPE_BATCH)) {
+        for (int i = mBufferCount; i < count + mBufferCount; i ++) {
+            mMetadata[i] = mGetMemory(-1,
+                    sizeof(media_metadata_buffer), 1, this);
+            if (!mMetadata[i]) {
+                LOGE("allocation of video metadata failed.");
+                // Unwind every buffer added by this call so far, including
+                // the native handles created for them (previously leaked).
+                for (int j = mBufferCount; j <= i-1; j ++) {
+                    if (NULL != mNativeHandle[j]) {
+                        native_handle_delete(mNativeHandle[j]);
+                        mNativeHandle[j] = NULL;
+                    }
+                    mMetadata[j]->release(mMetadata[j]);
+                    mMetadata[j] = NULL;
+                    mCameraMemory[j]->release(mCameraMemory[j]);
+                    mCameraMemory[j] = NULL;
+                    deallocOneBuffer(mMemInfo[j]);
+                }
+                ATRACE_END();
+                return NO_MEMORY;
+            }
+            media_metadata_buffer * packet =
+                    (media_metadata_buffer *)mMetadata[i]->data;
+            //FDs = 1
+            //numInts  = 5 (offset, size, usage, timestamp, format)
+            mNativeHandle[i] = native_handle_create(1, VIDEO_METADATA_NUM_INTS);
+            native_handle_t *nh =  mNativeHandle[i];
+            if (!nh) {
+                LOGE("Error in getting video native handle");
+                // Release this iteration's metadata buffer, then unwind the
+                // buffers added earlier by this call (previously leaked).
+                mMetadata[i]->release(mMetadata[i]);
+                mMetadata[i] = NULL;
+                for (int j = mBufferCount; j <= i-1; j ++) {
+                    if (NULL != mNativeHandle[j]) {
+                        native_handle_delete(mNativeHandle[j]);
+                        mNativeHandle[j] = NULL;
+                    }
+                    mMetadata[j]->release(mMetadata[j]);
+                    mMetadata[j] = NULL;
+                    mCameraMemory[j]->release(mCameraMemory[j]);
+                    mCameraMemory[j] = NULL;
+                    deallocOneBuffer(mMemInfo[j]);
+                }
+                ATRACE_END();
+                return NO_MEMORY;
+            }
+#ifdef USE_MEDIA_EXTENSIONS
+            packet->eType = kMetadataBufferTypeNativeHandleSource;
+            packet->pHandle = mNativeHandle[i];
+#else
+            packet->buffer_type = kMetadataBufferTypeCameraSource;
+            packet->meta_handle = mNativeHandle[i];
+#endif
+            // Handle layout: fd, then offset, size, usage, timestamp, format.
+            nh->data[0] = mMemInfo[i].fd;
+            nh->data[1] = 0;
+            nh->data[2] = (int)mMemInfo[i].size;
+            nh->data[3] = mUsage;
+            nh->data[4] = 0; //dummy value for timestamp in non-batch mode
+            nh->data[5] = mFormat;
+        }
+    }
+    mBufferCount = (uint8_t)(mBufferCount + count);
+    mMetaBufCount = mBufferCount;
+    ATRACE_END();
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : allocateMeta
+ *
+ * DESCRIPTION: allocate video encoder metadata structures: one
+ *              camera_memory_t wrapper plus one native handle per buffer.
+ *
+ * PARAMETERS :
+ *   @buf_cnt : number of metadata buffers to allocate
+ *   @numFDs  : FD count placed in each native handle
+ *   @numInts : int count per FD placed in each native handle
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraVideoMemory::allocateMeta(uint8_t buf_cnt, int numFDs, int numInts)
+{
+    int rc = NO_ERROR;
+
+    for (int i = 0; i < buf_cnt; i++) {
+        mMetadata[i] = mGetMemory(-1,
+                sizeof(media_metadata_buffer), 1, this);
+        if (!mMetadata[i]) {
+            LOGE("allocation of video metadata failed.");
+            // Unwind all fully-initialized predecessors.
+            for (int j = (i - 1); j >= 0; j--) {
+                if (NULL != mNativeHandle[j]) {
+                   native_handle_delete(mNativeHandle[j]);
+                }
+                mMetadata[j]->release(mMetadata[j]);
+            }
+            return NO_MEMORY;
+        }
+        media_metadata_buffer *packet =
+                (media_metadata_buffer *)mMetadata[i]->data;
+        mNativeHandle[i] = native_handle_create(numFDs, (numInts * numFDs));
+        if (mNativeHandle[i] == NULL) {
+            LOGE("Error in getting video native handle");
+            // Release this iteration's metadata buffer exactly once
+            // (previously it was released inside the loop below, once per
+            // predecessor — a multiple-release bug), then unwind the rest.
+            mMetadata[i]->release(mMetadata[i]);
+            for (int j = (i - 1); j >= 0; j--) {
+                if (NULL != mNativeHandle[j]) {
+                   native_handle_delete(mNativeHandle[j]);
+                }
+                mMetadata[j]->release(mMetadata[j]);
+            }
+            return NO_MEMORY;
+        }
+#ifdef USE_MEDIA_EXTENSIONS
+        packet->eType = kMetadataBufferTypeNativeHandleSource;
+        packet->pHandle = mNativeHandle[i];
+#else
+        packet->buffer_type = kMetadataBufferTypeCameraSource;
+        packet->meta_handle = mNativeHandle[i];
+#endif
+    }
+    mMetaBufCount = buf_cnt;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : deallocateMeta
+ *
+ * DESCRIPTION: release all video metadata buffers and delete their
+ *              native handles
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraVideoMemory::deallocateMeta()
+{
+    for (int i = 0; i < mMetaBufCount; i++) {
+        native_handle_t *handle = mNativeHandle[i];
+        if (handle == NULL) {
+            LOGE("native handle not available");
+        } else if (native_handle_delete(handle)) {
+            // native_handle_delete returns non-zero on failure.
+            LOGE("Unable to delete native handle");
+        }
+        mNativeHandle[i] = NULL;
+        mMetadata[i]->release(mMetadata[i]);
+        mMetadata[i] = NULL;
+    }
+    mMetaBufCount = 0;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : deallocate
+ *
+ * DESCRIPTION: deallocate buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraVideoMemory::deallocate()
+{
+    deallocateMeta();
+
+    QCameraStreamMemory::deallocate();
+    mBufferCount = 0;
+    mMetaBufCount = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMemory
+ *
+ * DESCRIPTION: look up a camera memory buffer by index, either from the
+ *              metadata pool or from the frame pool
+ *
+ * PARAMETERS :
+ *   @index   : buffer index
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : camera memory ptr
+ *              NULL if not supported or failed
+ *==========================================================================*/
+camera_memory_t *QCameraVideoMemory::getMemory(uint32_t index,
+        bool metadata) const
+{
+    // Index must be within the relevant pool's bounds.
+    if (index >= mMetaBufCount || (!metadata && index >= mBufferCount)) {
+        return NULL;
+    }
+    return metadata ? mMetadata[index] : mCameraMemory[index];
+}
+
+/*===========================================================================
+ * FUNCTION   : updateNativeHandle
+ *
+ * DESCRIPTION: re-attach the stored native handle pointer to the metadata
+ *              packet at the given index and return it
+ *
+ * PARAMETERS :
+ *   @index   : buffer index
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : camera native handle ptr
+ *              NULL if not supported or failed
+ *==========================================================================*/
+native_handle_t *QCameraVideoMemory::updateNativeHandle(uint32_t index, bool metadata)
+{
+    if (index >= mMetaBufCount || (!metadata && index >= mBufferCount)) {
+        return NULL;
+    }
+    // Only metadata buffers carry a handle to update.
+    if (!metadata || mMetadata[index] == NULL) {
+        return NULL;
+    }
+
+    media_metadata_buffer *packet =
+            (media_metadata_buffer *)mMetadata[index]->data;
+    native_handle_t *nh = mNativeHandle[index];
+#ifdef USE_MEDIA_EXTENSIONS
+    packet->pHandle = nh;
+#else
+    packet->meta_handle = nh;
+#endif
+    return nh;
+}
+
+/*===========================================================================
+ * FUNCTION   : closeNativeHandle
+ *
+ * DESCRIPTION: close and delete the native handle embedded in a returned
+ *              video metadata buffer. Only effective when built with
+ *              USE_MEDIA_EXTENSIONS; otherwise a no-op returning NO_ERROR.
+ *
+ * PARAMETERS :
+ *   @data    : ptr to the metadata buffer payload being returned
+ *   @metadata: true if @data is a metadata buffer; non-metadata input
+ *              is only warned about
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraVideoMemory::closeNativeHandle(const void *data, bool metadata)
+{
+    int32_t rc = NO_ERROR;
+    int32_t index = -1;
+
+#ifdef USE_MEDIA_EXTENSIONS
+    camera_memory_t *video_mem = NULL;
+
+    if (metadata) {
+        // Map the opaque payload pointer back to its buffer index.
+        index = getMatchBufIndex(data, metadata);
+        if (index < 0) {
+            LOGE("Invalid buffer");
+            return BAD_VALUE;
+        }
+        video_mem = getMemory(index, metadata);
+        media_metadata_buffer * packet = NULL;
+        if (video_mem) {
+             packet = (media_metadata_buffer *)video_mem->data;
+        }
+
+        // Only close handles we know were populated for media extensions.
+        if (packet != NULL && packet->eType ==
+                kMetadataBufferTypeNativeHandleSource) {
+            native_handle_close(packet->pHandle);
+            native_handle_delete(packet->pHandle);
+            packet->pHandle = NULL;
+        } else {
+            LOGE("Invalid Data. Could not release");
+            return BAD_VALUE;
+        }
+    } else {
+        LOGW("Warning: Not of type video meta buffer");
+    }
+#endif
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMatchBufIndex
+ *
+ * DESCRIPTION: find the index whose buffer data pointer equals the given
+ *              opaque pointer, searching either the metadata pool or the
+ *              frame pool
+ *
+ * PARAMETERS :
+ *   @opaque  : opaque ptr
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : buffer index if match found,
+ *              -1 if failed
+ *==========================================================================*/
+int QCameraVideoMemory::getMatchBufIndex(const void *opaque,
+                                         bool metadata) const
+{
+    const int poolSize = metadata ? mMetaBufCount : mBufferCount;
+    for (int i = 0; i < poolSize; i++) {
+        const void *bufData =
+                metadata ? mMetadata[i]->data : mCameraMemory[i]->data;
+        if (bufData == opaque) {
+            return i;
+        }
+    }
+    return -1;
+}
+
+/*===========================================================================
+ * FUNCTION   : setVideoInfo
+ *
+ * DESCRIPTION: record the gralloc usage bits and the OMX frame format that
+ *              will be written into each video native handle
+ *
+ * PARAMETERS :
+ *   @usage  : usage bit for video
+ *   @format : camera frame format
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraVideoMemory::setVideoInfo(int usage, cam_format_t format)
+{
+    mFormat = convCamtoOMXFormat(format);
+    mUsage |= usage;
+}
+
+/*===========================================================================
+ * FUNCTION   : convCamtoOMXFormat
+ *
+ * DESCRIPTION: translate a camera frame format into the matching OMX color
+ *              format; anything unrecognized maps to NV12 semiplanar
+ *
+ * PARAMETERS :
+ *   @format : format in cam_format_t type
+ *
+ * RETURN     : omx format
+ *==========================================================================*/
+int QCameraVideoMemory::convCamtoOMXFormat(cam_format_t format)
+{
+    switch (format) {
+        case CAM_FORMAT_YUV_420_NV21:
+        case CAM_FORMAT_YUV_420_NV21_VENUS:
+        case CAM_FORMAT_YUV_420_NV21_ADRENO:
+            return QOMX_COLOR_FormatYVU420SemiPlanar;
+#ifndef VANILLA_HAL
+        case CAM_FORMAT_YUV_420_NV12_UBWC:
+            return QOMX_COLOR_FORMATYUV420PackedSemiPlanar32mCompressed;
+#endif
+        case CAM_FORMAT_YUV_420_NV12:
+        case CAM_FORMAT_YUV_420_NV12_VENUS:
+        default:
+            // NV12 semiplanar is both the NV12 mapping and the fallback.
+            return OMX_COLOR_FormatYUV420SemiPlanar;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraGrallocMemory
+ *
+ * DESCRIPTION: constructor of QCameraGrallocMemory. Preview stream buffers
+ *              are dequeued from the gralloc native window.
+ *
+ * PARAMETERS :
+ *   @memory    : camera memory request ops table
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraGrallocMemory::QCameraGrallocMemory(camera_request_memory memory)
+        : QCameraMemory(true), mColorSpace(ITU_R_601_FR)
+{
+    mGetMemory = memory;
+    mWindow = NULL;
+    mFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
+    mWidth = mHeight = mStride = mScanline = mUsage = 0;
+    mMinUndequeuedBuffers = 0;
+    mMappableBuffers = 0;
+    for (int idx = 0; idx < MM_CAMERA_MAX_NUM_FRAMES; idx++) {
+        mBufferHandle[idx] = NULL;
+        mPrivateHandle[idx] = NULL;
+        mCameraMemory[idx] = NULL;
+        mLocalFlag[idx] = BUFFER_NOT_OWNED;
+        mBufferStatus[idx] = STATUS_IDLE;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraGrallocMemory
+ *
+ * DESCRIPTION: destructor of QCameraGrallocMemory. Window buffers are
+ *              returned explicitly through deallocate(), not here.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraGrallocMemory::~QCameraGrallocMemory()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : setWindowInfo
+ *
+ * DESCRIPTION: capture the native window ops table along with the preview
+ *              geometry, format, fps and gralloc usage for later allocation
+ *
+ * PARAMETERS :
+ *   @window  : gralloc ops table ptr
+ *   @width   : width of preview frame
+ *   @height  : height of preview frame
+ *   @stride  : stride of preview frame
+ *   @scanline: scanline of preview frame
+ *   @format  : format of preview image
+ *   @maxFPS  : max fps of preview stream
+ *   @usage   : usage bit for gralloc
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraGrallocMemory::setWindowInfo(preview_stream_ops_t *window,
+        int width, int height, int stride, int scanline, int format, int maxFPS, int usage)
+{
+    mWindow = window;
+    mFormat = format;
+    mUsage = usage;
+    mWidth = width;
+    mHeight = height;
+    mStride = stride;
+    mScanline = scanline;
+    setMaxFPS(maxFPS);
+}
+
+/*===========================================================================
+ * FUNCTION   : setMaxFPS
+ *
+ * DESCRIPTION: round and clamp the preview max fps, to be advertised to
+ *              the display with the next enqueued frame
+ *
+ * PARAMETERS :
+ *   @maxFPS : max fps of preview stream, in multiples of 1000
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraGrallocMemory::setMaxFPS(int maxFPS)
+{
+    // Input is fps * 1000; round to the nearest integer fps.
+    int fps = (maxFPS + 500) / 1000;
+
+    // Keep the floor at 30: runtime fps updates to the display are not
+    // supported, and advertising a lower rate (e.g. starting at 15 and
+    // later switching to 30) can cause MDP underruns.
+    if (fps < 30) {
+        fps = 30;
+    }
+
+    // Consumed via the metadata of the next frame enqueued to the display.
+    mMaxFPS = fps;
+    LOGH("Setting max fps %d to display", fps);
+}
+
+/*===========================================================================
+ * FUNCTION   : displayBuffer
+ *
+ * DESCRIPTION: send received frame to display, then dequeue a replacement
+ *              buffer from the window, lazily mapping it if it has not
+ *              been seen before
+ *
+ * PARAMETERS :
+ *   @index   : index of preview frame
+ *
+ * RETURN     : int32_t type of status
+ *              dequeued buffer index on success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraGrallocMemory::displayBuffer(uint32_t index)
+{
+    int err = NO_ERROR;
+    int dequeuedIdx = BAD_INDEX;
+
+    if (BUFFER_NOT_OWNED == mLocalFlag[index]) {
+        LOGE("buffer to be enqueued is not owned");
+        return INVALID_OPERATION;
+    }
+
+    err = mWindow->enqueue_buffer(mWindow, (buffer_handle_t *)mBufferHandle[index]);
+    if(err != 0) {
+        LOGE("enqueue_buffer failed, err = %d", err);
+    } else {
+        LOGD("enqueue_buffer hdl=%p", *mBufferHandle[index]);
+        mLocalFlag[index] = BUFFER_NOT_OWNED;
+    }
+
+    buffer_handle_t *buffer_handle = NULL;
+    int stride = 0;
+    err = mWindow->dequeue_buffer(mWindow, &buffer_handle, &stride);
+    if (err == NO_ERROR && buffer_handle != NULL) {
+        int i;
+        LOGD("dequed buf hdl =%p", *buffer_handle);
+        // Fast path: the dequeued handle is one we already mapped.
+        for(i = 0; i < mMappableBuffers; i++) {
+            if(mBufferHandle[i] == buffer_handle) {
+                LOGD("Found buffer in idx:%d", i);
+                mLocalFlag[i] = BUFFER_OWNED;
+                dequeuedIdx = i;
+                break;
+            }
+        }
+
+        // Slow path: a brand-new handle — import its ION fd and map it.
+        if ((dequeuedIdx == BAD_INDEX) && (mMappableBuffers < mBufferCount)) {
+            dequeuedIdx = mMappableBuffers;
+            LOGD("Placing buffer in idx:%d", dequeuedIdx);
+            mBufferHandle[dequeuedIdx] = buffer_handle;
+            mLocalFlag[dequeuedIdx] = BUFFER_OWNED;
+
+            mPrivateHandle[dequeuedIdx] =
+                    (struct private_handle_t *)(*mBufferHandle[dequeuedIdx]);
+            mMemInfo[dequeuedIdx].main_ion_fd = open("/dev/ion", O_RDONLY);
+            if (mMemInfo[dequeuedIdx].main_ion_fd < 0) {
+                LOGE("failed: could not open ion device");
+                return BAD_INDEX;
+            }
+
+            struct ion_fd_data ion_info_fd;
+            memset(&ion_info_fd, 0, sizeof(ion_info_fd));
+            ion_info_fd.fd = mPrivateHandle[dequeuedIdx]->fd;
+            if (ioctl(mMemInfo[dequeuedIdx].main_ion_fd,
+                      ION_IOC_IMPORT, &ion_info_fd) < 0) {
+                LOGE("ION import failed\n");
+                // Close the ion device fd opened above (previously leaked).
+                close(mMemInfo[dequeuedIdx].main_ion_fd);
+                return BAD_INDEX;
+            }
+
+            mCameraMemory[dequeuedIdx] =
+                    mGetMemory(mPrivateHandle[dequeuedIdx]->fd,
+                    (size_t)mPrivateHandle[dequeuedIdx]->size,
+                    1,
+                    (void *)this);
+            LOGH("idx = %d, fd = %d, size = %d, offset = %d",
+                     dequeuedIdx, mPrivateHandle[dequeuedIdx]->fd,
+                    mPrivateHandle[dequeuedIdx]->size,
+                    mPrivateHandle[dequeuedIdx]->offset);
+            mMemInfo[dequeuedIdx].fd = mPrivateHandle[dequeuedIdx]->fd;
+            mMemInfo[dequeuedIdx].size =
+                    (size_t)mPrivateHandle[dequeuedIdx]->size;
+            mMemInfo[dequeuedIdx].handle = ion_info_fd.handle;
+
+            mMappableBuffers++;
+        }
+    } else {
+        LOGW("dequeue_buffer, no free buffer from display now");
+    }
+    return dequeuedIdx;
+}
+
+/*===========================================================================
+ * FUNCTION   : enqueueBuffer
+ *
+ * DESCRIPTION: hand an owned camera frame back to the display, optionally
+ *              stamping its presentation time first
+ *
+ * PARAMETERS :
+ *   @index   : index of frame
+ *   @timeStamp : frame presentation time
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraGrallocMemory::enqueueBuffer(uint32_t index, nsecs_t timeStamp)
+{
+    if (BUFFER_NOT_OWNED == mLocalFlag[index]) {
+        LOGE("buffer to be enqueued is not owned");
+        return INVALID_OPERATION;
+    }
+
+    int32_t err = NO_ERROR;
+    // A zero timestamp means "no presentation time"; skip setting it.
+    if (timeStamp != 0) {
+        err = mWindow->set_timestamp(mWindow, timeStamp);
+        if (err != NO_ERROR){
+            LOGE("Failed to native window timestamp");
+        }
+    }
+
+    err = mWindow->enqueue_buffer(mWindow, (buffer_handle_t *)mBufferHandle[index]);
+    if (err == 0) {
+        LOGD("enqueue_buffer hdl=%p", *mBufferHandle[index]);
+        mLocalFlag[index] = BUFFER_NOT_OWNED;
+    } else {
+        LOGE("enqueue_buffer failed, err = %d", err);
+    }
+    return err;
+}
+
+/*===========================================================================
+ * FUNCTION   : dequeueBuffer
+ *
+ * DESCRIPTION: receive a buffer from gralloc, lazily importing and mapping
+ *              it if it has not been seen before
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t
+ *              NO_ERROR/Buffer index : Success
+ *              < 0 failure code
+ *==========================================================================*/
+int32_t QCameraGrallocMemory::dequeueBuffer()
+{
+    int32_t err = NO_ERROR;
+    int32_t dequeuedIdx = BAD_INDEX;
+    buffer_handle_t *buffer_handle = NULL;
+    int32_t stride = 0;
+
+    dequeuedIdx = BAD_INDEX;
+    err = mWindow->dequeue_buffer(mWindow, &buffer_handle, &stride);
+    if ((err == NO_ERROR) && (buffer_handle != NULL)) {
+        int i;
+        LOGD("dequed buf hdl =%p", *buffer_handle);
+        // Fast path: the dequeued handle is one we already mapped.
+        for(i = 0; i < mMappableBuffers; i++) {
+            if(mBufferHandle[i] == buffer_handle) {
+                LOGD("Found buffer in idx:%d", i);
+                mLocalFlag[i] = BUFFER_OWNED;
+                dequeuedIdx = i;
+                break;
+            }
+        }
+
+        // Slow path: a brand-new handle — import its ION fd and map it.
+        if ((dequeuedIdx == BAD_INDEX) &&
+                (mMappableBuffers < mBufferCount)) {
+            dequeuedIdx = mMappableBuffers;
+            LOGD("Placing buffer in idx:%d", dequeuedIdx);
+            mBufferHandle[dequeuedIdx] = buffer_handle;
+            mLocalFlag[dequeuedIdx] = BUFFER_OWNED;
+
+            mPrivateHandle[dequeuedIdx] =
+                    (struct private_handle_t *)(*mBufferHandle[dequeuedIdx]);
+            //update max fps info
+            setMetaData(mPrivateHandle[dequeuedIdx], UPDATE_REFRESH_RATE, (void*)&mMaxFPS);
+            mMemInfo[dequeuedIdx].main_ion_fd = open("/dev/ion", O_RDONLY);
+            if (mMemInfo[dequeuedIdx].main_ion_fd < 0) {
+                LOGE("failed: could not open ion device");
+                return BAD_INDEX;
+            }
+
+            struct ion_fd_data ion_info_fd;
+            memset(&ion_info_fd, 0, sizeof(ion_info_fd));
+            ion_info_fd.fd = mPrivateHandle[dequeuedIdx]->fd;
+            if (ioctl(mMemInfo[dequeuedIdx].main_ion_fd,
+                    ION_IOC_IMPORT, &ion_info_fd) < 0) {
+                LOGE("ION import failed\n");
+                // Close the ion device fd opened above (previously leaked).
+                close(mMemInfo[dequeuedIdx].main_ion_fd);
+                return BAD_INDEX;
+            }
+
+            setMetaData(mPrivateHandle[dequeuedIdx], UPDATE_COLOR_SPACE,
+                    &mColorSpace);
+            mCameraMemory[dequeuedIdx] =
+                    mGetMemory(mPrivateHandle[dequeuedIdx]->fd,
+                    (size_t)mPrivateHandle[dequeuedIdx]->size,
+                    1,
+                    (void *)this);
+            LOGH("idx = %d, fd = %d, size = %d, offset = %d",
+                     dequeuedIdx, mPrivateHandle[dequeuedIdx]->fd,
+                    mPrivateHandle[dequeuedIdx]->size,
+                    mPrivateHandle[dequeuedIdx]->offset);
+            mMemInfo[dequeuedIdx].fd = mPrivateHandle[dequeuedIdx]->fd;
+            mMemInfo[dequeuedIdx].size =
+                    (size_t)mPrivateHandle[dequeuedIdx]->size;
+            mMemInfo[dequeuedIdx].handle = ion_info_fd.handle;
+
+            mMappableBuffers++;
+        }
+    } else {
+        LOGW("dequeue_buffer, no free buffer from display now");
+    }
+
+    return dequeuedIdx;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : allocate
+ *
+ * DESCRIPTION: configure the preview window (buffer count, geometry, crop,
+ *              usage), dequeue up to mMappableBuffers buffers from it, open
+ *              and import their ION fds, and wrap them as camera memory.
+ *              size/isSecure are unused: the window owns the allocation.
+ *
+ * PARAMETERS :
+ *   @count   : number of buffers to be allocated
+ *   @size    : unused; buffer size comes from the gralloc handle
+ *   @isSecure: unused for gralloc-backed preview buffers
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraGrallocMemory::allocate(uint8_t count, size_t /*size*/,
+        uint32_t /*isSecure*/)
+{
+    ATRACE_BEGIN_SNPRINTF("%s %d", "Grallocbufcnt", count);
+    int err = 0;
+    status_t ret = NO_ERROR;
+    int gralloc_usage = 0;
+    struct ion_fd_data ion_info_fd;
+    memset(&ion_info_fd, 0, sizeof(ion_info_fd));
+
+    LOGD("E ");
+
+    if (!mWindow) {
+        LOGE("Invalid native window");
+        ATRACE_END();
+        ret = INVALID_OPERATION;
+        goto end;
+    }
+
+    // Increment buffer count by min undequeued buffer.
+    err = mWindow->get_min_undequeued_buffer_count(mWindow,&mMinUndequeuedBuffers);
+    if (err != 0) {
+        LOGE("get_min_undequeued_buffer_count  failed: %s (%d)",
+                strerror(-err), -err);
+        ret = UNKNOWN_ERROR;
+        goto end;
+    }
+
+    err = mWindow->set_buffer_count(mWindow, count);
+    if (err != 0) {
+         LOGE("set_buffer_count failed: %s (%d)",
+                    strerror(-err), -err);
+         ret = UNKNOWN_ERROR;
+         goto end;
+    }
+
+    err = mWindow->set_buffers_geometry(mWindow, mStride, mScanline, mFormat);
+    if (err != 0) {
+         LOGE("set_buffers_geometry failed: %s (%d)",
+                strerror(-err), -err);
+         ret = UNKNOWN_ERROR;
+         goto end;
+    }
+
+    err = mWindow->set_crop(mWindow, 0, 0, mWidth, mHeight);
+    if (err != 0) {
+         LOGE("set_crop failed: %s (%d)",
+                strerror(-err), -err);
+         ret = UNKNOWN_ERROR;
+         goto end;
+    }
+
+    gralloc_usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
+    gralloc_usage |= mUsage;
+    err = mWindow->set_usage(mWindow, gralloc_usage);
+    if(err != 0) {
+        /* set_usage error out */
+        LOGE("set_usage rc = %d", err);
+        ret = UNKNOWN_ERROR;
+        goto end;
+    }
+    LOGH("usage = %d, geometry: %p, %d, %d, %d, %d, %d",
+           gralloc_usage, mWindow, mWidth, mHeight, mStride,
+          mScanline, mFormat);
+
+    // Only mMappableBuffers are dequeued eagerly here; the rest (if any)
+    // are mapped lazily by displayBuffer()/dequeueBuffer().
+    mBufferCount = count;
+    if ((count < mMappableBuffers) || (mMappableBuffers == 0)) {
+        mMappableBuffers = count;
+    }
+
+    //Allocate cnt number of buffers from native window
+    for (int cnt = 0; cnt < mMappableBuffers; cnt++) {
+        int stride;
+        err = mWindow->dequeue_buffer(mWindow, &mBufferHandle[cnt], &stride);
+        if(!err) {
+            LOGD("dequeue buf hdl =%p", mBufferHandle[cnt]);
+            mLocalFlag[cnt] = BUFFER_OWNED;
+        } else {
+            mLocalFlag[cnt] = BUFFER_NOT_OWNED;
+            LOGE("dequeue_buffer idx = %d err = %d", cnt, err);
+        }
+
+        LOGD("dequeue buf: %p\n", mBufferHandle[cnt]);
+
+        if(err != 0) {
+            LOGE("dequeue_buffer failed: %s (%d)",
+                   strerror(-err), -err);
+            ret = UNKNOWN_ERROR;
+            // Unwind buffers already mapped by earlier iterations: free the
+            // ION handle, close the ion fd, and cancel owned window buffers.
+            for(int i = 0; i < cnt; i++) {
+                // Deallocate buffers when the native window is gone
+                struct ion_handle_data ion_handle;
+                memset(&ion_handle, 0, sizeof(ion_handle));
+                ion_handle.handle = mMemInfo[i].handle;
+                if (ioctl(mMemInfo[i].main_ion_fd, ION_IOC_FREE, &ion_handle) < 0) {
+                    ALOGE("ion free failed");
+                }
+                close(mMemInfo[i].main_ion_fd);
+
+                if(mLocalFlag[i] != BUFFER_NOT_OWNED) {
+                    err = mWindow->cancel_buffer(mWindow, mBufferHandle[i]);
+                    LOGH("cancel_buffer: hdl =%p", (*mBufferHandle[i]));
+                }
+                mLocalFlag[i] = BUFFER_NOT_OWNED;
+                mBufferHandle[i] = NULL;
+            }
+            reset();
+            goto end;
+        }
+
+        mPrivateHandle[cnt] =
+            (struct private_handle_t *)(*mBufferHandle[cnt]);
+        //update max fps info
+        setMetaData(mPrivateHandle[cnt], UPDATE_REFRESH_RATE, (void*)&mMaxFPS);
+        mMemInfo[cnt].main_ion_fd = open("/dev/ion", O_RDONLY);
+        if (mMemInfo[cnt].main_ion_fd < 0) {
+            LOGE("failed: could not open ion device");
+            // Same unwind as above for earlier iterations.
+            for(int i = 0; i < cnt; i++) {
+                struct ion_handle_data ion_handle;
+                memset(&ion_handle, 0, sizeof(ion_handle));
+                ion_handle.handle = mMemInfo[i].handle;
+                if (ioctl(mMemInfo[i].main_ion_fd, ION_IOC_FREE, &ion_handle) < 0) {
+                    LOGE("ion free failed");
+                }
+                close(mMemInfo[i].main_ion_fd);
+                if(mLocalFlag[i] != BUFFER_NOT_OWNED) {
+                    err = mWindow->cancel_buffer(mWindow, mBufferHandle[i]);
+                    LOGH("cancel_buffer: hdl =%p", (*mBufferHandle[i]));
+                }
+                mLocalFlag[i] = BUFFER_NOT_OWNED;
+                mBufferHandle[i] = NULL;
+            }
+            reset();
+            ret = UNKNOWN_ERROR;
+            goto end;
+        } else {
+            ion_info_fd.fd = mPrivateHandle[cnt]->fd;
+            if (ioctl(mMemInfo[cnt].main_ion_fd,
+                      ION_IOC_IMPORT, &ion_info_fd) < 0) {
+                LOGE("ION import failed\n");
+                // Unwind earlier iterations, then close this iteration's fd.
+                for(int i = 0; i < cnt; i++) {
+                    struct ion_handle_data ion_handle;
+                    memset(&ion_handle, 0, sizeof(ion_handle));
+                    ion_handle.handle = mMemInfo[i].handle;
+                    if (ioctl(mMemInfo[i].main_ion_fd, ION_IOC_FREE, &ion_handle) < 0) {
+                        LOGE("ion free failed");
+                    }
+                    close(mMemInfo[i].main_ion_fd);
+
+                    if(mLocalFlag[i] != BUFFER_NOT_OWNED) {
+                        err = mWindow->cancel_buffer(mWindow, mBufferHandle[i]);
+                        LOGH("cancel_buffer: hdl =%p", (*mBufferHandle[i]));
+                    }
+                    mLocalFlag[i] = BUFFER_NOT_OWNED;
+                    mBufferHandle[i] = NULL;
+                }
+                close(mMemInfo[cnt].main_ion_fd);
+                reset();
+                ret = UNKNOWN_ERROR;
+                goto end;
+            }
+        }
+        setMetaData(mPrivateHandle[cnt], UPDATE_COLOR_SPACE, &mColorSpace);
+        mCameraMemory[cnt] =
+            mGetMemory(mPrivateHandle[cnt]->fd,
+                    (size_t)mPrivateHandle[cnt]->size,
+                    1,
+                    (void *)this);
+        LOGH("idx = %d, fd = %d, size = %d, offset = %d",
+               cnt, mPrivateHandle[cnt]->fd,
+              mPrivateHandle[cnt]->size,
+              mPrivateHandle[cnt]->offset);
+        mMemInfo[cnt].fd = mPrivateHandle[cnt]->fd;
+        mMemInfo[cnt].size = (size_t)mPrivateHandle[cnt]->size;
+        mMemInfo[cnt].handle = ion_info_fd.handle;
+    }
+
+    //Cancel min_undequeued_buffer buffers back to the window
+    for (int i = 0; i < mMinUndequeuedBuffers; i ++) {
+        err = mWindow->cancel_buffer(mWindow, mBufferHandle[i]);
+        mLocalFlag[i] = BUFFER_NOT_OWNED;
+    }
+
+end:
+    if (ret != NO_ERROR) {
+        mMappableBuffers = 0;
+    }
+    LOGD("X ");
+    ATRACE_END();
+    return ret;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : allocateMore
+ *
+ * DESCRIPTION: allocate more requested number of buffers of certain size.
+ *              Unsupported for gralloc memory: preview buffers come from
+ *              the native window and the pool cannot grow after allocate().
+ *
+ * PARAMETERS :
+ *   @count   : number of buffers to be allocated (ignored)
+ *   @size    : length of the buffer to be allocated (ignored)
+ *
+ * RETURN     : int32_t type of status
+ *              UNKNOWN_ERROR -- always (not supported)
+ *==========================================================================*/
+int QCameraGrallocMemory::allocateMore(uint8_t /*count*/, size_t /*size*/)
+{
+    // Fixed log-message typo ("implenmented").
+    LOGE("Not implemented yet");
+    return UNKNOWN_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : deallocate
+ *
+ * DESCRIPTION: deallocate buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraGrallocMemory::deallocate()
+{
+    // Fix: the entry/exit LOGD format strings contained no conversion
+    // specifier but were passed __FUNCTION__; the stray argument is dropped
+    // (matches the logging style of the sibling methods in this class).
+    LOGD("E ");
+
+    for (int cnt = 0; cnt < mMappableBuffers; cnt++) {
+        // Release the camera_memory_t wrapper first, then free the ion
+        // handle and close its fd.
+        mCameraMemory[cnt]->release(mCameraMemory[cnt]);
+        struct ion_handle_data ion_handle;
+        memset(&ion_handle, 0, sizeof(ion_handle));
+        ion_handle.handle = mMemInfo[cnt].handle;
+        if (ioctl(mMemInfo[cnt].main_ion_fd, ION_IOC_FREE, &ion_handle) < 0) {
+            LOGE("ion free failed");
+        }
+        close(mMemInfo[cnt].main_ion_fd);
+        // Buffers we still own must be cancelled back to the preview window
+        // (if it is still around) so the window can reuse them.
+        if(mLocalFlag[cnt] != BUFFER_NOT_OWNED) {
+            if (mWindow) {
+                mWindow->cancel_buffer(mWindow, mBufferHandle[cnt]);
+                LOGH("cancel_buffer: hdl =%p", (*mBufferHandle[cnt]));
+            } else {
+                LOGE("Preview window is NULL, cannot cancel_buffer: hdl =%p",
+                      (*mBufferHandle[cnt]));
+            }
+        }
+        mLocalFlag[cnt] = BUFFER_NOT_OWNED;
+        LOGH("put buffer %d successfully", cnt);
+    }
+    mBufferCount = 0;
+    mMappableBuffers = 0;
+    LOGD("X ");
+}
+
+/*===========================================================================
+ * FUNCTION   : cacheOps
+ *
+ * DESCRIPTION: ion related memory cache operations
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *   @cmd     : cache ops command
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraGrallocMemory::cacheOps(uint32_t index, unsigned int cmd)
+{
+    // Bounds-check against the mapped range before touching mCameraMemory.
+    if (index >= mMappableBuffers) {
+        return BAD_INDEX;
+    }
+    void *vaddr = mCameraMemory[index]->data;
+    return cacheOpsInternal(index, cmd, vaddr);
+}
+
+/*===========================================================================
+ * FUNCTION   : getRegFlags
+ *
+ * DESCRIPTION: query initial reg flags
+ *
+ * PARAMETERS :
+ *   @regFlags: initial reg flags of the allocated buffers
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraGrallocMemory::getRegFlags(uint8_t *regFlags) const
+{
+    // Layout: [0, minUndequeued) -> 0, [minUndequeued, mappable) -> 1,
+    // [mappable, bufferCount) -> 0.  The index carries across the loops.
+    int idx = 0;
+    while (idx < mMinUndequeuedBuffers) {
+        regFlags[idx++] = 0;
+    }
+    while (idx < mMappableBuffers) {
+        regFlags[idx++] = 1;
+    }
+    while (idx < mBufferCount) {
+        regFlags[idx++] = 0;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMemory
+ *
+ * DESCRIPTION: get camera memory
+ *
+ * PARAMETERS :
+ *   @index   : buffer index
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : camera memory ptr
+ *              NULL if not supported or failed
+ *==========================================================================*/
+camera_memory_t *QCameraGrallocMemory::getMemory(uint32_t index,
+        bool metadata) const
+{
+    // Gralloc memory never backs metadata buffers; also bounds-check index.
+    if (metadata || (index >= mMappableBuffers)) {
+        return NULL;
+    }
+    return mCameraMemory[index];
+}
+
+/*===========================================================================
+ * FUNCTION   : getMatchBufIndex
+ *
+ * DESCRIPTION: query buffer index by opaque ptr
+ *
+ * PARAMETERS :
+ *   @opaque  : opaque ptr
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : buffer index if match found,
+ *              -1 if failed
+ *==========================================================================*/
+int QCameraMuxer;
+
+/*===========================================================================
+ * FUNCTION   : getPtr
+ *
+ * DESCRIPTION: return buffer pointer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : buffer ptr
+ *==========================================================================*/
+void *QCameraGrallocMemory::getPtr(uint32_t index) const
+{
+    if (index < mMappableBuffers) {
+        return mCameraMemory[index]->data;
+    }
+    LOGE("index out of bound");
+    // NOTE(review): the error sentinel is an integer error code cast to a
+    // pointer, not NULL — confirm callers compare against BAD_INDEX.
+    return (void *)BAD_INDEX;
+}
+
+/*===========================================================================
+ * FUNCTION   : setMappable
+ *
+ * DESCRIPTION: configure the number of buffers ready to map
+ *
+ * PARAMETERS :
+ *   @mappable : the number of desired mappable buffers
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraGrallocMemory::setMappable(uint8_t mappable)
+{
+    // Latch the value only once; later calls are ignored.
+    if (0 == mMappableBuffers) {
+        mMappableBuffers = mappable;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getMappable
+ *
+ * DESCRIPTION: query number of buffers already allocated
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of buffers already allocated
+ *==========================================================================*/
+uint8_t QCameraGrallocMemory::getMappable() const
+{
+    // Plain accessor; the value is latched by setMappable()/allocate().
+    return mMappableBuffers;
+}
+
+/*===========================================================================
+ * FUNCTION   : checkIfAllBuffersMapped
+ *
+ * DESCRIPTION: check if all buffers for this stream are mapped
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : 1 if all buffers mapped
+ *              0 if total buffers not equal to mapped buffers
+ *==========================================================================*/
+uint8_t QCameraGrallocMemory::checkIfAllBuffersMapped() const
+{
+    LOGH("mBufferCount: %d, mMappableBuffers: %d",
+             mBufferCount, mMappableBuffers);
+    // Boolean comparison result converted to the uint8_t return type.
+    return (mBufferCount == mMappableBuffers);
+}
+
+/*===========================================================================
+ * FUNCTION   : setBufferStatus
+ *
+ * DESCRIPTION: set buffer status
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *   @status  : status of the buffer, whether skipped,etc
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraGrallocMemory::setBufferStatus(uint32_t index, BufferStatus status)
+{
+    // Record status only for indices inside the allocated buffer set.
+    if (index < mBufferCount) {
+        mBufferStatus[index] = status;
+    } else {
+        LOGE("index out of bound");
+    }
+}
+
+}; //namespace qcamera
diff --git a/msmcobalt/QCamera2/HAL/QCameraMem.h b/msmcobalt/QCamera2/HAL/QCameraMem.h
new file mode 100644
index 0000000..2979696
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/QCameraMem.h
@@ -0,0 +1,302 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA2HWI_MEM_H__
+#define __QCAMERA2HWI_MEM_H__
+
+// System dependencies
+#include <linux/msm_ion.h>
+#include <utils/Mutex.h>
+#include <utils/List.h>
+
+// Display dependencies
+#include "qdMetaData.h"
+
+// Camera dependencies
+#include "camera.h"
+
+extern "C" {
+#include "mm_camera_interface.h"
+}
+
+namespace qcamera {
+
+class QCameraMemoryPool;
+
+//OFFSET, SIZE, USAGE, TIMESTAMP, FORMAT
+#define VIDEO_METADATA_NUM_INTS          5
+
+enum QCameraMemType {
+    QCAMERA_MEM_TYPE_DEFAULT      = 0,
+    QCAMERA_MEM_TYPE_SECURE       = 1,
+    QCAMERA_MEM_TYPE_BATCH        = (1 << 1),
+    QCAMERA_MEM_TYPE_COMPRESSED   = (1 << 2),
+};
+
+typedef enum {
+    STATUS_IDLE,
+    STATUS_SKIPPED
+} BufferStatus;
+
+// Base class for all memory types. Abstract.
+class QCameraMemory {
+
+public:
+    // Cache-maintenance helpers; each forwards the matching ion command to
+    // the subclass-provided cacheOps().
+    int cleanCache(uint32_t index)
+    {
+        return cacheOps(index, ION_IOC_CLEAN_CACHES);
+    }
+    int invalidateCache(uint32_t index)
+    {
+        return cacheOps(index, ION_IOC_INV_CACHES);
+    }
+    int cleanInvalidateCache(uint32_t index)
+    {
+        return cacheOps(index, ION_IOC_CLEAN_INV_CACHES);
+    }
+    int getFd(uint32_t index) const;
+    ssize_t getSize(uint32_t index) const;
+    uint8_t getCnt() const;
+    virtual uint8_t getMappable() const;
+    virtual uint8_t checkIfAllBuffersMapped() const;
+
+    // Core allocation interface every concrete memory type implements.
+    virtual int allocate(uint8_t count, size_t size, uint32_t is_secure) = 0;
+    virtual void deallocate() = 0;
+    virtual int allocateMore(uint8_t count, size_t size) = 0;
+    virtual int cacheOps(uint32_t index, unsigned int cmd) = 0;
+    virtual int getRegFlags(uint8_t *regFlags) const = 0;
+    virtual camera_memory_t *getMemory(uint32_t index,
+            bool metadata) const = 0;
+    virtual int getMatchBufIndex(const void *opaque, bool metadata) const = 0;
+    virtual void *getPtr(uint32_t index) const= 0;
+
+    QCameraMemory(bool cached,
+                  QCameraMemoryPool *pool = NULL,
+                  cam_stream_type_t streamType = CAM_STREAM_TYPE_DEFAULT,
+                  QCameraMemType buf_Type = QCAMERA_MEM_TYPE_DEFAULT);
+    virtual ~QCameraMemory();
+    virtual void reset();
+
+    void getBufDef(const cam_frame_len_offset_t &offset,
+            mm_camera_buf_def_t &bufDef, uint32_t index) const;
+
+    int32_t getUserBufDef(const cam_stream_user_buf_info_t &buf_info,
+            mm_camera_buf_def_t &bufDef, uint32_t index,
+            const cam_frame_len_offset_t &plane_offset,
+            mm_camera_buf_def_t *planebufDef, QCameraMemory *bufs) const;
+
+protected:
+
+    friend class QCameraMemoryPool;
+
+    // Per-buffer bookkeeping: the shared fd, the ion device fd and handle
+    // used to free it, plus size/caching/heap attributes.
+    struct QCameraMemInfo {
+        int fd;
+        int main_ion_fd;
+        ion_user_handle_t handle;
+        size_t size;
+        bool cached;
+        unsigned int heap_id;
+    };
+
+    int alloc(int count, size_t size, unsigned int heap_id,
+            uint32_t is_secure);
+    void dealloc();
+    static int allocOneBuffer(struct QCameraMemInfo &memInfo,
+            unsigned int heap_id, size_t size, bool cached, uint32_t is_secure);
+    static void deallocOneBuffer(struct QCameraMemInfo &memInfo);
+    int cacheOpsInternal(uint32_t index, unsigned int cmd, void *vaddr);
+
+    bool m_bCached;
+    uint8_t mBufferCount;
+    struct QCameraMemInfo mMemInfo[MM_CAMERA_MAX_NUM_FRAMES];
+    QCameraMemoryPool *mMemoryPool;
+    cam_stream_type_t mStreamType;
+    QCameraMemType mBufType;
+};
+
+// Pool of reusable ion buffers, bucketed per stream type, guarded by mLock.
+class QCameraMemoryPool {
+
+public:
+
+    QCameraMemoryPool();
+    virtual ~QCameraMemoryPool();
+
+    // Hands out a matching pooled buffer or allocates a fresh one.
+    int allocateBuffer(struct QCameraMemory::QCameraMemInfo &memInfo,
+            unsigned int heap_id, size_t size, bool cached,
+            cam_stream_type_t streamType, uint32_t is_secure);
+    // Returns a buffer to the pool for later reuse.
+    void releaseBuffer(struct QCameraMemory::QCameraMemInfo &memInfo,
+            cam_stream_type_t streamType);
+    void clear();
+
+protected:
+
+    // Caller must hold mLock (per the Locked suffix).
+    int findBufferLocked(struct QCameraMemory::QCameraMemInfo &memInfo,
+            unsigned int heap_id, size_t size, bool cached,
+            cam_stream_type_t streamType);
+
+    android::List<QCameraMemory::QCameraMemInfo> mPools[CAM_STREAM_TYPE_MAX];
+    pthread_mutex_t mLock;
+};
+
+// Internal heap memory is used for memories used internally
+// They are allocated from /dev/ion.
+class QCameraHeapMemory : public QCameraMemory {
+public:
+    QCameraHeapMemory(bool cached);
+    virtual ~QCameraHeapMemory();
+
+    virtual int allocate(uint8_t count, size_t size, uint32_t is_secure);
+    virtual int allocateMore(uint8_t count, size_t size);
+    virtual void deallocate();
+    virtual int cacheOps(uint32_t index, unsigned int cmd);
+    virtual int getRegFlags(uint8_t *regFlags) const;
+    virtual camera_memory_t *getMemory(uint32_t index, bool metadata) const;
+    virtual int getMatchBufIndex(const void *opaque, bool metadata) const;
+    virtual void *getPtr(uint32_t index) const;
+
+private:
+    // Per-buffer mapped virtual addresses for the internally allocated heap.
+    void *mPtr[MM_CAMERA_MAX_NUM_FRAMES];
+};
+
+// Heap memory specialized for the metadata stream; only the registration
+// flags differ from the plain heap implementation.
+class QCameraMetadataStreamMemory : public QCameraHeapMemory {
+public:
+    QCameraMetadataStreamMemory(bool cached);
+    virtual ~QCameraMetadataStreamMemory();
+
+    virtual int getRegFlags(uint8_t *regFlags) const;
+};
+
+// External heap memory is used for memories shared with
+// framework. They are allocated from /dev/ion or gralloc.
+class QCameraStreamMemory : public QCameraMemory {
+public:
+    QCameraStreamMemory(camera_request_memory getMemory,
+                        bool cached,
+                        QCameraMemoryPool *pool = NULL,
+                        cam_stream_type_t streamType = CAM_STREAM_TYPE_DEFAULT,
+                        cam_stream_buf_type buf_Type = CAM_STREAM_BUF_TYPE_MPLANE);
+    virtual ~QCameraStreamMemory();
+
+    virtual int allocate(uint8_t count, size_t size, uint32_t is_secure);
+    virtual int allocateMore(uint8_t count, size_t size);
+    virtual void deallocate();
+    virtual int cacheOps(uint32_t index, unsigned int cmd);
+    virtual int getRegFlags(uint8_t *regFlags) const;
+    virtual camera_memory_t *getMemory(uint32_t index, bool metadata) const;
+    virtual int getMatchBufIndex(const void *opaque, bool metadata) const;
+    virtual void *getPtr(uint32_t index) const;
+
+protected:
+    // Framework callback used to wrap ion fds into camera_memory_t.
+    camera_request_memory mGetMemory;
+    camera_memory_t *mCameraMemory[MM_CAMERA_MAX_NUM_FRAMES];
+};
+
+// External heap memory for the video stream; adds per-buffer metadata
+// (native handles) that encoders consume alongside the frame data.
+class QCameraVideoMemory : public QCameraStreamMemory {
+public:
+    QCameraVideoMemory(camera_request_memory getMemory, bool cached,
+            QCameraMemType bufType = QCAMERA_MEM_TYPE_DEFAULT);
+    virtual ~QCameraVideoMemory();
+
+    virtual int allocate(uint8_t count, size_t size, uint32_t is_secure);
+    virtual int allocateMore(uint8_t count, size_t size);
+    virtual void deallocate();
+    virtual camera_memory_t *getMemory(uint32_t index, bool metadata) const;
+    virtual int getMatchBufIndex(const void *opaque, bool metadata) const;
+    int allocateMeta(uint8_t buf_cnt, int numFDs, int numInts);
+    void deallocateMeta();
+    void setVideoInfo(int usage, cam_format_t format);
+    int getUsage(){return mUsage;};
+    int getFormat(){return mFormat;};
+    int convCamtoOMXFormat(cam_format_t format);
+    native_handle_t *updateNativeHandle(uint32_t index, bool metadata = true);
+    int closeNativeHandle(const void *data, bool metadata = true);
+private:
+    camera_memory_t *mMetadata[MM_CAMERA_MAX_NUM_FRAMES];
+    uint8_t mMetaBufCount;
+    int mUsage, mFormat;
+    native_handle_t *mNativeHandle[MM_CAMERA_MAX_NUM_FRAMES];
+};
+
+
+// Gralloc Memory is acquired from preview window
+class QCameraGrallocMemory : public QCameraMemory {
+    // Ownership state of each buffer relative to the preview window.
+    enum {
+        BUFFER_NOT_OWNED,
+        BUFFER_OWNED,
+    };
+public:
+    QCameraGrallocMemory(camera_request_memory getMemory);
+    void setNativeWindow(preview_stream_ops_t *anw);
+    virtual ~QCameraGrallocMemory();
+
+    virtual int allocate(uint8_t count, size_t size, uint32_t is_secure);
+    virtual int allocateMore(uint8_t count, size_t size);
+    virtual void deallocate();
+    virtual int cacheOps(uint32_t index, unsigned int cmd);
+    virtual int getRegFlags(uint8_t *regFlags) const;
+    virtual camera_memory_t *getMemory(uint32_t index, bool metadata) const;
+    virtual int getMatchBufIndex(const void *opaque, bool metadata) const;
+    virtual void *getPtr(uint32_t index) const;
+    virtual void setMappable(uint8_t mappable);
+    virtual uint8_t getMappable() const;
+    virtual uint8_t checkIfAllBuffersMapped() const;
+
+    void setWindowInfo(preview_stream_ops_t *window, int width, int height,
+        int stride, int scanline, int format, int maxFPS, int usage = 0);
+    // Enqueue/display buffer[index] onto the native window,
+    // and dequeue one buffer from it.
+    // Returns the buffer index of the dequeued buffer.
+    int displayBuffer(uint32_t index);
+    void setMaxFPS(int maxFPS);
+    int32_t enqueueBuffer(uint32_t index, nsecs_t timeStamp = 0);
+    int32_t dequeueBuffer();
+    inline bool isBufSkipped(uint32_t index){return (mBufferStatus[index] == STATUS_SKIPPED);};
+    void setBufferStatus(uint32_t index, BufferStatus status);
+private:
+    buffer_handle_t *mBufferHandle[MM_CAMERA_MAX_NUM_FRAMES];
+    int mLocalFlag[MM_CAMERA_MAX_NUM_FRAMES];
+    bool mBufferStatus[MM_CAMERA_MAX_NUM_FRAMES];
+    struct private_handle_t *mPrivateHandle[MM_CAMERA_MAX_NUM_FRAMES];
+    preview_stream_ops_t *mWindow;
+    int mWidth, mHeight, mFormat, mStride, mScanline, mUsage;
+    typeof (MetaData_t::refreshrate) mMaxFPS;
+    camera_request_memory mGetMemory;
+    camera_memory_t *mCameraMemory[MM_CAMERA_MAX_NUM_FRAMES];
+    int mMinUndequeuedBuffers;
+    enum ColorSpace_t mColorSpace;
+    uint8_t mMappableBuffers;
+    pthread_mutex_t mLock;
+    uint8_t mEnqueuedBuffers;
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA2HWI_MEM_H__ */
diff --git a/msmcobalt/QCamera2/HAL/QCameraMuxer.cpp b/msmcobalt/QCamera2/HAL/QCameraMuxer.cpp
new file mode 100644
index 0000000..bad7393
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/QCameraMuxer.cpp
@@ -0,0 +1,2822 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#define LOG_TAG "QCameraMuxer"
+
+// System dependencies
+#include <fcntl.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <utils/Errors.h>
+#define STAT_H <SYSTEM_HEADER_PREFIX/stat.h>
+#include STAT_H
+
+// Camera dependencies
+#include "QCameraMuxer.h"
+#include "QCamera2HWI.h"
+#include "QCamera3HWI.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+/* Muxer implementation */
+using namespace android;
+namespace qcamera {
+
+QCameraMuxer *gMuxer = NULL;
+
+//Error Check Macros
+#define CHECK_MUXER() \
+    if (!gMuxer) { \
+        LOGE("Error getting muxer "); \
+        return; \
+    } \
+
+#define CHECK_MUXER_ERROR() \
+    if (!gMuxer) { \
+        LOGE("Error getting muxer "); \
+        return -ENODEV; \
+    } \
+
+#define CHECK_CAMERA(pCam) \
+    if (!pCam) { \
+        LOGE("Error getting physical camera"); \
+        return; \
+    } \
+
+#define CHECK_CAMERA_ERROR(pCam) \
+    if (!pCam) { \
+        LOGE("Error getting physical camera"); \
+        return -ENODEV; \
+    } \
+
+#define CHECK_HWI(hwi) \
+    if (!hwi) { \
+        LOGE("Error !! HWI not found!!"); \
+        return; \
+    } \
+
+#define CHECK_HWI_ERROR(hwi) \
+    if (!hwi) { \
+        LOGE("Error !! HWI not found!!"); \
+        return -ENODEV; \
+    } \
+
+
+/*===========================================================================
+ * FUNCTION         : getCameraMuxer
+ *
+ * DESCRIPTION     : Creates Camera Muxer if not created
+ *
+ * PARAMETERS:
+ *   @pMuxer               : Pointer to retrieve Camera Muxer
+ *   @num_of_cameras  : Number of Physical Cameras on device
+ *
+ * RETURN             :  NONE
+ *==========================================================================*/
+void QCameraMuxer::getCameraMuxer(
+        QCameraMuxer** pMuxer, uint32_t num_of_cameras)
+{
+    *pMuxer = NULL;
+    // NOTE(review): singleton creation is not synchronized; confirm this is
+    // only ever reached from a single thread during HAL module init.
+    if (!gMuxer) {
+        gMuxer = new QCameraMuxer(num_of_cameras);
+    }
+    CHECK_MUXER();
+    *pMuxer = gMuxer;
+    LOGH("gMuxer: %p ", gMuxer);
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION         : QCameraMuxer
+ *
+ * DESCRIPTION     : QCameraMuxer Constructor
+ *
+ * PARAMETERS:
+ *   @num_of_cameras  : Number of Physical Cameras on device
+ *
+ *==========================================================================*/
+QCameraMuxer::QCameraMuxer(uint32_t num_of_cameras)
+    : mJpegClientHandle(0),
+      m_pPhyCamera(NULL),
+      m_pLogicalCamera(NULL),
+      m_pCallbacks(NULL),
+      m_bAuxCameraExposed(FALSE),
+      m_nPhyCameras(num_of_cameras),
+      m_nLogicalCameras(0),
+      m_MainJpegQ(releaseJpegInfo, this),
+      m_AuxJpegQ(releaseJpegInfo, this),
+      m_pRelCamMpoJpeg(NULL),
+      m_pMpoCallbackCookie(NULL),
+      m_pJpegCallbackCookie(NULL),
+      m_bDumpImages(FALSE),
+      m_bMpoEnabled(TRUE),
+      m_bFrameSyncEnabled(FALSE),
+      m_bRecordingHintInternallySet(FALSE)
+{
+    // NOTE(review): setupLogicalCameras() runs before the memset() calls
+    // below — confirm it does not populate any of these ops structs.
+    setupLogicalCameras();
+    memset(&mJpegOps, 0, sizeof(mJpegOps));
+    memset(&mJpegMpoOps, 0, sizeof(mJpegMpoOps));
+    memset(&mGetMemoryCb, 0, sizeof(mGetMemoryCb));
+    memset(&mDataCb, 0, sizeof(mDataCb));
+
+    // initialize mutex for MPO composition
+    pthread_mutex_init(&m_JpegLock, NULL);
+    // launch MPO composition thread
+    m_ComposeMpoTh.launch(composeMpoRoutine, this);
+
+    //Check whether dual camera images need to be dumped
+    char prop[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.dual.camera.dump", prop, "0");
+    m_bDumpImages = atoi(prop);
+    LOGH("dualCamera dump images:%d ", m_bDumpImages);
+}
+
+/*===========================================================================
+ * FUNCTION         : ~QCameraMuxer
+ *
+ * DESCRIPTION     : QCameraMuxer Destructor
+ *
+ *==========================================================================*/
+QCameraMuxer::~QCameraMuxer() {
+    if (m_pLogicalCamera) {
+        delete [] m_pLogicalCamera;
+        m_pLogicalCamera = NULL;
+    }
+    if (m_pPhyCamera) {
+        delete [] m_pPhyCamera;
+        m_pPhyCamera = NULL;
+    }
+
+    // Drop any MPO jpeg buffer still held from a composition in flight.
+    if (NULL != m_pRelCamMpoJpeg) {
+        m_pRelCamMpoJpeg->release(m_pRelCamMpoJpeg);
+        m_pRelCamMpoJpeg = NULL;
+    }
+    // flush Jpeg Queues
+    m_MainJpegQ.flush();
+    m_AuxJpegQ.flush();
+
+    // stop and exit MPO composition thread
+    m_ComposeMpoTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, TRUE, FALSE);
+    m_ComposeMpoTh.exit();
+
+    pthread_mutex_destroy(&m_JpegLock);
+}
+
+/*===========================================================================
+ * FUNCTION         : get_number_of_cameras
+ *
+ * DESCRIPTION     : Provide number of Logical Cameras
+ *
+ * RETURN             :  Number of logical Cameras,
+ *                       -ENODEV if the muxer is not available
+ *==========================================================================*/
+int QCameraMuxer::get_number_of_cameras()
+{
+    // Fix: every other static entry point guards gMuxer before use; this
+    // one dereferenced it unconditionally. CHECK_MUXER_ERROR returns
+    // -ENODEV when the muxer has not been created.
+    CHECK_MUXER_ERROR();
+    return gMuxer->getNumberOfCameras();
+}
+
+/*===========================================================================
+ * FUNCTION         : get_camera_info
+ *
+ * DESCRIPTION     : get logical camera info
+ *
+ * PARAMETERS:
+ *   @camera_id     : Logical Camera ID
+ *   @info              : Logical Main Camera Info
+ *
+ * RETURN     :
+ *              NO_ERROR  : success
+ *              ENODEV : Camera not found
+ *              other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::get_camera_info(int camera_id, struct camera_info *info)
+{
+    int rc = NO_ERROR;
+    LOGH("E");
+    // Fix: gMuxer was dereferenced below without a NULL guard; add the
+    // same check used by the other static entry points.
+    CHECK_MUXER_ERROR();
+    cam_sync_type_t type;
+    if ((camera_id < 0) || (camera_id >= gMuxer->getNumberOfCameras())) {
+        LOGE("Camera id %d not found!", camera_id);
+        return -ENODEV;
+    }
+    // NOTE(review): a NULL info returns NO_ERROR without doing anything —
+    // confirm callers rely on this probe-style behavior.
+    if(info) {
+        rc = gMuxer->getCameraInfo(camera_id, info, &type);
+    }
+    LOGH("X, rc: %d", rc);
+    return rc;
+}
+
+
+/*===========================================================================
+ * FUNCTION         : set_callbacks
+ *
+ * DESCRIPTION     : Not Implemented
+ *
+ * PARAMETERS:
+ *   @callbacks      : Camera Module Callbacks
+ *
+ * RETURN     :
+ *              NO_ERROR  : success
+ *              other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::set_callbacks(__unused const camera_module_callbacks_t *callbacks)
+{
+    // Module-level callbacks are intentionally ignored; per-device
+    // callbacks are wired up in set_callBacks() instead.
+    // Not implemented
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : camera_device_open
+ *
+ * DESCRIPTION: static function to open a camera device by its ID
+ *
+ * PARAMETERS :
+ *   @module: hw module
+ *   @id : camera ID
+ *   @hw_device : ptr to struct storing camera hardware device info
+ *
+ * RETURN     :
+ *              NO_ERROR  : success
+ *              BAD_VALUE : Invalid Camera ID
+ *              other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::camera_device_open(
+        __unused const struct hw_module_t *module, const char *id,
+        struct hw_device_t **hw_device)
+{
+    int rc = NO_ERROR;
+    // Fix: the entry LOGH called atoi(id) BEFORE the NULL check, so a NULL
+    // id dereferenced a null pointer. Validate id first, then log.
+    if (!id) {
+        LOGE("Invalid camera id");
+        return BAD_VALUE;
+    }
+    // Guard gMuxer before the dereference below, like the other entries.
+    CHECK_MUXER_ERROR();
+    LOGH("id= %d",atoi(id));
+
+    rc =  gMuxer->cameraDeviceOpen(atoi(id), hw_device);
+    LOGH("id= %d, rc: %d", atoi(id), rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : open_legacy
+ *
+ * DESCRIPTION: static function to open a camera device by its ID
+ *
+ * PARAMETERS :
+ *   @module: hw module
+ *   @id : camera ID
+ *   @halVersion: hal version
+ *   @hw_device : ptr to struct storing camera hardware device info
+ *
+ * RETURN     :
+ *              NO_ERROR  : success
+ *              BAD_VALUE : Invalid Camera ID
+ *              other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::open_legacy(__unused const struct hw_module_t* module,
+        const char* id, __unused uint32_t halVersion, struct hw_device_t** hw_device)
+{
+    int rc = NO_ERROR;
+    // Fix: same defect as camera_device_open — atoi(id) was evaluated in
+    // the entry log before the NULL check. Validate id first.
+    if (!id) {
+        LOGE("Invalid camera id");
+        return BAD_VALUE;
+    }
+    CHECK_MUXER_ERROR();
+    LOGH("id= %d", atoi(id));
+
+    rc =  gMuxer->cameraDeviceOpen(atoi(id), hw_device);
+    LOGH("id= %d, rc: %d", atoi(id), rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : set_preview_window
+ *
+ * DESCRIPTION: Set Preview window for main camera
+ *
+ * PARAMETERS :
+ *   @device : camera hardware device info
+ *   @window: Preview window ops
+ *
+ * RETURN     :
+ *              NO_ERROR  : success
+ *              other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::set_preview_window(struct camera_device * device,
+        struct preview_stream_ops *window)
+{
+    int rc = NO_ERROR;
+    CHECK_MUXER_ERROR();
+    qcamera_physical_descriptor_t *pCam = NULL;
+    qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+    CHECK_CAMERA_ERROR(cam);
+
+    for (uint32_t i = 0; i < cam->numCameras; i++) {
+        pCam = gMuxer->getPhysicalCamera(cam, i);
+        CHECK_CAMERA_ERROR(pCam);
+
+        // Set preview window only for primary camera
+        if (pCam->mode == CAM_MODE_PRIMARY) {
+            QCamera2HardwareInterface *hwi = pCam->hwi;
+            CHECK_HWI_ERROR(hwi);
+            rc = hwi->set_preview_window(pCam->dev, window);
+            if (rc != NO_ERROR) {
+                LOGE("Error!! setting preview window");
+                return rc;
+            }
+            // Only one primary per logical camera; stop searching.
+            break;
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : set_callBacks
+ *
+ * DESCRIPTION: Set Framework callbacks to notify various frame data asynchronously
+ *
+ * PARAMETERS :
+ *   @device : camera hardware device info
+ *   @notify_cb: Notification callback
+ *   @data_cb: data callback
+ *   @data_cb_timestamp: data timestamp callback
+ *   @get_memory: callback to obtain memory
+ *   @user : userdata
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraMuxer::set_callBacks(struct camera_device * device,
+        camera_notify_callback notify_cb,
+        camera_data_callback data_cb,
+        camera_data_timestamp_callback data_cb_timestamp,
+        camera_request_memory get_memory,
+        void *user)
+{
+    LOGH("E");
+    CHECK_MUXER();
+    int rc = NO_ERROR;
+    qcamera_physical_descriptor_t *pCam = NULL;
+    qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+    CHECK_CAMERA(cam);
+
+    // Set callbacks to HWI
+    for (uint32_t i = 0; i < cam->numCameras; i++) {
+        pCam = gMuxer->getPhysicalCamera(cam, i);
+        CHECK_CAMERA(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI(hwi);
+
+        hwi->set_CallBacks(pCam->dev, notify_cb, data_cb, data_cb_timestamp,
+                get_memory, user);
+
+        // Set JPG callbacks
+        // sending the physical camera description with the Jpeg callback
+        // this will be retrieved in callbacks to get the cam instance
+        // delivering JPEGs
+        hwi->setJpegCallBacks(jpeg_data_callback, (void*)pCam);
+
+        // The primary camera's descriptor doubles as the main-jpeg cookie.
+        if (pCam->mode == CAM_MODE_PRIMARY) {
+            rc = gMuxer->setMainJpegCallbackCookie((void*)(pCam));
+            if(rc != NO_ERROR) {
+                LOGW("Error setting Jpeg callback cookie");
+            }
+        }
+    }
+    // Store callback in Muxer to send data callbacks
+    rc = gMuxer->setDataCallback(data_cb);
+    if(rc != NO_ERROR) {
+        LOGW("Error setting data callback");
+    }
+    // memory callback stored to allocate memory for MPO buffer
+    rc = gMuxer->setMemoryCallback(get_memory);
+    if(rc != NO_ERROR) {
+        LOGW("Error setting memory callback");
+    }
+    // actual user callback cookie is saved in Muxer
+    // this will be used to deliver final MPO callback to the framework
+    rc = gMuxer->setMpoCallbackCookie(user);
+    if(rc != NO_ERROR) {
+        LOGW("Error setting mpo cookie");
+    }
+
+    LOGH("X");
+
+}
+
+/*===========================================================================
+ * FUNCTION   : enable_msg_type
+ *
+ * DESCRIPTION: Enable msg_type to send callbacks
+ *
+ * PARAMETERS :
+ *   @device : camera hardware device info
+ *   @msg_type: callback Message type to be enabled
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraMuxer::enable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+    LOGH("E");
+    CHECK_MUXER();
+    qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+    CHECK_CAMERA(cam);
+
+    // Fan the enable request out to every physical camera in the bundle
+    for (uint32_t idx = 0; idx < cam->numCameras; idx++) {
+        qcamera_physical_descriptor_t *pCam = gMuxer->getPhysicalCamera(cam, idx);
+        CHECK_CAMERA(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI(hwi);
+
+        hwi->enable_msg_type(pCam->dev, msg_type);
+    }
+    LOGH("X");
+}
+
+/*===========================================================================
+ * FUNCTION   : disable_msg_type
+ *
+ * DESCRIPTION: disable msg_type to send callbacks
+ *
+ * PARAMETERS :
+ *   @device : camera hardware device info
+ *   @msg_type: callback Message type to be disabled
+ *
+ * RETURN : None
+ *==========================================================================*/
+void QCameraMuxer::disable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+    LOGH("E");
+    CHECK_MUXER();
+    qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+    CHECK_CAMERA(cam);
+
+    // Fan the disable request out to every physical camera in the bundle
+    for (uint32_t idx = 0; idx < cam->numCameras; idx++) {
+        qcamera_physical_descriptor_t *pCam = gMuxer->getPhysicalCamera(cam, idx);
+        CHECK_CAMERA(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI(hwi);
+
+        hwi->disable_msg_type(pCam->dev, msg_type);
+    }
+    LOGH("X");
+}
+
+/*===========================================================================
+ * FUNCTION   : msg_type_enabled
+ *
+ * DESCRIPTION: Check if message type enabled
+ *
+ * PARAMETERS :
+ *   @device : camera hardware device info
+ *   @msg_type: message type
+ *
+ * RETURN : true/false
+ *==========================================================================*/
+int QCameraMuxer::msg_type_enabled(struct camera_device * device, int32_t msg_type)
+{
+    LOGH("E");
+    CHECK_MUXER_ERROR();
+    qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+    CHECK_CAMERA_ERROR(cam);
+
+    // The primary camera's HWI answers for the whole logical camera
+    for (uint32_t idx = 0; idx < cam->numCameras; idx++) {
+        qcamera_physical_descriptor_t *pCam = gMuxer->getPhysicalCamera(cam, idx);
+        CHECK_CAMERA_ERROR(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI_ERROR(hwi);
+
+        if (pCam->mode != CAM_MODE_PRIMARY) {
+            continue;
+        }
+        return hwi->msg_type_enabled(pCam->dev, msg_type);
+    }
+    LOGH("X");
+    return false;
+}
+
+/*===========================================================================
+ * FUNCTION   : start_preview
+ *
+ * DESCRIPTION: Starts logical camera preview
+ *
+ * PARAMETERS :
+ *   @device : camera hardware device info
+ *
+ * RETURN     :
+ *              NO_ERROR  : success
+ *              other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::start_preview(struct camera_device * device)
+{
+    LOGH("E");
+    CHECK_MUXER_ERROR();
+    int rc = NO_ERROR;
+    qcamera_physical_descriptor_t *pCam = NULL;
+    qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+    CHECK_CAMERA_ERROR(cam);
+
+    // prepare preview first for all cameras
+    // Phase 1: prepare_preview on every physical camera before anything starts
+    for (uint32_t i = 0; i < cam->numCameras; i++) {
+        pCam = gMuxer->getPhysicalCamera(cam, i);
+        CHECK_CAMERA_ERROR(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI_ERROR(hwi);
+
+        rc = hwi->prepare_preview(pCam->dev);
+        if (rc != NO_ERROR) {
+            LOGE("Error preparing preview !! ");
+            return rc;
+        }
+    }
+
+    // Phase 2 (multi-camera only): bundle sessions for frame sync.
+    // Primary is bundled with every aux session; every aux is bundled
+    // back with the primary session.
+    if (cam->numCameras > 1) {
+        uint sessionId = 0;
+        // Set up sync for camera sessions
+        for (uint32_t i = 0; i < cam->numCameras; i++) {
+            pCam = gMuxer->getPhysicalCamera(cam, i);
+            CHECK_CAMERA_ERROR(pCam);
+
+            QCamera2HardwareInterface *hwi = pCam->hwi;
+            CHECK_HWI_ERROR(hwi);
+
+            if(pCam->mode == CAM_MODE_PRIMARY) {
+                // bundle primary cam with all aux cameras
+                for (uint32_t j = 0; j < cam->numCameras; j++) {
+                    if (j == cam->nPrimaryPhyCamIndex) {
+                        continue;
+                    }
+                    sessionId = cam->sId[j];
+                    // NOTE(review): the log prints pId[i]/sId[i] while the
+                    // session being bundled is sId[j] — confirm the indices
+                    // are intentional.
+                    LOGH("Related cam id: %d, server id: %d sync ON"
+                            " related session_id %d",
+                            cam->pId[i], cam->sId[i], sessionId);
+                    rc = hwi->bundleRelatedCameras(true, sessionId);
+                    if (rc != NO_ERROR) {
+                        LOGE("Error Bundling physical cameras !! ");
+                        return rc;
+                    }
+                }
+            }
+
+            if (pCam->mode == CAM_MODE_SECONDARY) {
+                // bundle all aux cam with primary cams
+                sessionId = cam->sId[cam->nPrimaryPhyCamIndex];
+                LOGH("Related cam id: %d, server id: %d sync ON"
+                        " related session_id %d",
+                        cam->pId[i], cam->sId[i], sessionId);
+                rc = hwi->bundleRelatedCameras(true, sessionId);
+                if (rc != NO_ERROR) {
+                    LOGE("Error Bundling physical cameras !! ");
+                    return rc;
+                }
+            }
+        }
+
+        // Remember Sync is ON
+        cam->bSyncOn = true;
+    }
+    // Start Preview for all cameras
+    // Phase 3: actually start preview, only after all sessions are bundled
+    for (uint32_t i = 0; i < cam->numCameras; i++) {
+        pCam = gMuxer->getPhysicalCamera(cam, i);
+        CHECK_CAMERA_ERROR(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI_ERROR(hwi);
+        rc = hwi->start_preview(pCam->dev);
+        if (rc != NO_ERROR) {
+            LOGE("Error starting preview !! ");
+            return rc;
+        }
+    }
+    LOGH("X");
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop_preview
+ *
+ * DESCRIPTION: Stops logical camera preview
+ *
+ * PARAMETERS :
+ *   @device : camera hardware device info
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraMuxer::stop_preview(struct camera_device * device)
+{
+    LOGH("E");
+    CHECK_MUXER();
+    qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+    CHECK_CAMERA(cam);
+
+    // Stop preview on each physical camera of this logical bundle
+    for (uint32_t idx = 0; idx < cam->numCameras; idx++) {
+        qcamera_physical_descriptor_t *pCam = gMuxer->getPhysicalCamera(cam, idx);
+        CHECK_CAMERA(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI(hwi);
+
+        QCamera2HardwareInterface::stop_preview(pCam->dev);
+    }
+
+    // Queued JPEG nodes are invalid once preview stops, so drop both queues
+    gMuxer->m_MainJpegQ.flush();
+    gMuxer->m_AuxJpegQ.flush();
+    LOGH(" X");
+}
+
+/*===========================================================================
+ * FUNCTION   : preview_enabled
+ *
+ * DESCRIPTION: Checks preview enabled
+ *
+ * PARAMETERS :
+ *   @device : camera hardware device info
+ *
+ * RETURN     : true/false
+ *==========================================================================*/
+int QCameraMuxer::preview_enabled(struct camera_device * device)
+{
+    LOGH("E");
+    CHECK_MUXER_ERROR();
+    qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+    CHECK_CAMERA_ERROR(cam);
+
+    // Preview state is reported by the primary camera only
+    for (uint32_t idx = 0; idx < cam->numCameras; idx++) {
+        qcamera_physical_descriptor_t *pCam = gMuxer->getPhysicalCamera(cam, idx);
+        CHECK_CAMERA_ERROR(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI_ERROR(hwi);
+
+        if (pCam->mode != CAM_MODE_PRIMARY) {
+            continue;
+        }
+        return hwi->preview_enabled(pCam->dev);
+    }
+    LOGH("X");
+    return false;
+}
+
+/*===========================================================================
+ * FUNCTION   : store_meta_data_in_buffers
+ *
+ * DESCRIPTION: Stores metadata in buffers
+ *
+ * PARAMETERS :
+ *   @device : camera hardware device info
+ *   @enable: Enable/disable metadata
+ *
+ * RETURN     :
+ *              NO_ERROR  : success
+ *              other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::store_meta_data_in_buffers(struct camera_device * device, int enable)
+{
+    LOGH("E");
+    CHECK_MUXER_ERROR();
+    int rc = NO_ERROR;
+    qcamera_physical_descriptor_t *pCam = NULL;
+    qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+    CHECK_CAMERA_ERROR(cam);
+
+    // Apply the metadata-in-buffers mode to every physical camera;
+    // abort on the first failure and propagate its error code.
+    for (uint32_t i = 0; i < cam->numCameras; i++) {
+        pCam = gMuxer->getPhysicalCamera(cam, i);
+        CHECK_CAMERA_ERROR(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI_ERROR(hwi);
+
+        rc = hwi->store_meta_data_in_buffers(pCam->dev, enable);
+        if (rc != NO_ERROR) {
+            // Fixed typo in the log message ("metat data" -> "meta data")
+            LOGE("Error storing meta data !! ");
+            return rc;
+        }
+    }
+    LOGH("X");
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : start_recording
+ *
+ * DESCRIPTION: Starts recording on camcorder
+ *
+ * PARAMETERS :
+ *   @device : camera hardware device info
+ *
+ * RETURN     :
+ *              NO_ERROR  : success
+ *              other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::start_recording(struct camera_device * device)
+{
+    LOGH("E");
+    CHECK_MUXER_ERROR();
+    int rc = NO_ERROR;
+    bool previewRestartNeeded = false;
+    qcamera_physical_descriptor_t *pCam = NULL;
+    qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+    CHECK_CAMERA_ERROR(cam);
+
+    // In cases where recording hint is not set, hwi->start_recording will
+    // internally restart the preview.
+    // To take the preview restart control in muxer,
+    // 1. call pre_start_recording first
+    for (uint32_t i = 0; i < cam->numCameras; i++) {
+        pCam = gMuxer->getPhysicalCamera(cam, i);
+        CHECK_CAMERA_ERROR(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI_ERROR(hwi);
+
+        rc = hwi->pre_start_recording(pCam->dev);
+        if (rc != NO_ERROR) {
+            LOGE("Error preparing recording start!! ");
+            return rc;
+        }
+    }
+
+    // 2. Check if preview restart is needed. Check all cameras.
+    // One camera needing a restart triggers the restart for all of them.
+    for (uint32_t i = 0; i < cam->numCameras; i++) {
+        pCam = gMuxer->getPhysicalCamera(cam, i);
+        CHECK_CAMERA_ERROR(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI_ERROR(hwi);
+
+        if (hwi->isPreviewRestartNeeded()) {
+            previewRestartNeeded = hwi->isPreviewRestartNeeded();
+            break;
+        }
+    }
+
+    if (previewRestartNeeded) {
+        // 3. if preview restart needed. stop the preview first
+        for (uint32_t i = 0; i < cam->numCameras; i++) {
+            pCam = gMuxer->getPhysicalCamera(cam, i);
+            CHECK_CAMERA_ERROR(pCam);
+
+            QCamera2HardwareInterface *hwi = pCam->hwi;
+            CHECK_HWI_ERROR(hwi);
+
+            rc = hwi->restart_stop_preview(pCam->dev);
+            if (rc != NO_ERROR) {
+                LOGE("Error in restart stop preview!! ");
+                return rc;
+            }
+        }
+
+        //4. Update the recording hint value to TRUE
+        // m_bRecordingHintInternallySet records that the muxer (not the
+        // framework) forced the hint, so stop_recording can undo it.
+        for (uint32_t i = 0; i < cam->numCameras; i++) {
+            pCam = gMuxer->getPhysicalCamera(cam, i);
+            CHECK_CAMERA_ERROR(pCam);
+
+            QCamera2HardwareInterface *hwi = pCam->hwi;
+            CHECK_HWI_ERROR(hwi);
+
+            rc = hwi->setRecordingHintValue(TRUE);
+            if (rc != NO_ERROR) {
+                LOGE("Error in setting recording hint value!! ");
+                return rc;
+            }
+            gMuxer->m_bRecordingHintInternallySet = TRUE;
+        }
+
+        // 5. start the preview
+        for (uint32_t i = 0; i < cam->numCameras; i++) {
+            pCam = gMuxer->getPhysicalCamera(cam, i);
+            CHECK_CAMERA_ERROR(pCam);
+
+            QCamera2HardwareInterface *hwi = pCam->hwi;
+            CHECK_HWI_ERROR(hwi);
+
+            rc = hwi->restart_start_preview(pCam->dev);
+            if (rc != NO_ERROR) {
+                LOGE("Error in restart start preview!! ");
+                return rc;
+            }
+        }
+    }
+
+    // Finally forward start_recording to the primary camera only
+    for (uint32_t i = 0; i < cam->numCameras; i++) {
+        pCam = gMuxer->getPhysicalCamera(cam, i);
+        CHECK_CAMERA_ERROR(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI_ERROR(hwi);
+
+        if (pCam->mode == CAM_MODE_PRIMARY) {
+            rc = hwi->start_recording(pCam->dev);
+            if (rc != NO_ERROR) {
+                LOGE("Error starting recording!! ");
+            }
+            break;
+        }
+    }
+    LOGH("X");
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop_recording
+ *
+ * DESCRIPTION: Stops recording on camcorder
+ *
+ * PARAMETERS :
+ *   @device : camera hardware device info
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraMuxer::stop_recording(struct camera_device * device)
+{
+
+    int rc = NO_ERROR;
+    LOGH("E");
+
+    CHECK_MUXER();
+    qcamera_physical_descriptor_t *pCam = NULL;
+    qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+    CHECK_CAMERA(cam);
+
+    // Recording was started on the primary camera only, so only it is stopped
+    for (uint32_t i = 0; i < cam->numCameras; i++) {
+        pCam = gMuxer->getPhysicalCamera(cam, i);
+        CHECK_CAMERA(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI(hwi);
+
+        if (pCam->mode == CAM_MODE_PRIMARY) {
+            QCamera2HardwareInterface::stop_recording(pCam->dev);
+            break;
+        }
+    }
+
+    // If recording hint is set internally to TRUE,
+    // we need to set it to FALSE.
+    // preview restart is needed in between
+    // (mirrors the restart sequence in start_recording, in reverse).
+    // NOTE(review): failures below return early without logging "X".
+    if (gMuxer->m_bRecordingHintInternallySet) {
+        // stop the preview first
+        for (uint32_t i = 0; i < cam->numCameras; i++) {
+            pCam = gMuxer->getPhysicalCamera(cam, i);
+            CHECK_CAMERA(pCam);
+
+            QCamera2HardwareInterface *hwi = pCam->hwi;
+            CHECK_HWI(hwi);
+
+            rc = hwi->restart_stop_preview(pCam->dev);
+            if (rc != NO_ERROR) {
+                LOGE("Error in restart stop preview!! ");
+                return;
+            }
+        }
+
+        // Update the recording hint value to FALSE
+        for (uint32_t i = 0; i < cam->numCameras; i++) {
+            pCam = gMuxer->getPhysicalCamera(cam, i);
+            CHECK_CAMERA(pCam);
+
+            QCamera2HardwareInterface *hwi = pCam->hwi;
+            CHECK_HWI(hwi);
+
+            rc = hwi->setRecordingHintValue(FALSE);
+            if (rc != NO_ERROR) {
+                LOGE("Error in setting recording hint value!! ");
+                return;
+            }
+            gMuxer->m_bRecordingHintInternallySet = FALSE;
+        }
+
+        // start the preview
+        for (uint32_t i = 0; i < cam->numCameras; i++) {
+            pCam = gMuxer->getPhysicalCamera(cam, i);
+            CHECK_CAMERA(pCam);
+
+            QCamera2HardwareInterface *hwi = pCam->hwi;
+            CHECK_HWI(hwi);
+
+            rc = hwi->restart_start_preview(pCam->dev);
+            if (rc != NO_ERROR) {
+                LOGE("Error in restart start preview!! ");
+                return;
+            }
+        }
+    }
+    LOGH("X");
+}
+
+/*===========================================================================
+ * FUNCTION   : recording_enabled
+ *
+ * DESCRIPTION: Checks for recording enabled
+ *
+ * PARAMETERS :
+ *   @device : camera hardware device info
+ *
+ * RETURN     : true/false
+ *==========================================================================*/
+int QCameraMuxer::recording_enabled(struct camera_device * device)
+{
+    LOGH("E");
+    CHECK_MUXER_ERROR();
+    qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+    CHECK_CAMERA_ERROR(cam);
+
+    // Recording state is reported by the primary camera only
+    for (uint32_t idx = 0; idx < cam->numCameras; idx++) {
+        qcamera_physical_descriptor_t *pCam = gMuxer->getPhysicalCamera(cam, idx);
+        CHECK_CAMERA_ERROR(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI_ERROR(hwi);
+
+        if (pCam->mode != CAM_MODE_PRIMARY) {
+            continue;
+        }
+        return hwi->recording_enabled(pCam->dev);
+    }
+    LOGH("X");
+    return false;
+}
+
+/*===========================================================================
+ * FUNCTION   : release_recording_frame
+ *
+ * DESCRIPTION: Release the recording frame
+ *
+ * PARAMETERS :
+ *   @device : camera hardware device info
+ *   @opaque: Frame to be released
+ *
+  * RETURN     : None
+ *==========================================================================*/
+void QCameraMuxer::release_recording_frame(struct camera_device * device,
+                const void *opaque)
+{
+    LOGH("E");
+    CHECK_MUXER();
+    qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+    CHECK_CAMERA(cam);
+
+    // Forward the frame release to the primary camera only
+    for (uint32_t idx = 0; idx < cam->numCameras; idx++) {
+        qcamera_physical_descriptor_t *pCam = gMuxer->getPhysicalCamera(cam, idx);
+        CHECK_CAMERA(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI(hwi);
+
+        if (pCam->mode == CAM_MODE_PRIMARY) {
+            QCamera2HardwareInterface::release_recording_frame(pCam->dev, opaque);
+            break;
+        }
+    }
+    LOGH("X");
+}
+
+/*===========================================================================
+ * FUNCTION   : auto_focus
+ *
+ * DESCRIPTION: Performs auto focus on camera
+ *
+ * PARAMETERS :
+ *   @device : camera hardware device info
+ *
+ * RETURN     :
+ *              NO_ERROR  : success
+ *              other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::auto_focus(struct camera_device * device)
+{
+    LOGH("E");
+    CHECK_MUXER_ERROR();
+    int ret = NO_ERROR;
+    qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+    CHECK_CAMERA_ERROR(cam);
+
+    // Auto focus is issued on the primary camera only
+    for (uint32_t idx = 0; idx < cam->numCameras; idx++) {
+        qcamera_physical_descriptor_t *pCam = gMuxer->getPhysicalCamera(cam, idx);
+        CHECK_CAMERA_ERROR(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI_ERROR(hwi);
+
+        if (pCam->mode != CAM_MODE_PRIMARY) {
+            continue;
+        }
+        ret = QCamera2HardwareInterface::auto_focus(pCam->dev);
+        if (ret != NO_ERROR) {
+            LOGE("Error auto focusing !! ");
+            return ret;
+        }
+        break;
+    }
+    LOGH("X");
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : cancel_auto_focus
+ *
+ * DESCRIPTION: Cancels auto focus
+ *
+ * PARAMETERS :
+ *   @device : camera hardware device info
+ *
+ * RETURN     :
+ *              NO_ERROR  : success
+ *              other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::cancel_auto_focus(struct camera_device * device)
+{
+    LOGH("E");
+    CHECK_MUXER_ERROR();
+    int ret = NO_ERROR;
+    qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+    CHECK_CAMERA_ERROR(cam);
+
+    // Cancel auto focus on the primary camera only
+    for (uint32_t idx = 0; idx < cam->numCameras; idx++) {
+        qcamera_physical_descriptor_t *pCam = gMuxer->getPhysicalCamera(cam, idx);
+        CHECK_CAMERA_ERROR(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI_ERROR(hwi);
+
+        if (pCam->mode != CAM_MODE_PRIMARY) {
+            continue;
+        }
+        ret = QCamera2HardwareInterface::cancel_auto_focus(pCam->dev);
+        if (ret != NO_ERROR) {
+            LOGE("Error cancelling auto focus !! ");
+            return ret;
+        }
+        break;
+    }
+    LOGH("X");
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : take_picture
+ *
+ * DESCRIPTION: Take snapshots on device
+ *
+ * PARAMETERS :
+ *   @device : camera hardware device info
+ *
+ * RETURN     :
+ *              NO_ERROR  : success
+ *              other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::take_picture(struct camera_device * device)
+{
+    LOGH("E");
+    CHECK_MUXER_ERROR();
+    int rc = NO_ERROR;
+    bool previewRestartNeeded = false;
+    qcamera_physical_descriptor_t *pCam = NULL;
+    qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+    CHECK_CAMERA_ERROR(cam);
+
+    // MPO composition is gated by persist.camera.dual.camera.mpo (default on)
+    char prop[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.dual.camera.mpo", prop, "1");
+    gMuxer->m_bMpoEnabled = atoi(prop);
+    // If only one Physical Camera included in Logical, disable MPO
+    int numOfAcitvePhyCam = 0;
+    gMuxer->getActiveNumOfPhyCam(cam, numOfAcitvePhyCam);
+    if (gMuxer->m_bMpoEnabled && numOfAcitvePhyCam <= 1) {
+        gMuxer->m_bMpoEnabled = 0;
+    }
+    LOGH("dualCamera MPO Enabled:%d ", gMuxer->m_bMpoEnabled);
+
+    // Lazily fetch the JPEG ops/handle from camera 0 on the first capture
+    // and share them with the remaining cameras so all sessions encode
+    // through the same jpeg client.
+    if (!gMuxer->mJpegClientHandle) {
+        // set up jpeg handles
+        pCam = gMuxer->getPhysicalCamera(cam, 0);
+        CHECK_CAMERA_ERROR(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI_ERROR(hwi);
+
+        rc = hwi->getJpegHandleInfo(&gMuxer->mJpegOps, &gMuxer->mJpegMpoOps,
+                &gMuxer->mJpegClientHandle);
+        if (rc != NO_ERROR) {
+            LOGE("Error retrieving jpeg handle!");
+            return rc;
+        }
+
+        for (uint32_t i = 1; i < cam->numCameras; i++) {
+            pCam = gMuxer->getPhysicalCamera(cam, i);
+            CHECK_CAMERA_ERROR(pCam);
+
+            QCamera2HardwareInterface *hwi = pCam->hwi;
+            CHECK_HWI_ERROR(hwi);
+
+            rc = hwi->setJpegHandleInfo(&gMuxer->mJpegOps, &gMuxer->mJpegMpoOps,
+                    gMuxer->mJpegClientHandle);
+            if (rc != NO_ERROR) {
+                LOGE("Error setting jpeg handle %d!", i);
+                return rc;
+            }
+        }
+    }
+
+    // prepare snapshot for main camera
+    for (uint32_t i = 0; i < cam->numCameras; i++) {
+        pCam = gMuxer->getPhysicalCamera(cam, i);
+        CHECK_CAMERA_ERROR(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI_ERROR(hwi);
+
+        if (pCam->mode == CAM_MODE_PRIMARY) {
+            rc = hwi->prepare_snapshot(pCam->dev);
+            if (rc != NO_ERROR) {
+                LOGE("Error preparing for snapshot !! ");
+                return rc;
+            }
+        }
+        // set Mpo composition for each session
+        rc = hwi->setMpoComposition(gMuxer->m_bMpoEnabled);
+        //disable MPO if AOST features are enabled
+        // (a non-NO_ERROR return here downgrades to non-MPO capture
+        // rather than failing the whole take_picture)
+        if (rc != NO_ERROR) {
+            gMuxer->m_bMpoEnabled = 0;
+            rc = NO_ERROR;
+        }
+    }
+
+    // initialize Jpeg Queues
+    // Arm the compose thread so incoming JPEGs can be paired into an MPO
+    gMuxer->m_MainJpegQ.init();
+    gMuxer->m_AuxJpegQ.init();
+    gMuxer->m_ComposeMpoTh.sendCmd(
+            CAMERA_CMD_TYPE_START_DATA_PROC, FALSE, FALSE);
+
+    // In cases where recording hint is set, preview is running,
+    // hwi->take_picture will internally restart the preview.
+    // To take the preview restart control in muxer,
+    // 1. call pre_take_picture first
+    for (uint32_t i = 0; i < cam->numCameras; i++) {
+        pCam = gMuxer->getPhysicalCamera(cam, i);
+        CHECK_CAMERA_ERROR(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI_ERROR(hwi);
+
+        // no need to call pre_take_pic on Aux if not MPO (for AOST,liveshot...etc.)
+        if ( (gMuxer->m_bMpoEnabled == 1) || (pCam->mode == CAM_MODE_PRIMARY) ) {
+            rc = hwi->pre_take_picture(pCam->dev);
+            if (rc != NO_ERROR) {
+                LOGE("Error preparing take_picture!! ");
+                return rc;
+            }
+        }
+    }
+
+    // 2. Check if preview restart is needed. Check all cameras.
+    // One camera needing a restart triggers the restart for all of them.
+    for (uint32_t i = 0; i < cam->numCameras; i++) {
+        pCam = gMuxer->getPhysicalCamera(cam, i);
+        CHECK_CAMERA_ERROR(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI_ERROR(hwi);
+
+        if (hwi->isPreviewRestartNeeded()) {
+            previewRestartNeeded = hwi->isPreviewRestartNeeded();
+            break;
+        }
+    }
+
+    if (previewRestartNeeded) {
+        // 3. if preview restart needed. stop the preview first
+        for (uint32_t i = 0; i < cam->numCameras; i++) {
+            pCam = gMuxer->getPhysicalCamera(cam, i);
+            CHECK_CAMERA_ERROR(pCam);
+
+            QCamera2HardwareInterface *hwi = pCam->hwi;
+            CHECK_HWI_ERROR(hwi);
+
+            rc = hwi->restart_stop_preview(pCam->dev);
+            if (rc != NO_ERROR) {
+                LOGE("Error in restart stop preview!! ");
+                return rc;
+            }
+        }
+
+        //4. Update the recording hint value to FALSE
+        for (uint32_t i = 0; i < cam->numCameras; i++) {
+            pCam = gMuxer->getPhysicalCamera(cam, i);
+            CHECK_CAMERA_ERROR(pCam);
+
+            QCamera2HardwareInterface *hwi = pCam->hwi;
+            CHECK_HWI_ERROR(hwi);
+
+            rc = hwi->setRecordingHintValue(FALSE);
+            if (rc != NO_ERROR) {
+                LOGE("Error in setting recording hint value!! ");
+                return rc;
+            }
+        }
+
+        // 5. start the preview
+        for (uint32_t i = 0; i < cam->numCameras; i++) {
+            pCam = gMuxer->getPhysicalCamera(cam, i);
+            CHECK_CAMERA_ERROR(pCam);
+
+            QCamera2HardwareInterface *hwi = pCam->hwi;
+            CHECK_HWI_ERROR(hwi);
+
+            rc = hwi->restart_start_preview(pCam->dev);
+            if (rc != NO_ERROR) {
+                LOGE("Error in restart start preview!! ");
+                return rc;
+            }
+        }
+    }
+
+    // As frame sync for dual cameras is enabled, the take picture call
+    // for secondary camera is handled only till HAL level to init corresponding
+    // pproc channel and update statemachine.
+    // This call is forwarded to mm-camera-intf only for primary camera
+    // Primary camera should receive the take picture call after all secondary
+    // camera statemachines are updated
+    // (hence the reverse iteration: aux cameras first, primary last)
+    for (int32_t i = cam->numCameras-1 ; i >= 0; i--) {
+        pCam = gMuxer->getPhysicalCamera(cam, i);
+        CHECK_CAMERA_ERROR(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI_ERROR(hwi);
+
+        // no need to call take_pic on Aux if not MPO (for AOST)
+        if ( (gMuxer->m_bMpoEnabled == 1) || (pCam->mode == CAM_MODE_PRIMARY) ) {
+            rc = QCamera2HardwareInterface::take_picture(pCam->dev);
+            if (rc != NO_ERROR) {
+                LOGE("Error taking picture !! ");
+                return rc;
+            }
+        }
+    }
+    LOGH("X");
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : cancel_picture
+ *
+ * DESCRIPTION: Cancel the take picture call
+ *
+ * PARAMETERS :
+ *   @device : camera hardware device info
+ *
+ * RETURN     :
+ *              NO_ERROR  : success
+ *              other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::cancel_picture(struct camera_device * device)
+{
+    LOGH("E");
+    CHECK_MUXER_ERROR();
+    int ret = NO_ERROR;
+    qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+    CHECK_CAMERA_ERROR(cam);
+
+    // Cancel the capture on every physical camera; bail out on first failure
+    for (uint32_t idx = 0; idx < cam->numCameras; idx++) {
+        qcamera_physical_descriptor_t *pCam = gMuxer->getPhysicalCamera(cam, idx);
+        CHECK_CAMERA_ERROR(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI_ERROR(hwi);
+
+        ret = QCamera2HardwareInterface::cancel_picture(pCam->dev);
+        if (ret != NO_ERROR) {
+            LOGE("Error cancelling picture !! ");
+            return ret;
+        }
+    }
+    // Stop the MPO compose thread and drop any queued JPEGs
+    gMuxer->m_ComposeMpoTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, FALSE, FALSE);
+    gMuxer->m_MainJpegQ.flush();
+    gMuxer->m_AuxJpegQ.flush();
+
+    LOGH("X");
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : set_parameters
+ *
+ * DESCRIPTION: Sets the parameters on camera
+ *
+ * PARAMETERS :
+ *   @device : camera hardware device info
+ *   @parms : Parameters to be set on camera
+ *
+ * RETURN     :
+ *              NO_ERROR  : success
+ *              other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::set_parameters(struct camera_device * device,
+        const char *parms)
+
+{
+    LOGH("E");
+    CHECK_MUXER_ERROR();
+    int rc = NO_ERROR;
+    qcamera_physical_descriptor_t *pCam = NULL;
+    qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+    bool needRestart = false;
+    CHECK_CAMERA_ERROR(cam);
+
+    // Phase 1: push the parameter string to every camera and accumulate
+    // whether any of them requires a stop/restart to apply it
+    for (uint32_t i = 0; i < cam->numCameras; i++) {
+        pCam = gMuxer->getPhysicalCamera(cam, i);
+        CHECK_CAMERA_ERROR(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI_ERROR(hwi);
+
+        rc = QCamera2HardwareInterface::set_parameters(pCam->dev, parms);
+        if (rc != NO_ERROR) {
+            LOGE("Error setting parameters !! ");
+            return rc;
+        }
+
+        needRestart |= hwi->getNeedRestart();
+    }
+
+    // Phase 2: if any camera needs a restart, stop all of them first
+    if (needRestart) {
+        for (uint32_t i = 0; i < cam->numCameras; i++) {
+            pCam = gMuxer->getPhysicalCamera(cam, i);
+            CHECK_CAMERA_ERROR(pCam);
+
+            QCamera2HardwareInterface *hwi = pCam->hwi;
+            CHECK_HWI_ERROR(hwi);
+
+            LOGD("stopping preview for cam %d", i);
+            rc = QCamera2HardwareInterface::stop_after_set_params(pCam->dev);
+            if (rc != NO_ERROR) {
+                LOGE("Error stopping camera rc=%d!! ", rc);
+                return rc;
+            }
+        }
+    }
+
+    // Phase 3: commit the parameters on every camera (cameras stopped if
+    // a restart was required)
+    for (uint32_t i = 0; i < cam->numCameras; i++) {
+        pCam = gMuxer->getPhysicalCamera(cam, i);
+        CHECK_CAMERA_ERROR(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI_ERROR(hwi);
+
+        LOGD("commiting parameters for cam %d", i);
+        rc = QCamera2HardwareInterface::commit_params(pCam->dev);
+        if (rc != NO_ERROR) {
+            LOGE("Error committing parameters rc=%d!! ", rc);
+            return rc;
+        }
+    }
+
+    // Phase 4: restart every camera that was stopped in phase 2
+    if (needRestart) {
+        for (uint32_t i = 0; i < cam->numCameras; i++) {
+            pCam = gMuxer->getPhysicalCamera(cam, i);
+            CHECK_CAMERA_ERROR(pCam);
+
+            QCamera2HardwareInterface *hwi = pCam->hwi;
+            CHECK_HWI_ERROR(hwi);
+
+            LOGD("restarting preview for cam %d", i);
+            rc = QCamera2HardwareInterface::restart_after_set_params(pCam->dev);
+            if (rc != NO_ERROR) {
+                LOGE("Error restarting camera rc=%d!! ", rc);
+                return rc;
+            }
+        }
+    }
+
+    LOGH(" X");
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : get_parameters
+ *
+ * DESCRIPTION: Gets the parameters on camera
+ *
+ * PARAMETERS :
+ *   @device : camera hardware device info
+ *
+ * RETURN     : Parameter string or NULL
+ *==========================================================================*/
+char* QCameraMuxer::get_parameters(struct camera_device * device)
+{
+    LOGH("E");
+
+    if (!gMuxer)
+        return NULL;
+
+    char *paramStr = NULL;
+    qcamera_logical_descriptor_t *logicalCam = gMuxer->getLogicalCamera(device);
+    if (!logicalCam) {
+        LOGE("Error getting logical camera");
+        return NULL;
+    }
+
+    // Only the primary physical camera's parameter string is reported upstream.
+    for (uint32_t idx = 0; idx < logicalCam->numCameras; idx++) {
+        qcamera_physical_descriptor_t *phyCam =
+                gMuxer->getPhysicalCamera(logicalCam, idx);
+        if (!phyCam) {
+            LOGE("Error getting physical camera");
+            return NULL;
+        }
+        if (!phyCam->hwi) {
+            LOGE("Allocation of hardware interface failed");
+            return NULL;
+        }
+        if (phyCam->mode == CAM_MODE_PRIMARY) {
+            // Get only primary camera parameters
+            paramStr = QCamera2HardwareInterface::get_parameters(phyCam->dev);
+            break;
+        }
+    }
+
+    LOGH("X");
+    return paramStr;
+}
+
+/*===========================================================================
+ * FUNCTION   : put_parameters
+ *
+ * DESCRIPTION: Puts parameters on camera
+ *
+ * PARAMETERS :
+ *   @device : camera hardware device info
+ *   @parm : parameters
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraMuxer::put_parameters(struct camera_device * device, char *parm)
+{
+    LOGH("E");
+    CHECK_MUXER();
+    qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+    CHECK_CAMERA(cam);
+
+    // The parameter buffer was handed out by the primary camera's HWI, so
+    // only the primary camera is asked to free it.
+    for (uint32_t idx = 0; idx < cam->numCameras; idx++) {
+        qcamera_physical_descriptor_t *phyCam =
+                gMuxer->getPhysicalCamera(cam, idx);
+        CHECK_CAMERA(phyCam);
+
+        QCamera2HardwareInterface *hwi = phyCam->hwi;
+        CHECK_HWI(hwi);
+
+        if (phyCam->mode == CAM_MODE_PRIMARY) {
+            // Parameters are not used in HWI and hence freed
+            QCamera2HardwareInterface::put_parameters(phyCam->dev, parm);
+            break;
+        }
+    }
+    LOGH("X");
+}
+
+/*===========================================================================
+ * FUNCTION   : send_command
+ *
+ * DESCRIPTION: Send command to camera
+ *
+ * PARAMETERS :
+ *   @device : camera hardware device info
+ *   @cmd : Command
+ *   @arg1/arg2 : command arguments
+ *
+ * RETURN     :
+ *              NO_ERROR  : success
+ *              other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::send_command(struct camera_device * device,
+        int32_t cmd, int32_t arg1, int32_t arg2)
+{
+    LOGH("E");
+    CHECK_MUXER_ERROR();
+    int rc = NO_ERROR;
+    qcamera_physical_descriptor_t *pCam = NULL;
+    qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+    CHECK_CAMERA_ERROR(cam);
+
+    // Forward the command to every physical camera; abort on first failure.
+    for (uint32_t i = 0; i < cam->numCameras; i++) {
+        pCam = gMuxer->getPhysicalCamera(cam, i);
+        CHECK_CAMERA_ERROR(pCam);
+
+        QCamera2HardwareInterface *hwi = pCam->hwi;
+        CHECK_HWI_ERROR(hwi);
+
+        rc = QCamera2HardwareInterface::send_command(pCam->dev, cmd, arg1, arg2);
+        if (rc != NO_ERROR) {
+            LOGE("Error sending command !! ");
+            return rc;
+        }
+    }
+
+        // Muxer-level post-processing for the non-vanilla longshot extension.
+        switch (cmd) {
+#ifndef VANILLA_HAL
+        case CAMERA_CMD_LONGSHOT_ON:
+            // Longshot-on may need a pipeline restart on each physical camera.
+            for (uint32_t i = 0; i < cam->numCameras; i++) {
+                pCam = gMuxer->getPhysicalCamera(cam, i);
+                CHECK_CAMERA_ERROR(pCam);
+
+                QCamera2HardwareInterface *hwi = pCam->hwi;
+                CHECK_HWI_ERROR(hwi);
+
+                rc = QCamera2HardwareInterface::send_command_restart(pCam->dev,
+                        cmd, arg1, arg2);
+                if (rc != NO_ERROR) {
+                    LOGE("Error sending command restart !! ");
+                    return rc;
+                }
+            }
+        break;
+        case CAMERA_CMD_LONGSHOT_OFF:
+            // Stop MPO composition and drop any queued JPEGs from the burst.
+            gMuxer->m_ComposeMpoTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC,
+                    FALSE, FALSE);
+            // flush Jpeg Queues
+            gMuxer->m_MainJpegQ.flush();
+            gMuxer->m_AuxJpegQ.flush();
+        break;
+#endif
+        default:
+            // do nothing
+            rc = NO_ERROR;
+        break;
+        }
+
+    LOGH("X");
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : release
+ *
+ * DESCRIPTION: Release the camera
+ *
+ * PARAMETERS :
+ *   @device : camera hardware device info
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraMuxer::release(struct camera_device * device)
+{
+    LOGH("E");
+    CHECK_MUXER();
+    qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+    CHECK_CAMERA(cam);
+
+    // Fan the release call out to every physical camera in the bundle.
+    for (uint32_t idx = 0; idx < cam->numCameras; idx++) {
+        qcamera_physical_descriptor_t *phyCam =
+                gMuxer->getPhysicalCamera(cam, idx);
+        CHECK_CAMERA(phyCam);
+
+        QCamera2HardwareInterface *hwi = phyCam->hwi;
+        CHECK_HWI(hwi);
+
+        QCamera2HardwareInterface::release(phyCam->dev);
+    }
+    LOGH("X");
+}
+
+/*===========================================================================
+ * FUNCTION   : dump
+ *
+ * DESCRIPTION: Dump the camera info
+ *
+ * PARAMETERS :
+ *   @device : camera hardware device info
+ *   @fd : fd
+ *
+ * RETURN     :
+ *              NO_ERROR  : success
+ *              other: non-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::dump(struct camera_device * device, int fd)
+{
+    LOGH("E");
+    CHECK_MUXER_ERROR();
+    int rc = NO_ERROR;
+    qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(device);
+    CHECK_CAMERA_ERROR(cam);
+
+    // Dump the state of each physical camera into the supplied fd,
+    // stopping at the first failure.
+    for (uint32_t idx = 0; idx < cam->numCameras; idx++) {
+        qcamera_physical_descriptor_t *phyCam =
+                gMuxer->getPhysicalCamera(cam, idx);
+        CHECK_CAMERA_ERROR(phyCam);
+
+        QCamera2HardwareInterface *hwi = phyCam->hwi;
+        CHECK_HWI_ERROR(hwi);
+
+        rc = QCamera2HardwareInterface::dump(phyCam->dev, fd);
+        if (rc != NO_ERROR) {
+            LOGE("Error dumping");
+            return rc;
+        }
+    }
+    LOGH("X");
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : close_camera_device
+ *
+ * DESCRIPTION: Close the camera
+ *
+ * PARAMETERS :
+ *   @hw_dev : camera hardware device info
+ *
+ * RETURN     :
+ *              NO_ERROR  : success
+ *              other: non-zero failure code
+ *==========================================================================*/
+/*
+ * Closes all physical cameras behind the logical device. Frame-sync bundles
+ * are torn down first (primary unbundled from each aux session and each aux
+ * unbundled from the primary session); cameras are then closed regardless of
+ * the unbundle result. Returns NO_ERROR on success.
+ */
+int QCameraMuxer::close_camera_device(hw_device_t *hw_dev)
+{
+    LOGH("E");
+    CHECK_MUXER_ERROR();
+    int rc = NO_ERROR;
+    qcamera_physical_descriptor_t *pCam = NULL;
+    camera_device_t *cam_dev = (camera_device_t*)hw_dev;
+    qcamera_logical_descriptor_t *cam = gMuxer->getLogicalCamera(cam_dev);
+    CHECK_CAMERA_ERROR(cam);
+
+    // Unlink camera sessions
+    if (cam->bSyncOn) {
+        if (cam->numCameras > 1) {
+            uint sessionId = 0;
+            // unbundle primary camera with all aux cameras
+            for (uint32_t i = 0; i < cam->numCameras; i++) {
+                pCam = gMuxer->getPhysicalCamera(cam, i);
+                CHECK_CAMERA_ERROR(pCam);
+
+                QCamera2HardwareInterface *hwi = pCam->hwi;
+                CHECK_HWI_ERROR(hwi);
+
+                if(pCam->mode == CAM_MODE_PRIMARY) {
+                    // unbundle primary cam from each related aux camera session
+                    for (uint32_t j = 0; j < cam->numCameras; j++) {
+                        if (j == cam->nPrimaryPhyCamIndex) {
+                            continue;
+                        }
+                        sessionId = cam->sId[j];
+                        LOGH("Related cam id: %d, server id: %d sync OFF"
+                                " related session_id %d",
+                                cam->pId[i], cam->sId[i], sessionId);
+                        rc = hwi->bundleRelatedCameras(false, sessionId);
+                        if (rc != NO_ERROR) {
+                            LOGE("Error Bundling physical cameras !! ");
+                            break;
+                        }
+                    }
+                }
+
+                if (pCam->mode == CAM_MODE_SECONDARY) {
+                    // unbundle all aux cam with primary cams
+                    sessionId = cam->sId[cam->nPrimaryPhyCamIndex];
+                    LOGH("Related cam id: %d, server id: %d sync OFF"
+                            " related session_id %d",
+                            cam->pId[i], cam->sId[i], sessionId);
+                    rc = hwi->bundleRelatedCameras(false, sessionId);
+                    if (rc != NO_ERROR) {
+                        LOGE("Error Bundling physical cameras !! ");
+                        break;
+                    }
+                }
+            }
+        }
+        cam->bSyncOn = false;
+    }
+
+    // Attempt to close all cameras regardless of unbundle results
+    for (uint32_t i = 0; i < cam->numCameras; i++) {
+        pCam = gMuxer->getPhysicalCamera(cam, i);
+        CHECK_CAMERA_ERROR(pCam);
+
+        hw_device_t *dev = (hw_device_t*)(pCam->dev);
+        // %p (not %x) for pointers: %x mismatches the argument type and
+        // truncates/misreads pointers on 64-bit builds.
+        LOGH("hw device %p, hw %p", dev, pCam->hwi);
+
+        rc = QCamera2HardwareInterface::close_camera_device(dev);
+        if (rc != NO_ERROR) {
+            LOGE("Error closing camera");
+        }
+        pCam->hwi = NULL;
+        pCam->dev = NULL;
+    }
+
+    // Reset JPEG client handle
+    gMuxer->setJpegHandle(0);
+    LOGH("X, rc: %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION         : setupLogicalCameras
+ *
+ * DESCRIPTION     : Creates Camera Muxer if not created
+ *
+ * RETURN     :
+ *              NO_ERROR  : success
+ *              other: non-zero failure code
+ *==========================================================================*/
+/*
+ * Enumerates all physical cameras and groups them into logical cameras.
+ * Cameras whose type differs from the primary type become hidden SECONDARY
+ * cameras (unless persist.camera.aux.camera exposes them) and are paired with
+ * the same-facing logical camera. Returns NO_ERROR, BAD_VALUE, NO_MEMORY, or
+ * -ENODEV if no main camera is found.
+ */
+int QCameraMuxer::setupLogicalCameras()
+{
+    int rc = NO_ERROR;
+    char prop[PROPERTY_VALUE_MAX];
+    int i = 0;
+    int primaryType = CAM_TYPE_MAIN;
+
+    // Format fixed: the original "[%d] E: rc = %d" had two specifiers but
+    // only one argument, which is undefined behavior for printf-family calls.
+    LOGH("E: rc = %d", rc);
+    // Signifies whether AUX camera has to be exposed as physical camera
+    property_get("persist.camera.aux.camera", prop, "0");
+    m_bAuxCameraExposed = atoi(prop);
+
+    // Signifies whether AUX camera needs to be swapped
+    property_get("persist.camera.auxcamera.swap", prop, "0");
+    int swapAux = atoi(prop);
+    if (swapAux != 0) {
+        primaryType = CAM_TYPE_AUX;
+    }
+
+    // Check for number of camera present on device
+    if (!m_nPhyCameras || (m_nPhyCameras > MM_CAMERA_MAX_NUM_SENSORS)) {
+        LOGE("Error!! Invalid number of cameras: %d",
+                 m_nPhyCameras);
+        return BAD_VALUE;
+    }
+
+    m_pPhyCamera = new qcamera_physical_descriptor_t[m_nPhyCameras];
+    if (!m_pPhyCamera) {
+        LOGE("Error allocating camera info buffer!!");
+        return NO_MEMORY;
+    }
+    memset(m_pPhyCamera, 0x00,
+            (m_nPhyCameras * sizeof(qcamera_physical_descriptor_t)));
+    uint32_t cameraId = 0;
+    m_nLogicalCameras = 0;
+
+    // Enumerate physical cameras; count one logical camera per exposed camera.
+    // NOTE(review): the getCapabilities() return value is not checked here —
+    // a failing camera is still enumerated; confirm this is intended.
+    for (i = 0; i < m_nPhyCameras ; i++, cameraId++) {
+        camera_info *info = &m_pPhyCamera[i].cam_info;
+        rc = QCamera2HardwareInterface::getCapabilities(cameraId,
+                info, &m_pPhyCamera[i].type);
+        m_pPhyCamera[i].id = cameraId;
+        m_pPhyCamera[i].device_version = CAMERA_DEVICE_API_VERSION_1_0;
+        m_pPhyCamera[i].mode = CAM_MODE_PRIMARY;
+
+        if (!m_bAuxCameraExposed && (m_pPhyCamera[i].type != primaryType)) {
+            m_pPhyCamera[i].mode = CAM_MODE_SECONDARY;
+            LOGH("Camera ID: %d, Aux Camera, type: %d, facing: %d",
+                    cameraId, m_pPhyCamera[i].type,
+                    m_pPhyCamera[i].cam_info.facing);
+        }
+        else {
+            m_nLogicalCameras++;
+            LOGH("Camera ID: %d, Main Camera, type: %d, facing: %d",
+                    cameraId, m_pPhyCamera[i].type,
+                    m_pPhyCamera[i].cam_info.facing);
+        }
+    }
+
+    if (!m_nLogicalCameras) {
+        // No Main camera detected, return from here
+        LOGE("Error !!!! detecting main camera!!");
+        delete [] m_pPhyCamera;
+        m_pPhyCamera = NULL;
+        return -ENODEV;
+    }
+    // Allocate Logical Camera descriptors
+    m_pLogicalCamera = new qcamera_logical_descriptor_t[m_nLogicalCameras];
+    if (!m_pLogicalCamera) {
+        LOGE("Error !!!! allocating camera info buffer!!");
+        delete [] m_pPhyCamera;
+        m_pPhyCamera = NULL;
+        return  NO_MEMORY;
+    }
+    memset(m_pLogicalCamera, 0x00,
+            (m_nLogicalCameras * sizeof(qcamera_logical_descriptor_t)));
+    // Assign MAIN cameras for each logical camera
+    int index = 0;
+    for (i = 0; i < m_nPhyCameras ; i++) {
+        if (m_pPhyCamera[i].mode == CAM_MODE_PRIMARY) {
+            m_pLogicalCamera[index].nPrimaryPhyCamIndex = 0;
+            m_pLogicalCamera[index].id = index;
+            m_pLogicalCamera[index].device_version = CAMERA_DEVICE_API_VERSION_1_0;
+            m_pLogicalCamera[index].pId[0] = i;
+            m_pLogicalCamera[index].type[0] = CAM_TYPE_MAIN;
+            m_pLogicalCamera[index].mode[0] = CAM_MODE_PRIMARY;
+            m_pLogicalCamera[index].facing = m_pPhyCamera[i].cam_info.facing;
+            m_pLogicalCamera[index].numCameras++;
+            LOGH("Logical Main Camera ID: %d, facing: %d,"
+                    "Phy Id: %d type: %d mode: %d",
+                    m_pLogicalCamera[index].id,
+                    m_pLogicalCamera[index].facing,
+                    m_pLogicalCamera[index].pId[0],
+                    m_pLogicalCamera[index].type[0],
+                    m_pLogicalCamera[index].mode[0]);
+
+            index++;
+        }
+    }
+    // Now assign each hidden AUX camera to the logical camera that faces the
+    // same direction.
+    for (i = 0; i < m_nPhyCameras ; i++) {
+        if (m_pPhyCamera[i].mode == CAM_MODE_SECONDARY) {
+            for (int j = 0; j < m_nLogicalCameras; j++) {
+                int n = m_pLogicalCamera[j].numCameras;
+                ///@note n can only be 1 at this point
+                if ((n < MAX_NUM_CAMERA_PER_BUNDLE) &&
+                        (m_pLogicalCamera[j].facing ==
+                        m_pPhyCamera[i].cam_info.facing)) {
+                    m_pLogicalCamera[j].pId[n] = i;
+                    m_pLogicalCamera[j].type[n] = CAM_TYPE_AUX;
+                    m_pLogicalCamera[j].mode[n] = CAM_MODE_SECONDARY;
+                    m_pLogicalCamera[j].numCameras++;
+                    LOGH("Aux %d for Logical Camera ID: %d,"
+                        "aux phy id:%d, type: %d mode: %d",
+                        n, j, m_pLogicalCamera[j].pId[n],
+                        m_pLogicalCamera[j].type[n], m_pLogicalCamera[j].mode[n]);
+                }
+            }
+        }
+    }
+    //Print logical and physical camera tables
+    for (i = 0; i < m_nLogicalCameras ; i++) {
+        for (uint8_t j = 0; j < m_pLogicalCamera[i].numCameras; j++) {
+            LOGH("Logical Camera ID: %d, index: %d, "
+                    "facing: %d, Phy Id: %d type: %d mode: %d",
+                    i, j, m_pLogicalCamera[i].facing,
+                    m_pLogicalCamera[i].pId[j], m_pLogicalCamera[i].type[j],
+                    m_pLogicalCamera[i].mode[j]);
+        }
+    }
+    // Same format fix as at entry.
+    LOGH("X: rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumberOfCameras
+ *
+ * DESCRIPTION: query number of logical cameras detected
+ *
+ * RETURN     : number of cameras detected
+ *==========================================================================*/
+int QCameraMuxer::getNumberOfCameras()
+{
+    // Count of exposed (logical) cameras, computed in setupLogicalCameras().
+    return m_nLogicalCameras;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCameraInfo
+ *
+ * DESCRIPTION: query camera information with its ID
+ *
+ * PARAMETERS :
+ *   @camera_id : camera ID
+ *   @info      : ptr to camera info struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::getCameraInfo(int camera_id,
+        struct camera_info *info, __unused cam_sync_type_t *p_cam_type)
+{
+    LOGH("E, camera_id = %d", camera_id);
+
+    // Reject out-of-range ids and a missing output struct.
+    if (!m_nLogicalCameras || (camera_id < 0) ||
+            (camera_id >= m_nLogicalCameras) || !info) {
+        LOGE("m_nLogicalCameras: %d, camera id: %d",
+                m_nLogicalCameras, camera_id);
+        return -ENODEV;
+    }
+
+    if (!m_pLogicalCamera || !m_pPhyCamera) {
+        LOGE("Error! Cameras not initialized!");
+        return NO_INIT;
+    }
+
+    // Resolve the primary physical camera backing this logical camera.
+    qcamera_logical_descriptor_t &logCam = m_pLogicalCamera[camera_id];
+    uint32_t phy_id = logCam.pId[logCam.nPrimaryPhyCamIndex];
+    // Call HAL3 getCamInfo to get the flash light info through static metatdata
+    // regardless of HAL version
+    int rc = QCamera3HardwareInterface::getCamInfo(phy_id, info);
+    info->device_version = CAMERA_DEVICE_API_VERSION_1_0; // Hardcode the HAL to HAL1
+    LOGH("X");
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setCallbacks
+ *
+ * DESCRIPTION: set callback functions to send asynchronous notifications to
+ *              frameworks.
+ *
+ * PARAMETERS :
+ *   @callbacks : callback function pointer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraMuxer::setCallbacks(const camera_module_callbacks_t *callbacks)
+{
+    // Reject a null callback table; otherwise cache it for notifications.
+    if (callbacks == NULL) {
+        return BAD_TYPE;
+    }
+    m_pCallbacks = callbacks;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setDataCallback
+ *
+ * DESCRIPTION: set data callback function for snapshots
+ *
+ * PARAMETERS :
+ *   @data_cb : callback function pointer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraMuxer::setDataCallback(camera_data_callback data_cb)
+{
+    // Reject a null snapshot data callback; otherwise cache it.
+    if (data_cb == NULL) {
+        return BAD_TYPE;
+    }
+    mDataCb = data_cb;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setMemoryCallback
+ *
+ * DESCRIPTION: set get memory callback for memory allocations
+ *
+ * PARAMETERS :
+ *   @get_memory : callback function pointer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraMuxer::setMemoryCallback(camera_request_memory get_memory)
+{
+    // Reject a null memory-allocation callback; otherwise cache it.
+    if (get_memory == NULL) {
+        return BAD_TYPE;
+    }
+    mGetMemoryCb = get_memory;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setMpoCallbackCookie
+ *
+ * DESCRIPTION: set mpo callback cookie. will be used for sending final MPO callbacks
+ *                     to framework
+ *
+ * PARAMETERS :
+ *   @mpoCbCookie : callback function pointer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraMuxer::setMpoCallbackCookie(void* mpoCbCookie)
+{
+    // Reject a null cookie; otherwise store it for final MPO callbacks.
+    if (mpoCbCookie == NULL) {
+        return BAD_TYPE;
+    }
+    m_pMpoCallbackCookie = mpoCbCookie;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMpoCallbackCookie
+ *
+ * DESCRIPTION: gets the mpo callback cookie. will be used for sending final MPO callbacks
+ *                     to framework
+ *
+ * PARAMETERS :none
+ *
+ * RETURN     :void ptr to the mpo callback cookie
+ *==========================================================================*/
+void* QCameraMuxer::getMpoCallbackCookie(void)
+{
+    // Cookie previously stored via setMpoCallbackCookie(); may be NULL.
+    return m_pMpoCallbackCookie;
+}
+
+/*===========================================================================
+ * FUNCTION   : setMainJpegCallbackCookie
+ *
+ * DESCRIPTION: set jpeg callback cookie.
+ *                     set to phy cam instance of the primary related cam instance
+ *
+ * PARAMETERS :
+ *   @jpegCbCookie : ptr to jpeg cookie
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraMuxer::setMainJpegCallbackCookie(void* jpegCbCookie)
+{
+    // Reject a null cookie; otherwise store the primary camera's JPEG cookie.
+    if (jpegCbCookie == NULL) {
+        return BAD_TYPE;
+    }
+    m_pJpegCallbackCookie = jpegCbCookie;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMainJpegCallbackCookie
+ *
+ * DESCRIPTION: gets the jpeg callback cookie for primary related cam instance
+ *                     set to phy cam instance of the primary related cam instance
+ *
+ * PARAMETERS :none
+ *
+ * RETURN     :void ptr to the jpeg callback cookie
+ *==========================================================================*/
+void* QCameraMuxer::getMainJpegCallbackCookie(void)
+{
+    // Cookie previously stored via setMainJpegCallbackCookie(); may be NULL.
+    return m_pJpegCallbackCookie;
+}
+
+/*===========================================================================
+ * FUNCTION   : cameraDeviceOpen
+ *
+ * DESCRIPTION: open a camera device with its ID
+ *
+ * PARAMETERS :
+ *   @camera_id : camera ID
+ *   @hw_device : ptr to struct storing camera hardware device info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraMuxer::cameraDeviceOpen(int camera_id,
+        struct hw_device_t **hw_device)
+{
+    int rc = NO_ERROR;
+    uint32_t phyId = 0;
+    qcamera_logical_descriptor_t *cam = NULL;
+
+    // Validate the logical camera id against the enumerated table.
+    if (camera_id < 0 || camera_id >= m_nLogicalCameras) {
+        LOGE("Camera id %d not found!", camera_id);
+        return -ENODEV;
+    }
+
+    if ( NULL == m_pLogicalCamera) {
+        LOGE("Hal descriptor table is not initialized!");
+        return NO_INIT;
+    }
+
+    // Frame sync between related cameras defaults to on; overridable via
+    // the persist.camera.dc.frame.sync property.
+    char prop[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.dc.frame.sync", prop, "1");
+    m_bFrameSyncEnabled = atoi(prop);
+
+    // Get logical camera
+    cam = &m_pLogicalCamera[camera_id];
+
+    if (m_pLogicalCamera[camera_id].device_version ==
+            CAMERA_DEVICE_API_VERSION_1_0) {
+        // HW Dev Holders
+        hw_device_t *hw_dev[cam->numCameras];
+
+        if (m_pPhyCamera[cam->pId[0]].type != CAM_TYPE_MAIN) {
+            LOGE("Physical camera at index 0 is not main!");
+            return UNKNOWN_ERROR;
+        }
+
+        // Open all physical cameras
+        for (uint32_t i = 0; i < cam->numCameras; i++) {
+            phyId = cam->pId[i];
+            QCamera2HardwareInterface *hw =
+                    new QCamera2HardwareInterface((uint32_t)phyId);
+            if (!hw) {
+                LOGE("Allocation of hardware interface failed");
+                return NO_MEMORY;
+            }
+            hw_dev[i] = NULL;
+
+            // Make Camera HWI aware of its mode
+            cam_sync_related_sensors_event_info_t info;
+            info.sync_control = CAM_SYNC_RELATED_SENSORS_ON;
+            info.mode = m_pPhyCamera[phyId].mode;
+            info.type = m_pPhyCamera[phyId].type;
+            rc = hw->setRelatedCamSyncInfo(&info);
+            hw->setFrameSyncEnabled(m_bFrameSyncEnabled);
+            if (rc != NO_ERROR) {
+                LOGE("setRelatedCamSyncInfo failed %d", rc);
+                delete hw;
+                // NOTE(review): physical cameras opened in earlier loop
+                // iterations are not closed on this error path — confirm
+                // whether the caller cleans them up.
+                return rc;
+            }
+
+            rc = hw->openCamera(&hw_dev[i]);
+            if (rc != NO_ERROR) {
+                delete hw;
+                return rc;
+            }
+            // Record the server-side session id used for related-camera bundling.
+            hw->getCameraSessionId(&m_pPhyCamera[phyId].camera_server_id);
+            m_pPhyCamera[phyId].dev = reinterpret_cast<camera_device_t*>(hw_dev[i]);
+            m_pPhyCamera[phyId].hwi = hw;
+            cam->sId[i] = m_pPhyCamera[phyId].camera_server_id;
+            LOGH("camera id %d server id : %d hw device %x, hw %x",
+                     phyId, cam->sId[i], hw_dev[i], hw);
+        }
+    } else {
+        LOGE("Device version for camera id %d invalid %d",
+                 camera_id, m_pLogicalCamera[camera_id].device_version);
+        return BAD_VALUE;
+    }
+
+    // Expose a single camera_device_t for the whole bundle; priv points back
+    // at the logical descriptor so muxer entry points can recover it.
+    cam->dev.common.tag = HARDWARE_DEVICE_TAG;
+    cam->dev.common.version = HARDWARE_DEVICE_API_VERSION(1, 0);
+    cam->dev.common.close = close_camera_device;
+    cam->dev.ops = &mCameraMuxerOps;
+    cam->dev.priv = (void*)cam;
+    *hw_device = &cam->dev.common;
+    return rc;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : getLogicalCamera
+ *
+ * DESCRIPTION: Get logical camera descriptor
+ *
+ * PARAMETERS :
+ *   @device : camera hardware device info
+ *
+ * RETURN     : logical camera descriptor or NULL
+ *==========================================================================*/
+qcamera_logical_descriptor_t* QCameraMuxer::getLogicalCamera(
+        struct camera_device * device)
+{
+    // The logical descriptor is stashed in the device's priv field at open time.
+    if ((device == NULL) || (device->priv == NULL)) {
+        return NULL;
+    }
+    return static_cast<qcamera_logical_descriptor_t*>(device->priv);
+}
+
+/*===========================================================================
+ * FUNCTION   : getPhysicalCamera
+ *
+ * DESCRIPTION: Get physical camera descriptor
+ *
+ * PARAMETERS :
+ *   @log_cam : Logical camera descriptor
+ *   @index : physical camera index
+ *
+ * RETURN     : physical camera descriptor or NULL
+ *==========================================================================*/
+qcamera_physical_descriptor_t* QCameraMuxer::getPhysicalCamera(
+        qcamera_logical_descriptor_t* log_cam, uint32_t index)
+{
+    // Translate a logical-camera slot index into the backing physical entry.
+    return (log_cam != NULL) ? &m_pPhyCamera[log_cam->pId[index]] : NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : getActiveNumOfPhyCam
+ *
+ * DESCRIPTION: Get active physical camera number in Logical Camera
+ *
+ * PARAMETERS :
+ *   @log_cam :   Logical camera descriptor
+ *   @numOfAcitvePhyCam :  number of active physical camera in Logical Camera.
+ *
+ * RETURN     :
+ *                NO_ERROR  : success
+ *                ENODEV : Camera not found
+ *                other: non-zero failure code
+ *==========================================================================*/
+int32_t QCameraMuxer::getActiveNumOfPhyCam(
+        qcamera_logical_descriptor_t* log_cam, int& numOfAcitvePhyCam)
+{
+    // Macro returns an error code if log_cam is NULL.
+    CHECK_CAMERA_ERROR(log_cam);
+
+    // Report how many physical cameras back this logical camera.
+    numOfAcitvePhyCam = log_cam->numCameras;
+    return NO_ERROR;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : sendEvtNotify
+ *
+ * DESCRIPTION: send event notify to HWI for error callbacks
+ *
+ * PARAMETERS :
+ *   @msg_type: msg type to be sent
+ *   @ext1    : optional extension1
+ *   @ext2    : optional extension2
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraMuxer::sendEvtNotify(int32_t msg_type, int32_t ext1,
+        int32_t ext2)
+{
+    LOGH("E");
+
+    CHECK_MUXER_ERROR();
+
+    // Error notifications are routed through the HWI of the camera stored as
+    // the main JPEG callback cookie.
+    qcamera_physical_descriptor_t *pCam =
+            (qcamera_physical_descriptor_t*)(gMuxer->getMainJpegCallbackCookie());
+    CHECK_CAMERA_ERROR(pCam);
+
+    QCamera2HardwareInterface *hwi = pCam->hwi;
+    CHECK_HWI_ERROR(hwi);
+
+    LOGH("X");
+    return hwi->sendEvtNotify(msg_type, ext1, ext2);
+}
+
+/*===========================================================================
+ * FUNCTION   : composeMpo
+ *
+ * DESCRIPTION: Composition of the 2 MPOs
+ *
+ * PARAMETERS : none
+ *   @main_Jpeg: pointer to info to Main Jpeg
+ *   @aux_Jpeg : pointer to info to Aux JPEG
+ *
+  * RETURN : none
+ *==========================================================================*/
+void QCameraMuxer::composeMpo(cam_compose_jpeg_info_t* main_Jpeg,
+        cam_compose_jpeg_info_t* aux_Jpeg)
+{
+    LOGH("E Main Jpeg %p Aux Jpeg %p", main_Jpeg, aux_Jpeg);
+
+    CHECK_MUXER();
+    if(main_Jpeg == NULL || aux_Jpeg == NULL) {
+        LOGE("input buffers invalid, ret = NO_MEMORY");
+        gMuxer->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+        return;
+    }
+
+    pthread_mutex_lock(&m_JpegLock);
+
+    m_pRelCamMpoJpeg = mGetMemoryCb(-1, main_Jpeg->buffer->size +
+            aux_Jpeg->buffer->size, 1, m_pMpoCallbackCookie);
+    if (NULL == m_pRelCamMpoJpeg) {
+        LOGE("getMemory for mpo, ret = NO_MEMORY");
+        gMuxer->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+        pthread_mutex_unlock(&m_JpegLock);
+        return;
+    }
+
+    // fill all structures to send for composition
+    mm_jpeg_mpo_info_t mpo_compose_info;
+    mpo_compose_info.num_of_images = 2;
+    mpo_compose_info.primary_image.buf_filled_len = main_Jpeg->buffer->size;
+    mpo_compose_info.primary_image.buf_vaddr =
+            (uint8_t*)(main_Jpeg->buffer->data);
+    mpo_compose_info.aux_images[0].buf_filled_len = aux_Jpeg->buffer->size;
+    mpo_compose_info.aux_images[0].buf_vaddr =
+            (uint8_t*)(aux_Jpeg->buffer->data);
+    mpo_compose_info.output_buff.buf_vaddr =
+            (uint8_t*)m_pRelCamMpoJpeg->data;
+    mpo_compose_info.output_buff.buf_filled_len = 0;
+    mpo_compose_info.output_buff_size = main_Jpeg->buffer->size +
+            aux_Jpeg->buffer->size;
+
+    LOGD("MPO buffer size %d\n"
+            "expected size %d, mpo_compose_info.output_buff_size %d",
+             m_pRelCamMpoJpeg->size,
+            main_Jpeg->buffer->size + aux_Jpeg->buffer->size,
+            mpo_compose_info.output_buff_size);
+
+    LOGD("MPO primary buffer filled lengths\n"
+            "mpo_compose_info.primary_image.buf_filled_len %d\n"
+            "mpo_compose_info.primary_image.buf_vaddr %p",
+            mpo_compose_info.primary_image.buf_filled_len,
+            mpo_compose_info.primary_image.buf_vaddr);
+
+    LOGD("MPO aux buffer filled lengths\n"
+            "mpo_compose_info.aux_images[0].buf_filled_len %d"
+            "mpo_compose_info.aux_images[0].buf_vaddr %p",
+            mpo_compose_info.aux_images[0].buf_filled_len,
+            mpo_compose_info.aux_images[0].buf_vaddr);
+
+    if(m_bDumpImages) {
+        LOGD("Dumping Main Image for MPO");
+        char buf_main[QCAMERA_MAX_FILEPATH_LENGTH];
+        memset(buf_main, 0, sizeof(buf_main));
+        snprintf(buf_main, sizeof(buf_main),
+                QCAMERA_DUMP_FRM_LOCATION "Main.jpg");
+
+        int file_fd_main = open(buf_main, O_RDWR | O_CREAT, 0777);
+        if (file_fd_main >= 0) {
+            ssize_t written_len = write(file_fd_main,
+                    mpo_compose_info.primary_image.buf_vaddr,
+                    mpo_compose_info.primary_image.buf_filled_len);
+            fchmod(file_fd_main, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
+            LOGD("written number of bytes for main Image %zd\n",
+                     written_len);
+            close(file_fd_main);
+        }
+
+        LOGD("Dumping Aux Image for MPO");
+        char buf_aux[QCAMERA_MAX_FILEPATH_LENGTH];
+        memset(buf_aux, 0, sizeof(buf_aux));
+        snprintf(buf_aux, sizeof(buf_aux),
+                QCAMERA_DUMP_FRM_LOCATION "Aux.jpg");
+
+        int file_fd_aux = open(buf_aux, O_RDWR | O_CREAT, 0777);
+        if (file_fd_aux >= 0) {
+            ssize_t written_len = write(file_fd_aux,
+                    mpo_compose_info.aux_images[0].buf_vaddr,
+                    mpo_compose_info.aux_images[0].buf_filled_len);
+            fchmod(file_fd_aux, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
+            LOGD("written number of bytes for Aux Image %zd\n",
+                     written_len);
+            close(file_fd_aux);
+        }
+    }
+
+    int32_t rc = mJpegMpoOps.compose_mpo(&mpo_compose_info);
+    LOGD("Compose mpo returned %d", rc);
+
+    if(rc != NO_ERROR) {
+        LOGE("ComposeMpo failed, ret = %d", rc);
+        gMuxer->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+        pthread_mutex_unlock(&m_JpegLock);
+        return;
+    }
+
+    if(m_bDumpImages) {
+        char buf_mpo[QCAMERA_MAX_FILEPATH_LENGTH];
+        memset(buf_mpo, 0, sizeof(buf_mpo));
+        snprintf(buf_mpo, sizeof(buf_mpo),
+                QCAMERA_DUMP_FRM_LOCATION "Composed.MPO");
+
+        int file_fd_mpo = open(buf_mpo, O_RDWR | O_CREAT, 0777);
+        if (file_fd_mpo >= 0) {
+            ssize_t written_len = write(file_fd_mpo,
+                    m_pRelCamMpoJpeg->data,
+                    m_pRelCamMpoJpeg->size);
+            fchmod(file_fd_mpo, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
+            LOGD("written number of bytes for MPO Image %zd\n",
+                     written_len);
+            close(file_fd_mpo);
+        }
+    }
+
+    mDataCb(main_Jpeg->msg_type,
+            m_pRelCamMpoJpeg,
+            main_Jpeg->index,
+            main_Jpeg->metadata,
+            m_pMpoCallbackCookie);
+
+    if (NULL != m_pRelCamMpoJpeg) {
+        m_pRelCamMpoJpeg->release(m_pRelCamMpoJpeg);
+        m_pRelCamMpoJpeg = NULL;
+    }
+
+    pthread_mutex_unlock(&m_JpegLock);
+    LOGH("X");
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : matchFrameId
+ *
+ * DESCRIPTION: function to match frame ids within queue nodes
+ *
+ * PARAMETERS :
+ *   @data: pointer to queue node to be matched for condition
+ *   @user_data: caller can add more info here
+ *   @match_data : value to be matched against
+ *
+ * RETURN     : true or false based on whether match was successful or not
+ *==========================================================================*/
+bool QCameraMuxer::matchFrameId(void *data, __unused void *user_data,
+        void *match_data)
+{
+    LOGH("E");
+
+    if (!data || !match_data) {
+        return false;
+    }
+
+    cam_compose_jpeg_info_t * node = (cam_compose_jpeg_info_t *) data;
+    uint32_t frame_idx = *((uint32_t *) match_data);
+    LOGH("X");
+    return node->frame_idx == frame_idx;
+}
+
+/*===========================================================================
+ * FUNCTION   : findPreviousJpegs
+ *
+ * DESCRIPTION: Finds Jpegs in the queue with index less than delivered one
+ *
+ * PARAMETERS :
+ *   @data: pointer to queue node to be matched for condition
+ *   @user_data: caller can add more info here
+ *   @match_data : value to be matched against
+ *
+ * RETURN     : true or false based on whether match was successful or not
+ *==========================================================================*/
+bool QCameraMuxer::findPreviousJpegs(void *data, __unused void *user_data,
+        void *match_data)
+{
+    LOGH("E");
+
+    if (!data || !match_data) {
+        return false;
+    }
+    cam_compose_jpeg_info_t * node = (cam_compose_jpeg_info_t *) data;
+    uint32_t frame_idx = *((uint32_t *) match_data);
+    LOGH("X");
+    return node->frame_idx < frame_idx;
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseJpegInfo
+ *
+ * DESCRIPTION: callback function for the release of individual nodes
+ *                     in the JPEG queues.
+ *
+ * PARAMETERS :
+ *   @data      : ptr to the data to be released
+ *   @user_data : caller can add more info here
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraMuxer::releaseJpegInfo(void *data, __unused void *user_data)
+{
+    LOGH("E");
+
+    cam_compose_jpeg_info_t *jpegInfo = (cam_compose_jpeg_info_t *)data;
+    if(jpegInfo && jpegInfo->release_cb) {
+        if (jpegInfo->release_data != NULL) {
+            jpegInfo->release_cb(jpegInfo->release_data,
+                    jpegInfo->release_cookie,
+                    NO_ERROR);
+        }
+    }
+    LOGH("X");
+}
+
+/*===========================================================================
+ * FUNCTION   : composeMpoRoutine
+ *
+ * DESCRIPTION: specialized thread for MPO composition
+ *
+ * PARAMETERS :
+ *   @data   : pointer to the thread owner
+ *
+ * RETURN     : void* to thread
+ *==========================================================================*/
void* QCameraMuxer::composeMpoRoutine(__unused void *data)
{
    LOGH("E");
    if (!gMuxer) {
        LOGE("Error getting muxer ");
        return NULL;
    }

    int running = 1;
    int ret;
    uint8_t is_active = FALSE;
    QCameraCmdThread *cmdThread = &gMuxer->m_ComposeMpoTh;
    cmdThread->setName("CAM_ComposeMpo");

    do {
        do {
            // Block until a command is posted. Errors other than EINVAL
            // are fatal for the thread; on EINVAL the wait is retried.
            ret = cam_sem_wait(&cmdThread->cmd_sem);
            if (ret != 0 && errno != EINVAL) {
                LOGE("cam_sem_wait error (%s)", strerror(errno));
                return NULL;
            }
        } while (ret != 0);

        // we got notified about new cmd avail in cmd queue
        camera_cmd_type_t cmd = cmdThread->getCmd();
        switch (cmd) {
        case CAMERA_CMD_TYPE_START_DATA_PROC:
            {
                LOGH("start ComposeMpo processing");
                is_active = TRUE;

                // signal cmd is completed
                cam_sem_post(&cmdThread->sync_sem);
            }
            break;
        case CAMERA_CMD_TYPE_STOP_DATA_PROC:
            {
                LOGH("stop ComposeMpo processing");
                is_active = FALSE;

                // signal cmd is completed
                cam_sem_post(&cmdThread->sync_sem);
            }
            break;
        case CAMERA_CMD_TYPE_DO_NEXT_JOB:
            {
                if (is_active == TRUE) {
                    LOGH("Mpo Composition Requested");
                    cam_compose_jpeg_info_t *main_jpeg_node = NULL;
                    cam_compose_jpeg_info_t *aux_jpeg_node = NULL;
                    bool foundMatch = false;
                    // Pair main/aux JPEGs head-to-head while both queues
                    // have entries; each successful pair is composed into
                    // one MPO. NOTE(review): pairing is purely positional
                    // here — frame_idx is logged but not matched; confirm
                    // upstream enqueue order guarantees frame sync.
                    while (!gMuxer->m_MainJpegQ.isEmpty() &&
                            !gMuxer->m_AuxJpegQ.isEmpty()) {
                        main_jpeg_node = (cam_compose_jpeg_info_t *)
                                gMuxer->m_MainJpegQ.dequeue();
                        if (main_jpeg_node != NULL) {
                            LOGD("main_jpeg_node found frame idx %d"
                                    "ptr %p buffer_ptr %p buffer_size %d",
                                     main_jpeg_node->frame_idx,
                                    main_jpeg_node,
                                    main_jpeg_node->buffer->data,
                                    main_jpeg_node->buffer->size);
                            // find matching aux node in Aux Jpeg Queue
                            aux_jpeg_node =
                                    (cam_compose_jpeg_info_t *) gMuxer->
                                    m_AuxJpegQ.dequeue();
                            if (aux_jpeg_node != NULL) {
                                LOGD("aux_jpeg_node found frame idx %d"
                                        "ptr %p buffer_ptr %p buffer_size %d",
                                         aux_jpeg_node->frame_idx,
                                        aux_jpeg_node,
                                        aux_jpeg_node->buffer->data,
                                        aux_jpeg_node->buffer->size);
                                foundMatch = true;
                                // start MPO composition
                                gMuxer->composeMpo(main_jpeg_node,
                                        aux_jpeg_node);
                            }
                        }
                        // Whether or not composition ran, release each
                        // dequeued node's underlying buffer and free the
                        // node itself so nothing leaks on a miss.
                        if (main_jpeg_node != NULL) {
                            if ( main_jpeg_node->release_cb ) {
                                main_jpeg_node->release_cb(
                                        main_jpeg_node->release_data,
                                        main_jpeg_node->release_cookie,
                                        NO_ERROR);
                            }
                            free(main_jpeg_node);
                            main_jpeg_node = NULL;
                        } else {
                            LOGH("Mpo Match not found");
                        }
                        if (aux_jpeg_node != NULL) {
                            if (aux_jpeg_node->release_cb) {
                                aux_jpeg_node->release_cb(
                                        aux_jpeg_node->release_data,
                                        aux_jpeg_node->release_cookie,
                                        NO_ERROR);
                            }
                            free(aux_jpeg_node);
                            aux_jpeg_node = NULL;
                        } else {
                            LOGH("Mpo Match not found");
                        }
                    }
                }
            // NOTE(review): this break sits inside the case's compound
            // block, before its closing brace; it still exits the switch.
            break;
            }
        case CAMERA_CMD_TYPE_EXIT:
            LOGH("ComposeMpo thread exit");
            running = 0;
            break;
        default:
            break;
        }
    } while (running);
    LOGH("X");
    return NULL;
}
+
+/*===========================================================================
+ * FUNCTION   : jpeg_data_callback
+ *
+ * DESCRIPTION: JPEG data callback for snapshot
+ *
+ * PARAMETERS :
+ *   @msg_type : callback msg type
+ *   @data : data ptr of the buffer
+ *   @index : index of the frame
+ *   @metadata : metadata associated with the buffer
+ *   @user : callback cookie returned back to the user
+ *   @frame_idx : frame index for matching frames
+ *   @release_cb : callback function for releasing the data memory
+ *   @release_cookie : cookie for the release callback function
+ *   @release_data :pointer indicating what needs to be released
+ *
+ * RETURN : none
+ *==========================================================================*/
+void QCameraMuxer::jpeg_data_callback(int32_t msg_type,
+           const camera_memory_t *data, unsigned int index,
+           camera_frame_metadata_t *metadata, void *user,
+           uint32_t frame_idx, camera_release_callback release_cb,
+           void *release_cookie, void *release_data)
+{
+    LOGH("E");
+    CHECK_MUXER();
+
+    if(data != NULL) {
+        LOGH("jpeg received: data %p size %d data ptr %p frameIdx %d",
+                 data, data->size, data->data, frame_idx);
+        int rc = gMuxer->storeJpeg(((qcamera_physical_descriptor_t*)(user))->type,
+                msg_type, data, index, metadata, user, frame_idx, release_cb,
+                release_cookie, release_data);
+        if(rc != NO_ERROR) {
+            gMuxer->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+        }
+    } else {
+        gMuxer->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+    }
+    LOGH("X");
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : storeJpeg
+ *
+ * DESCRIPTION: Stores jpegs from multiple related cam instances into a common Queue
+ *
+ * PARAMETERS :
+ *   @cam_type : indicates whether main or aux camera sent the Jpeg callback
+ *   @msg_type : callback msg type
+ *   @data : data ptr of the buffer
+ *   @index : index of the frame
+ *   @metadata : metadata associated with the buffer
+ *   @user : callback cookie returned back to the user
+ *   @frame_idx : frame index for matching frames
+ *   @release_cb : callback function for releasing the data memory
+ *   @release_cookie : cookie for the release callback function
+ *   @release_data :pointer indicating what needs to be released
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraMuxer::storeJpeg(cam_sync_type_t cam_type,
+        int32_t msg_type, const camera_memory_t *data, unsigned int index,
+        camera_frame_metadata_t *metadata, void *user,uint32_t frame_idx,
+        camera_release_callback release_cb, void *release_cookie,
+        void *release_data)
+{
+    LOGH("E jpeg received: data %p size %d data ptr %p frameIdx %d",
+             data, data->size, data->data, frame_idx);
+
+    CHECK_MUXER_ERROR();
+
+    if (!m_bMpoEnabled) {
+        if (cam_type == CAM_TYPE_MAIN) {
+            // send data callback only incase of main camera
+            // aux image is ignored and released back
+            mDataCb(msg_type,
+                    data,
+                    index,
+                    metadata,
+                    m_pMpoCallbackCookie);
+        }
+        if (release_cb) {
+            release_cb(release_data, release_cookie, NO_ERROR);
+        }
+        LOGH("X");
+        return NO_ERROR;
+    }
+
+    cam_compose_jpeg_info_t* pJpegFrame =
+            (cam_compose_jpeg_info_t*)malloc(sizeof(cam_compose_jpeg_info_t));
+    if (!pJpegFrame) {
+        LOGE("Allocation failed for MPO nodes");
+        return NO_MEMORY;
+    }
+    memset(pJpegFrame, 0, sizeof(*pJpegFrame));
+
+    pJpegFrame->msg_type = msg_type;
+    pJpegFrame->buffer = const_cast<camera_memory_t*>(data);
+    pJpegFrame->index = index;
+    pJpegFrame->metadata = metadata;
+    pJpegFrame->user = user;
+    pJpegFrame->valid = true;
+    pJpegFrame->frame_idx = frame_idx;
+    pJpegFrame->release_cb = release_cb;
+    pJpegFrame->release_cookie = release_cookie;
+    pJpegFrame->release_data = release_data;
+    if(cam_type == CAM_TYPE_MAIN) {
+        if (m_MainJpegQ.enqueue((void *)pJpegFrame)) {
+            LOGD("Main FrameIdx %d", pJpegFrame->frame_idx);
+            if (m_MainJpegQ.getCurrentSize() > 0) {
+                LOGD("Trigger Compose");
+                m_ComposeMpoTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+            }
+        } else {
+            LOGE("Enqueue Failed for Main Jpeg Q");
+            if ( pJpegFrame->release_cb ) {
+                // release other buffer also here
+                pJpegFrame->release_cb(
+                        pJpegFrame->release_data,
+                        pJpegFrame->release_cookie,
+                        NO_ERROR);
+            }
+            free(pJpegFrame);
+            pJpegFrame = NULL;
+            return NO_MEMORY;
+        }
+
+    } else {
+        if (m_AuxJpegQ.enqueue((void *)pJpegFrame)) {
+            LOGD("Aux FrameIdx %d", pJpegFrame->frame_idx);
+            if (m_AuxJpegQ.getCurrentSize() > 0) {
+                LOGD("Trigger Compose");
+                m_ComposeMpoTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+            }
+        } else {
+            LOGE("Enqueue Failed for Aux Jpeg Q");
+            if ( pJpegFrame->release_cb ) {
+                // release other buffer also here
+                pJpegFrame->release_cb(
+                        pJpegFrame->release_data,
+                        pJpegFrame->release_cookie,
+                        NO_ERROR);
+            }
+            free(pJpegFrame);
+            pJpegFrame = NULL;
+            return NO_MEMORY;
+        }
+    }
+    LOGH("X");
+
+    return NO_ERROR;
+}
+
+
// Muxer Ops
// Static dispatch table handed to the camera service: every entry routes a
// camera_device_ops_t hook to the corresponding QCameraMuxer static method.
camera_device_ops_t QCameraMuxer::mCameraMuxerOps = {
    .set_preview_window =        QCameraMuxer::set_preview_window,
    .set_callbacks =             QCameraMuxer::set_callBacks,
    .enable_msg_type =           QCameraMuxer::enable_msg_type,
    .disable_msg_type =          QCameraMuxer::disable_msg_type,
    .msg_type_enabled =          QCameraMuxer::msg_type_enabled,

    .start_preview =             QCameraMuxer::start_preview,
    .stop_preview =              QCameraMuxer::stop_preview,
    .preview_enabled =           QCameraMuxer::preview_enabled,
    .store_meta_data_in_buffers= QCameraMuxer::store_meta_data_in_buffers,

    .start_recording =           QCameraMuxer::start_recording,
    .stop_recording =            QCameraMuxer::stop_recording,
    .recording_enabled =         QCameraMuxer::recording_enabled,
    .release_recording_frame =   QCameraMuxer::release_recording_frame,

    .auto_focus =                QCameraMuxer::auto_focus,
    .cancel_auto_focus =         QCameraMuxer::cancel_auto_focus,

    .take_picture =              QCameraMuxer::take_picture,
    .cancel_picture =            QCameraMuxer::cancel_picture,

    .set_parameters =            QCameraMuxer::set_parameters,
    .get_parameters =            QCameraMuxer::get_parameters,
    .put_parameters =            QCameraMuxer::put_parameters,
    .send_command =              QCameraMuxer::send_command,

    .release =                   QCameraMuxer::release,
    .dump =                      QCameraMuxer::dump,
};
+
+
+}; // namespace android
diff --git a/msmcobalt/QCamera2/HAL/QCameraMuxer.h b/msmcobalt/QCamera2/HAL/QCameraMuxer.h
new file mode 100644
index 0000000..a85612e
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/QCameraMuxer.h
@@ -0,0 +1,284 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#ifndef __QCAMERAMUXER_H__
+#define __QCAMERAMUXER_H__
+
+#include "camera.h"
+#include "QCamera2HWI.h"
+#include "QCamera3HWI.h"
+
+namespace qcamera {
+
+/* Struct@ qcamera_physical_descriptor_t
+ *
+ *  Description@ This structure specifies various attributes
+ *      physical cameras enumerated on the device
+ */
typedef struct {
    // Userspace Physical Camera ID
    uint32_t id;
    // Server Camera ID
    uint32_t camera_server_id;
    // Device version
    uint32_t device_version;
    // Specifies type of camera (e.g. main vs aux within a related-camera
    // bundle; see cam_sync_type_t)
    cam_sync_type_t type;
    // Specifies mode of Camera
    cam_sync_mode_t mode;
    // Camera Info
    camera_info cam_info;
    // Reference to HWI (not owned by this descriptor)
    QCamera2HardwareInterface *hwi;
    // Reference to camera device structure
    camera_device_t* dev;
} qcamera_physical_descriptor_t;
+
+/* Struct@ qcamera_logical_descriptor_t
+ *
+ *  Description@ This structure stores information about logical cameras
+ *      and corresponding data of the physical camera that are part of
+ *      this logical camera
+ */
typedef struct {
    // Camera Device to be shared to Frameworks
    camera_device_t dev;
    // Device version
    uint32_t device_version;
    // Logical Camera ID
    uint32_t id;
    // Logical Camera Facing
    int32_t facing;
    // Number of Physical camera present in this logical camera
    // (valid entries in the per-camera arrays below)
    uint32_t numCameras;
    // To signify if the LINK/UNLINK established between physical cameras
    bool bSyncOn;
    // index of the primary physical camera session in the bundle
    uint8_t nPrimaryPhyCamIndex;
    // Signifies Physical Camera ID of each camera
    uint32_t pId[MAX_NUM_CAMERA_PER_BUNDLE];
    // Signifies server camera ID of each camera
    uint32_t sId[MAX_NUM_CAMERA_PER_BUNDLE];
    // Signifies type of each camera
    cam_sync_type_t type[MAX_NUM_CAMERA_PER_BUNDLE];
    // Signifies mode of each camera
    cam_sync_mode_t mode[MAX_NUM_CAMERA_PER_BUNDLE];
} qcamera_logical_descriptor_t;
+
+/* Struct@ cam_compose_jpeg_info_t
+ *
+ *  Description@ This structure stores information about individual Jpeg images
+ *  received from multiple related physical camera instances. These images would then be
+ *  composed together into a single MPO image later.
+ */
typedef struct {
    // msg_type is same as data callback msg_type
    int32_t msg_type;
    // ptr to actual data buffer (not owned; released via release_cb)
    camera_memory_t *buffer;
    // index of the buffer same as received in data callback
    unsigned int index;
    // metadata associated with the buffer
    camera_frame_metadata_t *metadata;
    // user contains the caller's identity
    // this contains a reference to the physical cam structure
    // of the HWI instance which had requested for this data buffer
    void *user;
    // this indicates validity of the buffer
    // this flag is used by multiple threads to check validity of
    // Jpegs received by other threads
    bool valid;
    // frame id of the Jpeg. this is needed for frame sync between aux
    // and main camera sessions
    uint32_t frame_idx;
    // release callback function to release this Jpeg memory later after
    // composition is completed
    camera_release_callback release_cb;
    // cookie for the release callback function
    void *release_cookie;
    // release data info for what needs to be released
    void *release_data;
}cam_compose_jpeg_info_t;
+
+/* Class@ QCameraMuxer
+ *
+ * Description@ Muxer interface
+ *    a) Manages the grouping of the physical cameras into a logical camera
+ *    b) Muxes the operational calls from Frameworks to HWI
+ *    c) Composes MPO from JPEG
+ */
class QCameraMuxer {

public:
    /* Public Methods   */
    QCameraMuxer(uint32_t num_of_cameras);
    virtual ~QCameraMuxer();
    // Singleton-style accessor: creates/returns the global muxer instance.
    static void getCameraMuxer(QCameraMuxer** pCamMuxer,
            uint32_t num_of_cameras);
    static int get_number_of_cameras();
    static int get_camera_info(int camera_id, struct camera_info *info);
    static int set_callbacks(const camera_module_callbacks_t *callbacks);
    static int open_legacy(const struct hw_module_t* module,
            const char* id, uint32_t halVersion, struct hw_device_t** device);

    static int camera_device_open(const struct hw_module_t* module,
            const char* id,
            struct hw_device_t** device);
    static int close_camera_device( hw_device_t *);

    /* Operation methods directly accessed by Camera Service */
    static camera_device_ops_t mCameraMuxerOps;

    /* Start of operational methods */
    static int set_preview_window(struct camera_device *,
            struct preview_stream_ops *window);
    static void set_callBacks(struct camera_device *,
            camera_notify_callback notify_cb,
            camera_data_callback data_cb,
            camera_data_timestamp_callback data_cb_timestamp,
            camera_request_memory get_memory,
            void *user);
    static void enable_msg_type(struct camera_device *, int32_t msg_type);
    static void disable_msg_type(struct camera_device *, int32_t msg_type);
    static int msg_type_enabled(struct camera_device *, int32_t msg_type);
    static int start_preview(struct camera_device *);
    static void stop_preview(struct camera_device *);
    static int preview_enabled(struct camera_device *);
    static int store_meta_data_in_buffers(struct camera_device *,
            int enable);
    static int start_recording(struct camera_device *);
    static void stop_recording(struct camera_device *);
    static int recording_enabled(struct camera_device *);
    static void release_recording_frame(struct camera_device *,
              const void *opaque);
    static int auto_focus(struct camera_device *);
    static int cancel_auto_focus(struct camera_device *);
    static int take_picture(struct camera_device *);
    static int cancel_picture(struct camera_device *);
    static int set_parameters(struct camera_device *, const char *parms);
    static char* get_parameters(struct camera_device *);
    static void put_parameters(struct camera_device *, char *);
    static int send_command(struct camera_device *,
          int32_t cmd, int32_t arg1, int32_t arg2);
    static void release(struct camera_device *);
    static int dump(struct camera_device *, int fd);
    /* End of operational methods */

    // Jpeg data callback registered with each HWI instance; stores jpegs
    // into the per-session queues for MPO composition.
    static void jpeg_data_callback(int32_t msg_type,
            const camera_memory_t *data, unsigned int index,
            camera_frame_metadata_t *metadata, void *user,
            uint32_t frame_idx, camera_release_callback release_cb,
            void *release_cookie, void *release_data);
    // add notify error msgs to the notifer queue of the primary related cam instance
    static int32_t sendEvtNotify(int32_t msg_type, int32_t ext1, int32_t ext2);
    // function to compose all JPEG images from all physical related camera instances
    void composeMpo(cam_compose_jpeg_info_t* main_Jpeg,
        cam_compose_jpeg_info_t* aux_Jpeg);
    // dedicated thread entry point for MPO composition
    static void* composeMpoRoutine(void* data);
    // queue-match predicates and release helper used with QCameraQueue
    static bool matchFrameId(void *data, void *user_data, void *match_data);
    static bool findPreviousJpegs(void *data, void *user_data, void *match_data);
    static void releaseJpegInfo(void *data, void *user_data);

public:
    /* Public Members  Variables   */
    // Jpeg and Mpo ops need to be shared between 2 HWI instances
    // hence these are cached in the muxer alongwith Jpeg handle
    mm_jpeg_ops_t mJpegOps;
    mm_jpeg_mpo_ops_t mJpegMpoOps;
    uint32_t mJpegClientHandle;
    // Stores Camera Data Callback function
    camera_data_callback mDataCb;
    // Stores Camera GetMemory Callback function
    camera_request_memory mGetMemoryCb;

private:
    /* Private Member Variables  */
    qcamera_physical_descriptor_t *m_pPhyCamera;
    qcamera_logical_descriptor_t *m_pLogicalCamera;
    const camera_module_callbacks_t *m_pCallbacks;
    bool m_bAuxCameraExposed;
    uint8_t m_nPhyCameras;
    uint8_t m_nLogicalCameras;

    // Main Camera session Jpeg Queue
    QCameraQueue m_MainJpegQ;
    // Aux Camera session Jpeg Queue
    QCameraQueue m_AuxJpegQ;
    // thread for mpo composition
    QCameraCmdThread m_ComposeMpoTh;
    // Final Mpo Jpeg Buffer
    camera_memory_t *m_pRelCamMpoJpeg;
    // Lock needed to synchronize between multiple composition requests
    pthread_mutex_t m_JpegLock;
    // this callback cookie would be used for sending Final mpo Jpeg to the framework
    void *m_pMpoCallbackCookie;
    // this callback cookie would be used for caching main related cam phy instance
    // this is needed for error scenarios
    // incase of error, we use this cookie to get HWI instance and send errors in notify cb
    void *m_pJpegCallbackCookie;
    // flag to indicate whether we need to dump dual camera snapshots
    bool m_bDumpImages;
    // flag to indicate whether MPO is enabled or not
    bool m_bMpoEnabled;
    // Signifies if frame sync is enabled
    bool m_bFrameSyncEnabled;
    // flag to indicate whether recording hint is internally set.
    bool m_bRecordingHintInternallySet;

    /* Private Member Methods */
    int setupLogicalCameras();
    int cameraDeviceOpen(int camera_id, struct hw_device_t **hw_device);
    int getNumberOfCameras();
    int getCameraInfo(int camera_id, struct camera_info *info,
            cam_sync_type_t *p_cam_type);
    int32_t setCallbacks(const camera_module_callbacks_t *callbacks);
    int32_t setDataCallback(camera_data_callback data_cb);
    int32_t setMemoryCallback(camera_request_memory get_memory);
    qcamera_logical_descriptor_t* getLogicalCamera(
            struct camera_device * device);
    qcamera_physical_descriptor_t* getPhysicalCamera(
            qcamera_logical_descriptor_t* log_cam, uint32_t index);
    int32_t getActiveNumOfPhyCam(
            qcamera_logical_descriptor_t* log_cam, int& numOfAcitvePhyCam);
    int32_t setMpoCallbackCookie(void* mpoCbCookie);
    void* getMpoCallbackCookie();
    int32_t setMainJpegCallbackCookie(void* jpegCbCookie);
    void* getMainJpegCallbackCookie();
    void setJpegHandle(uint32_t handle) { mJpegClientHandle = handle;};
    // function to store single JPEG from 1 related physical camera instance
    int32_t storeJpeg(cam_sync_type_t cam_type, int32_t msg_type,
            const camera_memory_t *data, unsigned int index,
            camera_frame_metadata_t *metadata, void *user,
            uint32_t frame_idx, camera_release_callback release_cb,
            void *release_cookie, void *release_data);

};// End of class QCameraMuxer (namespace qcamera closes below)
+
+}
+#endif /* __QCAMERAMUXER_H__ */
+
diff --git a/msmcobalt/QCamera2/HAL/QCameraParameters.cpp b/msmcobalt/QCamera2/HAL/QCameraParameters.cpp
new file mode 100644
index 0000000..c403574
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/QCameraParameters.cpp
@@ -0,0 +1,14378 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#define LOG_TAG "QCameraParameters"
+
+// To remove
+#include <cutils/properties.h>
+
+// System dependencies
+#include <math.h>
+#include <string.h>
+#include <stdlib.h>
+#include <utils/Errors.h>
+#define SYSINFO_H <SYSTEM_HEADER_PREFIX/sysinfo.h>
+#include SYSINFO_H
+#include "gralloc_priv.h"
+#include "graphics.h"
+
+// Camera dependencies
+#include "QCameraBufferMaps.h"
+#include "QCamera2HWI.h"
+#include "QCameraParameters.h"
+#include "QCameraTrace.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+#define PI 3.14159265
+#define ASPECT_TOLERANCE 0.001 // presumably max aspect-ratio delta treated as a match — confirm at use sites
+#define CAMERA_DEFAULT_LONGSHOT_STAGES 4
+#define CAMERA_MIN_LONGSHOT_STAGES 2
+#define FOCUS_PERCISION 0.0000001 // NOTE(review): "PERCISION" is a long-standing typo for "precision"; symbol kept as-is since it is referenced elsewhere in this file
+
+
+namespace qcamera {
+// Parameter keys to communicate between camera application and driver.
+const char QCameraParameters::KEY_QC_SUPPORTED_HFR_SIZES[] = "hfr-size-values";
+const char QCameraParameters::KEY_QC_PREVIEW_FRAME_RATE_MODE[] = "preview-frame-rate-mode";
+const char QCameraParameters::KEY_QC_SUPPORTED_PREVIEW_FRAME_RATE_MODES[] = "preview-frame-rate-modes";
+const char QCameraParameters::KEY_QC_PREVIEW_FRAME_RATE_AUTO_MODE[] = "frame-rate-auto";
+const char QCameraParameters::KEY_QC_PREVIEW_FRAME_RATE_FIXED_MODE[] = "frame-rate-fixed";
+const char QCameraParameters::KEY_QC_TOUCH_AF_AEC[] = "touch-af-aec";
+const char QCameraParameters::KEY_QC_SUPPORTED_TOUCH_AF_AEC[] = "touch-af-aec-values";
+const char QCameraParameters::KEY_QC_TOUCH_INDEX_AEC[] = "touch-index-aec";
+const char QCameraParameters::KEY_QC_TOUCH_INDEX_AF[] = "touch-index-af";
+const char QCameraParameters::KEY_QC_SCENE_DETECT[] = "scene-detect";
+const char QCameraParameters::KEY_QC_SUPPORTED_SCENE_DETECT[] = "scene-detect-values";
+const char QCameraParameters::KEY_QC_ISO_MODE[] = "iso";
+const char QCameraParameters::KEY_QC_CONTINUOUS_ISO[] = "continuous-iso";
+const char QCameraParameters::KEY_QC_MIN_ISO[] = "min-iso";
+const char QCameraParameters::KEY_QC_MAX_ISO[] = "max-iso";
+const char QCameraParameters::KEY_QC_SUPPORTED_ISO_MODES[] = "iso-values";
+const char QCameraParameters::KEY_QC_EXPOSURE_TIME[] = "exposure-time";
+const char QCameraParameters::KEY_QC_MIN_EXPOSURE_TIME[] = "min-exposure-time";
+const char QCameraParameters::KEY_QC_MAX_EXPOSURE_TIME[] = "max-exposure-time";
+const char QCameraParameters::KEY_QC_CURRENT_EXPOSURE_TIME[] = "cur-exposure-time";
+const char QCameraParameters::KEY_QC_CURRENT_ISO[] = "cur-iso";
+const char QCameraParameters::KEY_QC_LENSSHADE[] = "lensshade";
+const char QCameraParameters::KEY_QC_SUPPORTED_LENSSHADE_MODES[] = "lensshade-values";
+const char QCameraParameters::KEY_QC_AUTO_EXPOSURE[] = "auto-exposure";
+const char QCameraParameters::KEY_QC_SUPPORTED_AUTO_EXPOSURE[] = "auto-exposure-values";
+const char QCameraParameters::KEY_QC_DENOISE[] = "denoise";
+const char QCameraParameters::KEY_QC_SUPPORTED_DENOISE[] = "denoise-values";
+const char QCameraParameters::KEY_QC_FOCUS_ALGO[] = "selectable-zone-af";
+const char QCameraParameters::KEY_QC_SUPPORTED_FOCUS_ALGOS[] = "selectable-zone-af-values";
+const char QCameraParameters::KEY_QC_MANUAL_FOCUS_POSITION[] = "manual-focus-position";
+const char QCameraParameters::KEY_QC_MANUAL_FOCUS_POS_TYPE[] = "manual-focus-pos-type";
+const char QCameraParameters::KEY_QC_MIN_FOCUS_POS_INDEX[] = "min-focus-pos-index";
+const char QCameraParameters::KEY_QC_MAX_FOCUS_POS_INDEX[] = "max-focus-pos-index";
+const char QCameraParameters::KEY_QC_MIN_FOCUS_POS_DAC[] = "min-focus-pos-dac";
+const char QCameraParameters::KEY_QC_MAX_FOCUS_POS_DAC[] = "max-focus-pos-dac";
+const char QCameraParameters::KEY_QC_MIN_FOCUS_POS_RATIO[] = "min-focus-pos-ratio";
+const char QCameraParameters::KEY_QC_MAX_FOCUS_POS_RATIO[] = "max-focus-pos-ratio";
+const char QCameraParameters::KEY_QC_FOCUS_POSITION_SCALE[] = "cur-focus-scale";
+const char QCameraParameters::KEY_QC_MIN_FOCUS_POS_DIOPTER[] = "min-focus-pos-diopter";
+const char QCameraParameters::KEY_QC_MAX_FOCUS_POS_DIOPTER[] = "max-focus-pos-diopter";
+const char QCameraParameters::KEY_QC_FOCUS_POSITION_DIOPTER[] = "cur-focus-diopter";
+const char QCameraParameters::KEY_QC_FACE_DETECTION[] = "face-detection";
+const char QCameraParameters::KEY_QC_SUPPORTED_FACE_DETECTION[] = "face-detection-values";
+const char QCameraParameters::KEY_QC_FACE_RECOGNITION[] = "face-recognition";
+const char QCameraParameters::KEY_QC_SUPPORTED_FACE_RECOGNITION[] = "face-recognition-values";
+const char QCameraParameters::KEY_QC_MEMORY_COLOR_ENHANCEMENT[] = "mce";
+const char QCameraParameters::KEY_QC_SUPPORTED_MEM_COLOR_ENHANCE_MODES[] = "mce-values";
+const char QCameraParameters::KEY_QC_DIS[] = "dis";
+const char QCameraParameters::KEY_QC_OIS[] = "ois";
+const char QCameraParameters::KEY_QC_SUPPORTED_DIS_MODES[] = "dis-values";
+const char QCameraParameters::KEY_QC_SUPPORTED_OIS_MODES[] = "ois-values";
+const char QCameraParameters::KEY_QC_VIDEO_HIGH_FRAME_RATE[] = "video-hfr";
+const char QCameraParameters::KEY_QC_VIDEO_HIGH_SPEED_RECORDING[] = "video-hsr";
+const char QCameraParameters::KEY_QC_SUPPORTED_VIDEO_HIGH_FRAME_RATE_MODES[] = "video-hfr-values";
+const char QCameraParameters::KEY_QC_REDEYE_REDUCTION[] = "redeye-reduction";
+const char QCameraParameters::KEY_QC_SUPPORTED_REDEYE_REDUCTION[] = "redeye-reduction-values";
+const char QCameraParameters::KEY_QC_HIGH_DYNAMIC_RANGE_IMAGING[] = "hdr";
+const char QCameraParameters::KEY_QC_SUPPORTED_HDR_IMAGING_MODES[] = "hdr-values";
+const char QCameraParameters::KEY_QC_ZSL[] = "zsl";
+const char QCameraParameters::KEY_QC_SUPPORTED_ZSL_MODES[] = "zsl-values";
+const char QCameraParameters::KEY_QC_ZSL_BURST_INTERVAL[] = "capture-burst-interval";
+const char QCameraParameters::KEY_QC_ZSL_BURST_LOOKBACK[] = "capture-burst-retroactive";
+const char QCameraParameters::KEY_QC_ZSL_QUEUE_DEPTH[] = "capture-burst-queue-depth";
+const char QCameraParameters::KEY_QC_CAMERA_MODE[] = "camera-mode";
+const char QCameraParameters::KEY_QC_AE_BRACKET_HDR[] = "ae-bracket-hdr";
+const char QCameraParameters::KEY_QC_SUPPORTED_AE_BRACKET_MODES[] = "ae-bracket-hdr-values";
+const char QCameraParameters::KEY_QC_SUPPORTED_RAW_FORMATS[] = "raw-format-values";
+const char QCameraParameters::KEY_QC_RAW_FORMAT[] = "raw-format";
+const char QCameraParameters::KEY_QC_ORIENTATION[] = "orientation";
+const char QCameraParameters::KEY_QC_SELECTABLE_ZONE_AF[] = "selectable-zone-af";
+const char QCameraParameters::KEY_QC_CAPTURE_BURST_EXPOSURE[] = "capture-burst-exposures";
+const char QCameraParameters::KEY_QC_NUM_SNAPSHOT_PER_SHUTTER[] = "num-snaps-per-shutter";
+const char QCameraParameters::KEY_QC_NUM_RETRO_BURST_PER_SHUTTER[] = "num-retro-burst-per-shutter";
+const char QCameraParameters::KEY_QC_SNAPSHOT_BURST_LED_ON_PERIOD[] = "zsl-burst-led-on-period";
+const char QCameraParameters::KEY_QC_NO_DISPLAY_MODE[] = "no-display-mode";
+const char QCameraParameters::KEY_QC_RAW_PICUTRE_SIZE[] = "raw-size"; // NOTE(review): "PICUTRE" typo is in the C++ symbol only; the app-visible key string "raw-size" is correct and must not change
+const char QCameraParameters::KEY_QC_SUPPORTED_SKIN_TONE_ENHANCEMENT_MODES[] = "skinToneEnhancement-values";
+const char QCameraParameters::KEY_QC_SUPPORTED_LIVESNAPSHOT_SIZES[] = "supported-live-snapshot-sizes";
+const char QCameraParameters::KEY_QC_SUPPORTED_HDR_NEED_1X[] = "hdr-need-1x-values";
+const char QCameraParameters::KEY_QC_HDR_NEED_1X[] = "hdr-need-1x";
+const char QCameraParameters::KEY_QC_PREVIEW_FLIP[] = "preview-flip";
+const char QCameraParameters::KEY_QC_VIDEO_FLIP[] = "video-flip";
+const char QCameraParameters::KEY_QC_SNAPSHOT_PICTURE_FLIP[] = "snapshot-picture-flip";
+const char QCameraParameters::KEY_QC_SUPPORTED_FLIP_MODES[] = "flip-mode-values";
+const char QCameraParameters::KEY_QC_VIDEO_HDR[] = "video-hdr";
+const char QCameraParameters::KEY_QC_SENSOR_HDR[] = "sensor-hdr";
+const char QCameraParameters::KEY_QC_VT_ENABLE[] = "avtimer";
+const char QCameraParameters::KEY_QC_SUPPORTED_VIDEO_HDR_MODES[] = "video-hdr-values";
+const char QCameraParameters::KEY_QC_SUPPORTED_SENSOR_HDR_MODES[] = "sensor-hdr-values";
+const char QCameraParameters::KEY_QC_AUTO_HDR_ENABLE [] = "auto-hdr-enable";
+const char QCameraParameters::KEY_QC_SNAPSHOT_BURST_NUM[] = "snapshot-burst-num";
+const char QCameraParameters::KEY_QC_SNAPSHOT_FD_DATA[] = "snapshot-fd-data-enable";
+const char QCameraParameters::KEY_QC_TINTLESS_ENABLE[] = "tintless";
+const char QCameraParameters::KEY_QC_SCENE_SELECTION[] = "scene-selection";
+const char QCameraParameters::KEY_QC_CDS_MODE[] = "cds-mode";
+const char QCameraParameters::KEY_QC_VIDEO_CDS_MODE[] = "video-cds-mode";
+const char QCameraParameters::KEY_QC_SUPPORTED_CDS_MODES[] = "cds-mode-values";
+const char QCameraParameters::KEY_QC_SUPPORTED_VIDEO_CDS_MODES[] = "video-cds-mode-values";
+const char QCameraParameters::KEY_QC_TNR_MODE[] = "tnr-mode";
+const char QCameraParameters::KEY_QC_VIDEO_TNR_MODE[] = "video-tnr-mode";
+const char QCameraParameters::KEY_QC_SUPPORTED_TNR_MODES[] = "tnr-mode-values";
+const char QCameraParameters::KEY_QC_SUPPORTED_VIDEO_TNR_MODES[] = "video-tnr-mode-values";
+const char QCameraParameters::KEY_QC_VIDEO_ROTATION[] = "video-rotation";
+const char QCameraParameters::KEY_QC_SUPPORTED_VIDEO_ROTATION_VALUES[] = "video-rotation-values";
+const char QCameraParameters::KEY_QC_AF_BRACKET[] = "af-bracket";
+const char QCameraParameters::KEY_QC_SUPPORTED_AF_BRACKET_MODES[] = "af-bracket-values";
+const char QCameraParameters::KEY_QC_RE_FOCUS[] = "re-focus";
+const char QCameraParameters::KEY_QC_SUPPORTED_RE_FOCUS_MODES[] = "re-focus-values";
+const char QCameraParameters::KEY_QC_CHROMA_FLASH[] = "chroma-flash";
+const char QCameraParameters::KEY_QC_SUPPORTED_CHROMA_FLASH_MODES[] = "chroma-flash-values";
+const char QCameraParameters::KEY_QC_OPTI_ZOOM[] = "opti-zoom";
+const char QCameraParameters::KEY_QC_SEE_MORE[] = "see-more";
+const char QCameraParameters::KEY_QC_STILL_MORE[] = "still-more";
+const char QCameraParameters::KEY_QC_SUPPORTED_OPTI_ZOOM_MODES[] = "opti-zoom-values";
+const char QCameraParameters::KEY_QC_HDR_MODE[] = "hdr-mode";
+const char QCameraParameters::KEY_QC_SUPPORTED_KEY_QC_HDR_MODES[] = "hdr-mode-values";
+const char QCameraParameters::KEY_QC_TRUE_PORTRAIT[] = "true-portrait";
+const char QCameraParameters::KEY_QC_SUPPORTED_TRUE_PORTRAIT_MODES[] = "true-portrait-values";
+const char QCameraParameters::KEY_QC_SUPPORTED_SEE_MORE_MODES[] = "see-more-values";
+const char QCameraParameters::KEY_QC_SUPPORTED_STILL_MORE_MODES[] = "still-more-values";
+const char QCameraParameters::KEY_INTERNAL_PERVIEW_RESTART[] = "internal-restart"; // NOTE(review): "PERVIEW" (preview) typo in the symbol name only; the key string is unaffected
+const char QCameraParameters::KEY_QC_RDI_MODE[] = "rdi-mode";
+const char QCameraParameters::KEY_QC_SUPPORTED_RDI_MODES[] = "rdi-mode-values";
+const char QCameraParameters::KEY_QC_SECURE_MODE[] = "secure-mode";
+const char QCameraParameters::KEY_QC_SUPPORTED_SECURE_MODES[] = "secure-mode-values";
+const char QCameraParameters::ISO_HJR[] = "ISO_HJR";
+const char QCameraParameters::KEY_QC_AUTO_HDR_SUPPORTED[] = "auto-hdr-supported";
+const char QCameraParameters::KEY_QC_LONGSHOT_SUPPORTED[] = "longshot-supported";
+const char QCameraParameters::KEY_QC_ZSL_HDR_SUPPORTED[] = "zsl-hdr-supported";
+const char QCameraParameters::KEY_QC_WB_MANUAL_CCT[] = "wb-manual-cct";
+const char QCameraParameters::KEY_QC_MIN_WB_CCT[] = "min-wb-cct";
+const char QCameraParameters::KEY_QC_MAX_WB_CCT[] = "max-wb-cct";
+
+const char QCameraParameters::KEY_QC_MANUAL_WB_GAINS[] = "manual-wb-gains";
+const char QCameraParameters::KEY_QC_MIN_WB_GAIN[] = "min-wb-gain";
+const char QCameraParameters::KEY_QC_MAX_WB_GAIN[] = "max-wb-gain";
+
+const char QCameraParameters::KEY_QC_MANUAL_WB_TYPE[] = "manual-wb-type";
+const char QCameraParameters::KEY_QC_MANUAL_WB_VALUE[] = "manual-wb-value";
+
+const char QCameraParameters::WHITE_BALANCE_MANUAL[] = "manual";
+const char QCameraParameters::FOCUS_MODE_MANUAL_POSITION[] = "manual";
+const char QCameraParameters::KEY_QC_CACHE_VIDEO_BUFFERS[] = "cache-video-buffers";
+
+const char QCameraParameters::KEY_QC_LONG_SHOT[] = "long-shot";
+const char QCameraParameters::KEY_QC_INITIAL_EXPOSURE_INDEX[] = "initial-exp-index";
+const char QCameraParameters::KEY_QC_INSTANT_AEC[] = "instant-aec";
+const char QCameraParameters::KEY_QC_INSTANT_CAPTURE[] = "instant-capture";
+const char QCameraParameters::KEY_QC_INSTANT_AEC_SUPPORTED_MODES[] = "instant-aec-values";
+const char QCameraParameters::KEY_QC_INSTANT_CAPTURE_SUPPORTED_MODES[] = "instant-capture-values";
+
+// Values for effect settings.
+const char QCameraParameters::EFFECT_EMBOSS[] = "emboss";
+const char QCameraParameters::EFFECT_SKETCH[] = "sketch";
+const char QCameraParameters::EFFECT_NEON[] = "neon";
+const char QCameraParameters::EFFECT_BEAUTY[] = "beauty";
+
+
+// Values for auto exposure settings.
+const char QCameraParameters::TOUCH_AF_AEC_OFF[] = "touch-off";
+const char QCameraParameters::TOUCH_AF_AEC_ON[] = "touch-on";
+
+// Values for scene mode settings.
+const char QCameraParameters::SCENE_MODE_ASD[] = "asd";   // corresponds to CAMERA_BESTSHOT_AUTO in HAL
+const char QCameraParameters::SCENE_MODE_BACKLIGHT[] = "backlight";
+const char QCameraParameters::SCENE_MODE_FLOWERS[] = "flowers";
+const char QCameraParameters::SCENE_MODE_AR[] = "AR";
+const char QCameraParameters::SCENE_MODE_HDR[] = "hdr";
+
+// Formats for setPreviewFormat and setPictureFormat.
+const char QCameraParameters::PIXEL_FORMAT_YUV420SP_ADRENO[] = "yuv420sp-adreno";
+const char QCameraParameters::PIXEL_FORMAT_YV12[] = "yuv420p";
+const char QCameraParameters::PIXEL_FORMAT_NV12[] = "nv12";
+const char QCameraParameters::QC_PIXEL_FORMAT_NV12_VENUS[] = "nv12-venus";
+
+// Values for raw image formats
+const char QCameraParameters::QC_PIXEL_FORMAT_YUV_RAW_8BIT_YUYV[] = "yuv-raw8-yuyv";
+const char QCameraParameters::QC_PIXEL_FORMAT_YUV_RAW_8BIT_YVYU[] = "yuv-raw8-yvyu";
+const char QCameraParameters::QC_PIXEL_FORMAT_YUV_RAW_8BIT_UYVY[] = "yuv-raw8-uyvy";
+const char QCameraParameters::QC_PIXEL_FORMAT_YUV_RAW_8BIT_VYUY[] = "yuv-raw8-vyuy";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8GBRG[] = "bayer-qcom-8gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8GRBG[] = "bayer-qcom-8grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8RGGB[] = "bayer-qcom-8rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8BGGR[] = "bayer-qcom-8bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10GBRG[] = "bayer-qcom-10gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10GRBG[] = "bayer-qcom-10grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10RGGB[] = "bayer-qcom-10rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10BGGR[] = "bayer-qcom-10bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12GBRG[] = "bayer-qcom-12gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12GRBG[] = "bayer-qcom-12grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12RGGB[] = "bayer-qcom-12rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12BGGR[] = "bayer-qcom-12bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_14GBRG[] = "bayer-qcom-14gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_14GRBG[] = "bayer-qcom-14grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_14RGGB[] = "bayer-qcom-14rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_14BGGR[] = "bayer-qcom-14bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8GBRG[] = "bayer-mipi-8gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8GRBG[] = "bayer-mipi-8grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8RGGB[] = "bayer-mipi-8rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8BGGR[] = "bayer-mipi-8bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10GBRG[] = "bayer-mipi-10gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10GRBG[] = "bayer-mipi-10grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10RGGB[] = "bayer-mipi-10rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10BGGR[] = "bayer-mipi-10bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12GBRG[] = "bayer-mipi-12gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12GRBG[] = "bayer-mipi-12grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12RGGB[] = "bayer-mipi-12rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12BGGR[] = "bayer-mipi-12bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_14GBRG[] = "bayer-mipi-14gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_14GRBG[] = "bayer-mipi-14grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_14RGGB[] = "bayer-mipi-14rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_14BGGR[] = "bayer-mipi-14bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8GBRG[] = "bayer-ideal-qcom-8gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8GRBG[] = "bayer-ideal-qcom-8grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8RGGB[] = "bayer-ideal-qcom-8rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8BGGR[] = "bayer-ideal-qcom-8bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10GBRG[] = "bayer-ideal-qcom-10gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10GRBG[] = "bayer-ideal-qcom-10grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10RGGB[] = "bayer-ideal-qcom-10rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10BGGR[] = "bayer-ideal-qcom-10bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12GBRG[] = "bayer-ideal-qcom-12gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12GRBG[] = "bayer-ideal-qcom-12grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12RGGB[] = "bayer-ideal-qcom-12rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12BGGR[] = "bayer-ideal-qcom-12bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_14GBRG[] = "bayer-ideal-qcom-14gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_14GRBG[] = "bayer-ideal-qcom-14grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_14RGGB[] = "bayer-ideal-qcom-14rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_14BGGR[] = "bayer-ideal-qcom-14bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8GBRG[] = "bayer-ideal-mipi-8gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8GRBG[] = "bayer-ideal-mipi-8grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8RGGB[] = "bayer-ideal-mipi-8rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8BGGR[] = "bayer-ideal-mipi-8bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10GBRG[] = "bayer-ideal-mipi-10gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10GRBG[] = "bayer-ideal-mipi-10grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10RGGB[] = "bayer-ideal-mipi-10rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10BGGR[] = "bayer-ideal-mipi-10bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12GBRG[] = "bayer-ideal-mipi-12gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12GRBG[] = "bayer-ideal-mipi-12grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12RGGB[] = "bayer-ideal-mipi-12rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12BGGR[] = "bayer-ideal-mipi-12bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_14GBRG[] = "bayer-ideal-mipi-14gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_14GRBG[] = "bayer-ideal-mipi-14grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_14RGGB[] = "bayer-ideal-mipi-14rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_14BGGR[] = "bayer-ideal-mipi-14bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8GBRG[] = "bayer-ideal-plain8-8gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8GRBG[] = "bayer-ideal-plain8-8grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8RGGB[] = "bayer-ideal-plain8-8rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8BGGR[] = "bayer-ideal-plain8-8bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8GBRG[] = "bayer-ideal-plain16-8gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8GRBG[] = "bayer-ideal-plain16-8grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8RGGB[] = "bayer-ideal-plain16-8rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8BGGR[] = "bayer-ideal-plain16-8bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10GBRG[] = "bayer-ideal-plain16-10gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10GRBG[] = "bayer-ideal-plain16-10grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10RGGB[] = "bayer-ideal-plain16-10rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10BGGR[] = "bayer-ideal-plain16-10bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12GBRG[] = "bayer-ideal-plain16-12gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12GRBG[] = "bayer-ideal-plain16-12grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12RGGB[] = "bayer-ideal-plain16-12rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12BGGR[] = "bayer-ideal-plain16-12bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_14GBRG[] = "bayer-ideal-plain16-14gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_14GRBG[] = "bayer-ideal-plain16-14grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_14RGGB[] = "bayer-ideal-plain16-14rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_14BGGR[] = "bayer-ideal-plain16-14bggr";
+
+// Values for ISO Settings
+const char QCameraParameters::ISO_AUTO[] = "auto";
+const char QCameraParameters::ISO_100[] = "ISO100";
+const char QCameraParameters::ISO_200[] = "ISO200";
+const char QCameraParameters::ISO_400[] = "ISO400";
+const char QCameraParameters::ISO_800[] = "ISO800";
+const char QCameraParameters::ISO_1600[] = "ISO1600";
+const char QCameraParameters::ISO_3200[] = "ISO3200";
+const char QCameraParameters::ISO_MANUAL[] = "manual";
+
+
+// Values for auto exposure settings.
+const char QCameraParameters::AUTO_EXPOSURE_FRAME_AVG[] = "frame-average";
+const char QCameraParameters::AUTO_EXPOSURE_CENTER_WEIGHTED[] = "center-weighted";
+const char QCameraParameters::AUTO_EXPOSURE_SPOT_METERING[] = "spot-metering";
+const char QCameraParameters::AUTO_EXPOSURE_SMART_METERING[] = "smart-metering";
+const char QCameraParameters::AUTO_EXPOSURE_USER_METERING[] = "user-metering";
+const char QCameraParameters::AUTO_EXPOSURE_SPOT_METERING_ADV[] = "spot-metering-adv";
+const char QCameraParameters::AUTO_EXPOSURE_CENTER_WEIGHTED_ADV[] = "center-weighted-adv";
+
+// Values for instant AEC modes
+const char QCameraParameters::KEY_QC_INSTANT_AEC_DISABLE[] = "0";
+const char QCameraParameters::KEY_QC_INSTANT_AEC_AGGRESSIVE_AEC[] = "1";
+const char QCameraParameters::KEY_QC_INSTANT_AEC_FAST_AEC[] = "2";
+
+// Values for instant capture modes
+const char QCameraParameters::KEY_QC_INSTANT_CAPTURE_DISABLE[] = "0";
+const char QCameraParameters::KEY_QC_INSTANT_CAPTURE_AGGRESSIVE_AEC[] = "1";
+const char QCameraParameters::KEY_QC_INSTANT_CAPTURE_FAST_AEC[] = "2";
+
+const char QCameraParameters::KEY_QC_GPS_LATITUDE_REF[] = "gps-latitude-ref";
+const char QCameraParameters::KEY_QC_GPS_LONGITUDE_REF[] = "gps-longitude-ref";
+const char QCameraParameters::KEY_QC_GPS_ALTITUDE_REF[] = "gps-altitude-ref";
+const char QCameraParameters::KEY_QC_GPS_STATUS[] = "gps-status";
+
+const char QCameraParameters::KEY_QC_HISTOGRAM[] = "histogram";
+const char QCameraParameters::KEY_QC_SUPPORTED_HISTOGRAM_MODES[] = "histogram-values";
+
+const char QCameraParameters::VALUE_ENABLE[] = "enable";
+const char QCameraParameters::VALUE_DISABLE[] = "disable";
+const char QCameraParameters::VALUE_OFF[] = "off";
+const char QCameraParameters::VALUE_ON[] = "on";
+const char QCameraParameters::VALUE_TRUE[] = "true";
+const char QCameraParameters::VALUE_FALSE[] = "false";
+
+const char QCameraParameters::VALUE_FAST[] = "fast";
+const char QCameraParameters::VALUE_HIGH_QUALITY[] = "high-quality";
+
+const char QCameraParameters::KEY_QC_SHARPNESS[] = "sharpness";
+const char QCameraParameters::KEY_QC_MIN_SHARPNESS[] = "min-sharpness";
+const char QCameraParameters::KEY_QC_MAX_SHARPNESS[] = "max-sharpness";
+const char QCameraParameters::KEY_QC_SHARPNESS_STEP[] = "sharpness-step";
+const char QCameraParameters::KEY_QC_CONTRAST[] = "contrast";
+const char QCameraParameters::KEY_QC_MIN_CONTRAST[] = "min-contrast";
+const char QCameraParameters::KEY_QC_MAX_CONTRAST[] = "max-contrast";
+const char QCameraParameters::KEY_QC_CONTRAST_STEP[] = "contrast-step";
+const char QCameraParameters::KEY_QC_SATURATION[] = "saturation";
+const char QCameraParameters::KEY_QC_MIN_SATURATION[] = "min-saturation";
+const char QCameraParameters::KEY_QC_MAX_SATURATION[] = "max-saturation";
+const char QCameraParameters::KEY_QC_SATURATION_STEP[] = "saturation-step";
+const char QCameraParameters::KEY_QC_BRIGHTNESS[] = "luma-adaptation";
+const char QCameraParameters::KEY_QC_MIN_BRIGHTNESS[] = "min-brightness";
+const char QCameraParameters::KEY_QC_MAX_BRIGHTNESS[] = "max-brightness";
+const char QCameraParameters::KEY_QC_BRIGHTNESS_STEP[] = "brightness-step";
+const char QCameraParameters::KEY_QC_SCE_FACTOR[] = "skinToneEnhancement";
+const char QCameraParameters::KEY_QC_MIN_SCE_FACTOR[] = "min-sce-factor";
+const char QCameraParameters::KEY_QC_MAX_SCE_FACTOR[] = "max-sce-factor";
+const char QCameraParameters::KEY_QC_SCE_FACTOR_STEP[] = "sce-factor-step";
+
+const char QCameraParameters::KEY_QC_MAX_NUM_REQUESTED_FACES[] = "qc-max-num-requested-faces";
+
+//Values for DENOISE
+const char QCameraParameters::DENOISE_OFF[] = "denoise-off";
+const char QCameraParameters::DENOISE_ON[] = "denoise-on";
+
+// Values for selectable zone af Settings
+const char QCameraParameters::FOCUS_ALGO_AUTO[] = "auto";
+const char QCameraParameters::FOCUS_ALGO_SPOT_METERING[] = "spot-metering";
+const char QCameraParameters::FOCUS_ALGO_CENTER_WEIGHTED[] = "center-weighted";
+const char QCameraParameters::FOCUS_ALGO_FRAME_AVERAGE[] = "frame-average";
+
+// Values for HFR settings.
+const char QCameraParameters::VIDEO_HFR_OFF[] = "off";
+const char QCameraParameters::VIDEO_HFR_2X[] = "60";
+const char QCameraParameters::VIDEO_HFR_3X[] = "90";
+const char QCameraParameters::VIDEO_HFR_4X[] = "120";
+const char QCameraParameters::VIDEO_HFR_5X[] = "150";
+const char QCameraParameters::VIDEO_HFR_6X[] = "180";
+const char QCameraParameters::VIDEO_HFR_7X[] = "210";
+const char QCameraParameters::VIDEO_HFR_8X[] = "240";
+const char QCameraParameters::VIDEO_HFR_9X[] = "480";
+
+// Values for HDR Bracketing settings.
+const char QCameraParameters::AE_BRACKET_OFF[] = "Off";
+const char QCameraParameters::AE_BRACKET[] = "AE-Bracket";
+
+// Values for AF Bracketing setting.
+const char QCameraParameters::AF_BRACKET_OFF[] = "af-bracket-off";
+const char QCameraParameters::AF_BRACKET_ON[] = "af-bracket-on";
+
+// Values for Refocus setting.
+const char QCameraParameters::RE_FOCUS_OFF[] = "re-focus-off";
+const char QCameraParameters::RE_FOCUS_ON[] = "re-focus-on";
+
+// Values for Chroma Flash setting.
+const char QCameraParameters::CHROMA_FLASH_OFF[] = "chroma-flash-off";
+const char QCameraParameters::CHROMA_FLASH_ON[] = "chroma-flash-on";
+
+// Values for Opti Zoom setting.
+const char QCameraParameters::OPTI_ZOOM_OFF[] = "opti-zoom-off";
+const char QCameraParameters::OPTI_ZOOM_ON[] = "opti-zoom-on";
+
+// Values for Still More setting.
+const char QCameraParameters::STILL_MORE_OFF[] = "still-more-off";
+const char QCameraParameters::STILL_MORE_ON[] = "still-more-on";
+
+// Values for HDR mode setting.
+// Values for HDR mode setting.
+const char QCameraParameters::HDR_MODE_SENSOR[] = "hdr-mode-sensor";
+const char QCameraParameters::HDR_MODE_MULTI_FRAME[] = "hdr-mode-multiframe";
+
+// Values for True Portrait setting.
+const char QCameraParameters::TRUE_PORTRAIT_OFF[] = "true-portrait-off";
+const char QCameraParameters::TRUE_PORTRAIT_ON[] = "true-portrait-on";
+
+// Values for FLIP settings.
+const char QCameraParameters::FLIP_MODE_OFF[] = "off";
+const char QCameraParameters::FLIP_MODE_V[] = "flip-v";
+const char QCameraParameters::FLIP_MODE_H[] = "flip-h";
+const char QCameraParameters::FLIP_MODE_VH[] = "flip-vh";
+
+// Values for CDS setting.
+const char QCameraParameters::CDS_MODE_OFF[] = "off";
+const char QCameraParameters::CDS_MODE_ON[] = "on";
+const char QCameraParameters::CDS_MODE_AUTO[] = "auto";
+
+// Values for video rotation settings.
+const char QCameraParameters::VIDEO_ROTATION_0[] = "0";
+const char QCameraParameters::VIDEO_ROTATION_90[] = "90";
+const char QCameraParameters::VIDEO_ROTATION_180[] = "180";
+const char QCameraParameters::VIDEO_ROTATION_270[] = "270";
+
+// Keys for manual focus / exposure / white-balance control.
+const char QCameraParameters::KEY_QC_SUPPORTED_MANUAL_FOCUS_MODES[] = "manual-focus-modes";
+const char QCameraParameters::KEY_QC_SUPPORTED_MANUAL_EXPOSURE_MODES[] = "manual-exposure-modes";
+const char QCameraParameters::KEY_QC_SUPPORTED_MANUAL_WB_MODES[] = "manual-wb-modes";
+const char QCameraParameters::KEY_QC_FOCUS_SCALE_MODE[] = "scale-mode";
+const char QCameraParameters::KEY_QC_FOCUS_DIOPTER_MODE[] = "diopter-mode";
+const char QCameraParameters::KEY_QC_ISO_PRIORITY[] = "iso-priority";
+const char QCameraParameters::KEY_QC_EXP_TIME_PRIORITY[] = "exp-time-priority";
+const char QCameraParameters::KEY_QC_USER_SETTING[] = "user-setting";
+const char QCameraParameters::KEY_QC_WB_CCT_MODE[] = "color-temperature";
+const char QCameraParameters::KEY_QC_WB_GAIN_MODE[] = "rbgb-gains";
+const char QCameraParameters::KEY_QC_NOISE_REDUCTION_MODE[] = "noise-reduction-mode";
+const char QCameraParameters::KEY_QC_NOISE_REDUCTION_MODE_VALUES[] = "noise-reduction-mode-values";
+
+#ifdef TARGET_TS_MAKEUP
+const char QCameraParameters::KEY_TS_MAKEUP[] = "tsmakeup";
+const char QCameraParameters::KEY_TS_MAKEUP_WHITEN[] = "tsmakeup_whiten";
+const char QCameraParameters::KEY_TS_MAKEUP_CLEAN[] = "tsmakeup_clean";
+#endif
+
+//KEY to share HFR batch size with video encoder.
+const char QCameraParameters::KEY_QC_VIDEO_BATCH_SIZE[] = "video-batch-size";
+
+// File-local orientation tokens.
+static const char* portrait = "portrait";
+static const char* landscape = "landscape";
+
+// Supported JPEG thumbnail dimensions (width x height); the inline comments
+// give the aspect ratio each entry is meant to match.
+const cam_dimension_t QCameraParameters::THUMBNAIL_SIZES_MAP[] = {
+    { 256, 154 }, //1.66233
+    { 240, 160 }, //1.5
+    { 320, 320 }, //1.0
+    { 320, 240 }, //1.33333
+    { 256, 144 }, //1.777778
+    { 240, 144 }, //1.666667
+    { 176, 144 }, //1.222222
+    /*Thumbnail sizes to match portrait picture size aspect ratio*/
+    { 240, 320 }, //to match 480X640 & 240X320 picture size
+    { 144, 176 }, //to match 144X176  picture size
+    { 0, 0 }      // required by Android SDK
+};
+
+// Auto-exposure metering modes: parameter string -> backend AEC enum.
+const QCameraParameters::QCameraMap<cam_auto_exposure_mode_type>
+        QCameraParameters::AUTO_EXPOSURE_MAP[] = {
+    { AUTO_EXPOSURE_FRAME_AVG,           CAM_AEC_MODE_FRAME_AVERAGE },
+    { AUTO_EXPOSURE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
+    { AUTO_EXPOSURE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
+    { AUTO_EXPOSURE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
+    { AUTO_EXPOSURE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
+    { AUTO_EXPOSURE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
+    { AUTO_EXPOSURE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
+};
+
+// Instant-AEC convergence modes.
+const QCameraParameters::QCameraMap<cam_aec_convergence_type>
+        QCameraParameters::INSTANT_AEC_MODES_MAP[] = {
+    { KEY_QC_INSTANT_AEC_DISABLE,        CAM_AEC_NORMAL_CONVERGENCE },
+    { KEY_QC_INSTANT_AEC_AGGRESSIVE_AEC, CAM_AEC_AGGRESSIVE_CONVERGENCE },
+    { KEY_QC_INSTANT_AEC_FAST_AEC,       CAM_AEC_FAST_CONVERGENCE },
+};
+
+// Instant-capture convergence modes (share the instant-AEC backend enum).
+const QCameraParameters::QCameraMap<cam_aec_convergence_type>
+        QCameraParameters::INSTANT_CAPTURE_MODES_MAP[] = {
+    { KEY_QC_INSTANT_CAPTURE_DISABLE,        CAM_AEC_NORMAL_CONVERGENCE },
+    { KEY_QC_INSTANT_CAPTURE_AGGRESSIVE_AEC, CAM_AEC_AGGRESSIVE_CONVERGENCE },
+    { KEY_QC_INSTANT_CAPTURE_FAST_AEC,       CAM_AEC_FAST_CONVERGENCE },
+};
+
+// Preview pixel formats: parameter string -> backend format enum.
+const QCameraParameters::QCameraMap<cam_format_t>
+        QCameraParameters::PREVIEW_FORMATS_MAP[] = {
+    {PIXEL_FORMAT_YUV420SP,        CAM_FORMAT_YUV_420_NV21},
+    {PIXEL_FORMAT_YUV420P,         CAM_FORMAT_YUV_420_YV12},
+    {PIXEL_FORMAT_YUV420SP_ADRENO, CAM_FORMAT_YUV_420_NV21_ADRENO},
+    {PIXEL_FORMAT_YV12,            CAM_FORMAT_YUV_420_YV12},
+    {PIXEL_FORMAT_NV12,            CAM_FORMAT_YUV_420_NV12},
+    {QC_PIXEL_FORMAT_NV12_VENUS,   CAM_FORMAT_YUV_420_NV12_VENUS}
+};
+
+// Picture (snapshot) formats: JPEG, YUV, and the full RAW Bayer matrix —
+// QCOM / MIPI / "ideal" packings at 8/10/12/14 bpp, four CFA orders each.
+const QCameraParameters::QCameraMap<cam_format_t>
+        QCameraParameters::PICTURE_TYPES_MAP[] = {
+    {PIXEL_FORMAT_JPEG,                          CAM_FORMAT_JPEG},
+    {PIXEL_FORMAT_YUV420SP,                      CAM_FORMAT_YUV_420_NV21},
+    {PIXEL_FORMAT_YUV422SP,                      CAM_FORMAT_YUV_422_NV16},
+    {QC_PIXEL_FORMAT_YUV_RAW_8BIT_YUYV,          CAM_FORMAT_YUV_RAW_8BIT_YUYV},
+    {QC_PIXEL_FORMAT_YUV_RAW_8BIT_YVYU,          CAM_FORMAT_YUV_RAW_8BIT_YVYU},
+    {QC_PIXEL_FORMAT_YUV_RAW_8BIT_UYVY,          CAM_FORMAT_YUV_RAW_8BIT_UYVY},
+    {QC_PIXEL_FORMAT_YUV_RAW_8BIT_VYUY,          CAM_FORMAT_YUV_RAW_8BIT_VYUY},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8GBRG,       CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8GRBG,       CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8RGGB,       CAM_FORMAT_BAYER_QCOM_RAW_8BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8BGGR,       CAM_FORMAT_BAYER_QCOM_RAW_8BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10GBRG,      CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10GRBG,      CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10RGGB,      CAM_FORMAT_BAYER_QCOM_RAW_10BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10BGGR,      CAM_FORMAT_BAYER_QCOM_RAW_10BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12GBRG,      CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12GRBG,      CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12RGGB,      CAM_FORMAT_BAYER_QCOM_RAW_12BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12BGGR,      CAM_FORMAT_BAYER_QCOM_RAW_12BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_14GBRG,      CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_14GRBG,      CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_14RGGB,      CAM_FORMAT_BAYER_QCOM_RAW_14BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_14BGGR,      CAM_FORMAT_BAYER_QCOM_RAW_14BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8GBRG,       CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8GRBG,       CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8RGGB,       CAM_FORMAT_BAYER_MIPI_RAW_8BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8BGGR,       CAM_FORMAT_BAYER_MIPI_RAW_8BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10GBRG,      CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10GRBG,      CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10RGGB,      CAM_FORMAT_BAYER_MIPI_RAW_10BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10BGGR,      CAM_FORMAT_BAYER_MIPI_RAW_10BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12GBRG,      CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12GRBG,      CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12RGGB,      CAM_FORMAT_BAYER_MIPI_RAW_12BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12BGGR,      CAM_FORMAT_BAYER_MIPI_RAW_12BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_14GBRG,      CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_14GRBG,      CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_14RGGB,      CAM_FORMAT_BAYER_MIPI_RAW_14BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_14BGGR,      CAM_FORMAT_BAYER_MIPI_RAW_14BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8GBRG,     CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8GRBG,     CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8RGGB,     CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8BGGR,     CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10GBRG,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10GRBG,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10RGGB,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10BGGR,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12GBRG,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12GRBG,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12RGGB,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12BGGR,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_14GBRG,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_14GRBG,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_14RGGB,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_14BGGR,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8GBRG,     CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8GRBG,     CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8RGGB,     CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8BGGR,     CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10GBRG,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10GRBG,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10RGGB,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10BGGR,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12GBRG,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12GRBG,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12RGGB,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12BGGR,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_14GBRG,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_14GRBG,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_14RGGB,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_14BGGR,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8GBRG,   CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8GRBG,   CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8RGGB,   CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8BGGR,   CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8GBRG,  CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8GRBG,  CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8RGGB,  CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8BGGR,  CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10GBRG, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10GRBG, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10RGGB, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10BGGR, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12GBRG, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12GRBG, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12RGGB, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12BGGR, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_14GBRG, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_14GRBG, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_14RGGB, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_14BGGR, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_BGGR}
+};
+
+// Focus modes: parameter string -> backend focus enum.
+const QCameraParameters::QCameraMap<cam_focus_mode_type>
+        QCameraParameters::FOCUS_MODES_MAP[] = {
+    { FOCUS_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
+    { FOCUS_MODE_INFINITY,           CAM_FOCUS_MODE_INFINITY },
+    { FOCUS_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
+    { FOCUS_MODE_FIXED,              CAM_FOCUS_MODE_FIXED },
+    { FOCUS_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
+    { FOCUS_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
+    { FOCUS_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO },
+    { FOCUS_MODE_MANUAL_POSITION,    CAM_FOCUS_MODE_MANUAL},
+};
+
+// Color effects.
+const QCameraParameters::QCameraMap<cam_effect_mode_type>
+        QCameraParameters::EFFECT_MODES_MAP[] = {
+    { EFFECT_NONE,       CAM_EFFECT_MODE_OFF },
+    { EFFECT_MONO,       CAM_EFFECT_MODE_MONO },
+    { EFFECT_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
+    { EFFECT_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
+    { EFFECT_SEPIA,      CAM_EFFECT_MODE_SEPIA },
+    { EFFECT_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
+    { EFFECT_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
+    { EFFECT_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
+    { EFFECT_AQUA,       CAM_EFFECT_MODE_AQUA },
+    { EFFECT_EMBOSS,     CAM_EFFECT_MODE_EMBOSS },
+    { EFFECT_SKETCH,     CAM_EFFECT_MODE_SKETCH },
+    { EFFECT_NEON,       CAM_EFFECT_MODE_NEON },
+    { EFFECT_BEAUTY,     CAM_EFFECT_MODE_BEAUTY }
+};
+
+// Scene modes. Note SCENE_MODE_AUTO maps to the backend's OFF and
+// SCENE_MODE_ASD maps to the backend's AUTO (auto scene detection).
+const QCameraParameters::QCameraMap<cam_scene_mode_type>
+        QCameraParameters::SCENE_MODES_MAP[] = {
+    { SCENE_MODE_AUTO,           CAM_SCENE_MODE_OFF },
+    { SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
+    { SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
+    { SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
+    { SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
+    { SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
+    { SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
+    { SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
+    { SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
+    { SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
+    { SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
+    { SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
+    { SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
+    { SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
+    { SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
+    { SCENE_MODE_ASD,            CAM_SCENE_MODE_AUTO },
+    { SCENE_MODE_BACKLIGHT,      CAM_SCENE_MODE_BACKLIGHT },
+    { SCENE_MODE_FLOWERS,        CAM_SCENE_MODE_FLOWERS },
+    { SCENE_MODE_AR,             CAM_SCENE_MODE_AR },
+    { SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR },
+};
+
+// Flash modes.
+const QCameraParameters::QCameraMap<cam_flash_mode_t>
+        QCameraParameters::FLASH_MODES_MAP[] = {
+    { FLASH_MODE_OFF,   CAM_FLASH_MODE_OFF },
+    { FLASH_MODE_AUTO,  CAM_FLASH_MODE_AUTO },
+    { FLASH_MODE_ON,    CAM_FLASH_MODE_ON },
+    { FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
+};
+
+// Auto-focus metering algorithms.
+const QCameraParameters::QCameraMap<cam_focus_algorithm_type>
+         QCameraParameters::FOCUS_ALGO_MAP[] = {
+    { FOCUS_ALGO_AUTO,            CAM_FOCUS_ALGO_AUTO },
+    { FOCUS_ALGO_SPOT_METERING,   CAM_FOCUS_ALGO_SPOT },
+    { FOCUS_ALGO_CENTER_WEIGHTED, CAM_FOCUS_ALGO_CENTER_WEIGHTED },
+    { FOCUS_ALGO_FRAME_AVERAGE,   CAM_FOCUS_ALGO_AVERAGE }
+};
+
+// White-balance modes.
+const QCameraParameters::QCameraMap<cam_wb_mode_type>
+        QCameraParameters::WHITE_BALANCE_MODES_MAP[] = {
+    { WHITE_BALANCE_AUTO,            CAM_WB_MODE_AUTO },
+    { WHITE_BALANCE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
+    { WHITE_BALANCE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
+    { WHITE_BALANCE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
+    { WHITE_BALANCE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
+    { WHITE_BALANCE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
+    { WHITE_BALANCE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
+    { WHITE_BALANCE_SHADE,           CAM_WB_MODE_SHADE },
+    { WHITE_BALANCE_MANUAL,          CAM_WB_MODE_MANUAL},
+};
+
+// Antibanding (flicker-avoidance) modes.
+const QCameraParameters::QCameraMap<cam_antibanding_mode_type>
+        QCameraParameters::ANTIBANDING_MODES_MAP[] = {
+    { ANTIBANDING_OFF,  CAM_ANTIBANDING_MODE_OFF },
+    { ANTIBANDING_50HZ, CAM_ANTIBANDING_MODE_50HZ },
+    { ANTIBANDING_60HZ, CAM_ANTIBANDING_MODE_60HZ },
+    { ANTIBANDING_AUTO, CAM_ANTIBANDING_MODE_AUTO }
+};
+
+// ISO sensitivity modes (ISO_HJR is the hand-jitter-reduction / deblur mode).
+const QCameraParameters::QCameraMap<cam_iso_mode_type>
+        QCameraParameters::ISO_MODES_MAP[] = {
+    { ISO_AUTO,  CAM_ISO_MODE_AUTO },
+    { ISO_HJR,   CAM_ISO_MODE_DEBLUR },
+    { ISO_100,   CAM_ISO_MODE_100 },
+    { ISO_200,   CAM_ISO_MODE_200 },
+    { ISO_400,   CAM_ISO_MODE_400 },
+    { ISO_800,   CAM_ISO_MODE_800 },
+    { ISO_1600,  CAM_ISO_MODE_1600 },
+    { ISO_3200,  CAM_ISO_MODE_3200 }
+};
+
+// High-frame-rate video modes: Nx multiplier string -> fps enum.
+const QCameraParameters::QCameraMap<cam_hfr_mode_t>
+        QCameraParameters::HFR_MODES_MAP[] = {
+    { VIDEO_HFR_OFF, CAM_HFR_MODE_OFF },
+    { VIDEO_HFR_2X, CAM_HFR_MODE_60FPS },
+    { VIDEO_HFR_3X, CAM_HFR_MODE_90FPS },
+    { VIDEO_HFR_4X, CAM_HFR_MODE_120FPS },
+    { VIDEO_HFR_5X, CAM_HFR_MODE_150FPS },
+    { VIDEO_HFR_6X, CAM_HFR_MODE_180FPS },
+    { VIDEO_HFR_7X, CAM_HFR_MODE_210FPS },
+    { VIDEO_HFR_8X, CAM_HFR_MODE_240FPS },
+    { VIDEO_HFR_9X, CAM_HFR_MODE_480FPS }
+};
+
+// Exposure-bracketing on/off.
+const QCameraParameters::QCameraMap<cam_bracket_mode>
+        QCameraParameters::BRACKETING_MODES_MAP[] = {
+    { AE_BRACKET_OFF, CAM_EXP_BRACKETING_OFF },
+    { AE_BRACKET,     CAM_EXP_BRACKETING_ON }
+};
+
+// Generic boolean string maps ("off"/"on", "enable"/"disable", etc.)
+// used by several toggle-style parameters below.
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::ON_OFF_MODES_MAP[] = {
+    { VALUE_OFF, 0 },
+    { VALUE_ON,  1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::TOUCH_AF_AEC_MODES_MAP[] = {
+    { QCameraParameters::TOUCH_AF_AEC_OFF, 0 },
+    { QCameraParameters::TOUCH_AF_AEC_ON, 1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::ENABLE_DISABLE_MODES_MAP[] = {
+    { VALUE_ENABLE,  1 },
+    { VALUE_DISABLE, 0 }
+};
+
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::DENOISE_ON_OFF_MODES_MAP[] = {
+    { DENOISE_OFF, 0 },
+    { DENOISE_ON,  1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::TRUE_FALSE_MODES_MAP[] = {
+    { VALUE_FALSE, 0},
+    { VALUE_TRUE,  1}
+};
+
+// Preview/video/snapshot flip settings.
+const QCameraParameters::QCameraMap<cam_flip_t>
+        QCameraParameters::FLIP_MODES_MAP[] = {
+    {FLIP_MODE_OFF, FLIP_NONE},
+    {FLIP_MODE_V, FLIP_V},
+    {FLIP_MODE_H, FLIP_H},
+    {FLIP_MODE_VH, FLIP_V_H}
+};
+
+// Per-feature on/off toggle maps (AF bracketing, refocus, chroma flash,
+// opti-zoom, true portrait, still-more).
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::AF_BRACKETING_MODES_MAP[] = {
+    { AF_BRACKET_OFF, 0 },
+    { AF_BRACKET_ON,  1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::RE_FOCUS_MODES_MAP[] = {
+    { RE_FOCUS_OFF, 0 },
+    { RE_FOCUS_ON,  1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::CHROMA_FLASH_MODES_MAP[] = {
+    { CHROMA_FLASH_OFF, 0 },
+    { CHROMA_FLASH_ON,  1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::OPTI_ZOOM_MODES_MAP[] = {
+    { OPTI_ZOOM_OFF, 0 },
+    { OPTI_ZOOM_ON,  1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::TRUE_PORTRAIT_MODES_MAP[] = {
+    { TRUE_PORTRAIT_OFF, 0 },
+    { TRUE_PORTRAIT_ON,  1 }
+};
+
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::STILL_MORE_MODES_MAP[] = {
+    { STILL_MORE_OFF, 0 },
+    { STILL_MORE_ON,  1 }
+};
+
+// CDS modes.
+const QCameraParameters::QCameraMap<cam_cds_mode_type_t>
+        QCameraParameters::CDS_MODES_MAP[] = {
+    { CDS_MODE_OFF, CAM_CDS_MODE_OFF },
+    { CDS_MODE_ON, CAM_CDS_MODE_ON },
+    { CDS_MODE_AUTO, CAM_CDS_MODE_AUTO}
+};
+
+// HDR acquisition mode: sensor-side (0) vs multi-frame (1).
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::HDR_MODES_MAP[] = {
+    { HDR_MODE_SENSOR, 0 },
+    { HDR_MODE_MULTI_FRAME, 1 }
+};
+
+// Video rotation: string -> rotation in degrees.
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::VIDEO_ROTATION_MODES_MAP[] = {
+    { VIDEO_ROTATION_0, 0 },
+    { VIDEO_ROTATION_90, 90 },
+    { VIDEO_ROTATION_180, 180 },
+    { VIDEO_ROTATION_270, 270 }
+};
+
+// Noise-reduction modes (off / fast / high-quality).
+const QCameraParameters::QCameraMap<int>
+        QCameraParameters::NOISE_REDUCTION_MODES_MAP[] = {
+    { VALUE_OFF, 0 },
+    { VALUE_FAST,  1 },
+    { VALUE_HIGH_QUALITY,  2 }
+};
+
+// DEFAULT_CAMERA_AREA: all-zero focus/metering area tuple.
+#define DEFAULT_CAMERA_AREA "(0, 0, 0, 0, 0)"
+#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
+// 512 * 1024 * 1024 bytes.
+#define TOTAL_RAM_SIZE_512MB 536870912
+// Element count of a statically-sized array (do not use on pointers).
+#define PARAM_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
+
+/*===========================================================================
+ * FUNCTION   : isOEMFeat1PropEnabled
+ *
+ * DESCRIPTION: inline function to check from property if custom feature
+ *            is enabled
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : boolean true/false
+ *==========================================================================*/
+static inline bool isOEMFeat1PropEnabled()
+{
+    char value[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.imglib.oemfeat1", value, "0");
+    // The comparison already yields a bool; the former `? true : false`
+    // ternary was redundant.
+    return atoi(value) > 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraParameters
+ *
+ * DESCRIPTION: default constructor of QCameraParameters
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : None
+ *
+ * NOTE       : keep this initializer list in sync with the String8-based
+ *              constructor; any member missing from either list is left
+ *              default-initialized (indeterminate for POD members).
+ *==========================================================================*/
+QCameraParameters::QCameraParameters()
+    : CameraParameters(),
+      m_reprocScaleParam(),
+      mCommon(),
+      m_pCapability(NULL),
+      m_pCamOpsTbl(NULL),
+      m_pParamHeap(NULL),
+      m_pParamBuf(NULL),
+      m_pRelCamSyncHeap(NULL),
+      m_pRelCamSyncBuf(NULL),
+      m_bFrameSyncEnabled(false),
+      mIsType(IS_TYPE_NONE),
+      mIsTypePreview(IS_TYPE_NONE),
+      m_bZslMode(false),
+      m_bZslMode_new(false),
+      m_bForceZslMode(false),
+      m_bRecordingHint(false),
+      m_bRecordingHint_new(false),
+      m_bHistogramEnabled(false),
+      m_bLongshotEnabled(false),
+      m_nFaceProcMask(0),
+      m_bFaceDetectionOn(0),
+      m_bDebugFps(false),
+      mFocusMode(CAM_FOCUS_MODE_MAX),
+      mPreviewFormat(CAM_FORMAT_YUV_420_NV21),
+      mAppPreviewFormat(CAM_FORMAT_YUV_420_NV21),
+      mPictureFormat(CAM_FORMAT_JPEG),
+      m_bNeedRestart(false),
+      m_bNoDisplayMode(false),
+      m_bWNROn(false),
+      m_bTNRPreviewOn(false),
+      m_bTNRVideoOn(false),
+      m_bTNRSnapshotOn(false),
+      m_bInited(false),
+      m_nRetroBurstNum(0),
+      m_nBurstLEDOnPeriod(100),
+      m_bUpdateEffects(false),
+      m_bSceneTransitionAuto(false),
+      m_bPreviewFlipChanged(false),
+      m_bVideoFlipChanged(false),
+      m_bSnapshotFlipChanged(false),
+      m_bFixedFrameRateSet(false),
+      m_bHDREnabled(false),
+      m_bLocalHDREnabled(false),
+      m_bAVTimerEnabled(false),
+      m_bDISEnabled(false),
+      m_MobiMask(0),
+      m_AdjustFPS(NULL),
+      m_bHDR1xFrameEnabled(false),
+      m_HDRSceneEnabled(false),
+      m_bHDRThumbnailProcessNeeded(false),
+      m_bHDR1xExtraBufferNeeded(true),
+      m_bHDROutputCropEnabled(false),
+      m_tempMap(),
+      m_bAFBracketingOn(false),
+      m_bReFocusOn(false),
+      m_bChromaFlashOn(false),
+      m_bOptiZoomOn(false),
+      m_bSceneSelection(false),
+      m_SelectedScene(CAM_SCENE_MODE_MAX),
+      m_bSeeMoreOn(false),
+      m_bStillMoreOn(false),
+      m_bHighQualityNoiseReductionMode(false),
+      m_bHfrMode(false),
+      m_bSensorHDREnabled(false),
+      m_bRdiMode(false),
+      m_bSecureMode(false),
+      m_bAeBracketingEnabled(false),
+      mFlashValue(CAM_FLASH_MODE_OFF),
+      mFlashDaemonValue(CAM_FLASH_MODE_OFF),
+      mHfrMode(CAM_HFR_MODE_OFF),
+      m_bHDRModeSensor(true),
+      mOfflineRAW(false),
+      m_bTruePortraitOn(false),
+      m_bIsLowMemoryDevice(false),
+      mCds_mode(CAM_CDS_MODE_OFF),
+      mParmEffect(CAM_EFFECT_MODE_OFF), // was missing: left indeterminate; the String8 ctor initializes it
+      m_LLCaptureEnabled(FALSE),
+      m_LowLightLevel(CAM_LOW_LIGHT_OFF),
+      m_bLtmForSeeMoreEnabled(false),
+      m_expTime(0),
+      m_isoValue(0),
+      m_ManualCaptureMode(CAM_MANUAL_CAPTURE_TYPE_OFF),
+      m_dualLedCalibration(0),
+      m_bInstantAEC(false),
+      m_bInstantCapture(false),
+      mAecFrameBound(0),
+      mAecSkipDisplayFrameBound(0)
+{
+    char value[PROPERTY_VALUE_MAX];
+    // TODO: may move to parameter instead of sysprop
+    property_get("persist.debug.sf.showfps", value, "0");
+    m_bDebugFps = atoi(value) > 0 ? true : false;
+
+    // For thermal mode, it should be set as system property
+    // because system property applies to all applications, while
+    // parameters only apply to specific app.
+    property_get("persist.camera.thermal.mode", value, "fps");
+    if (!strcmp(value, "frameskip")) {
+        m_ThermalMode = QCAMERA_THERMAL_ADJUST_FRAMESKIP;
+    } else {
+        if (strcmp(value, "fps"))
+            LOGW("Invalid camera thermal mode %s", value);
+        m_ThermalMode = QCAMERA_THERMAL_ADJUST_FPS;
+    }
+
+    memset(value, 0, sizeof(value));
+    // As per Power/Quality evaluation, LTM is enabled by default in SeeMore/StillMore usecase
+    // to improve the quality as there is no much impact to power
+    property_get("persist.camera.ltmforseemore", value, "1");
+    m_bLtmForSeeMoreEnabled = atoi(value);
+
+    memset(&m_LiveSnapshotSize, 0, sizeof(m_LiveSnapshotSize));
+    memset(&m_default_fps_range, 0, sizeof(m_default_fps_range));
+    memset(&m_hfrFpsRange, 0, sizeof(m_hfrFpsRange));
+    memset(&m_stillmore_config, 0, sizeof(cam_still_more_t));
+    memset(&m_captureFrameConfig, 0, sizeof(cam_capture_frame_config_t));
+    memset(&m_relCamSyncInfo, 0, sizeof(cam_sync_related_sensors_event_info_t));
+    mTotalPPCount = 1;
+    mZoomLevel = 0;
+    mParmZoomLevel = 0;
+    mCurPPCount = 0;
+    mBufBatchCnt = 0;
+    mRotation = 0;
+    mJpegRotation = 0;
+    mVideoBatchSize = 0;
+    m_bOEMFeatEnabled = isOEMFeat1PropEnabled();
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraParameters
+ *
+ * DESCRIPTION: constructor of QCameraParameters
+ *
+ * PARAMETERS :
+ *   @params  : parameters in string
+ *
+ * RETURN     : None
+ *
+ * NOTE       : keep this initializer list in sync with the default
+ *              constructor; any member missing from either list is left
+ *              default-initialized (indeterminate for POD members).
+ *==========================================================================*/
+QCameraParameters::QCameraParameters(const String8 &params)
+    : CameraParameters(params),
+    m_reprocScaleParam(),
+    mCommon(),
+    m_pCapability(NULL),
+    m_pCamOpsTbl(NULL),
+    m_pParamHeap(NULL),
+    m_pParamBuf(NULL),
+    m_pRelCamSyncHeap(NULL),
+    m_pRelCamSyncBuf(NULL),
+    m_bFrameSyncEnabled(false),
+    mIsType(IS_TYPE_NONE),        // was missing: left indeterminate
+    mIsTypePreview(IS_TYPE_NONE), // was missing: left indeterminate
+    m_bZslMode(false),
+    m_bZslMode_new(false),
+    m_bForceZslMode(false),
+    m_bRecordingHint(false),
+    m_bRecordingHint_new(false),
+    m_bHistogramEnabled(false),
+    m_bLongshotEnabled(false),
+    m_nFaceProcMask(0),
+    m_bFaceDetectionOn(0),        // was missing: left indeterminate
+    m_bDebugFps(false),
+    mFocusMode(CAM_FOCUS_MODE_MAX),
+    mPreviewFormat(CAM_FORMAT_YUV_420_NV21),
+    mAppPreviewFormat(CAM_FORMAT_YUV_420_NV21),
+    mPictureFormat(CAM_FORMAT_JPEG),
+    m_bNeedRestart(false),
+    m_bNoDisplayMode(false),
+    m_bWNROn(false),
+    m_bTNRPreviewOn(false),
+    m_bTNRVideoOn(false),
+    m_bTNRSnapshotOn(false),
+    m_bInited(false),
+    m_nRetroBurstNum(0),
+    m_nBurstLEDOnPeriod(100),
+    m_bUpdateEffects(false),      // was missing: left indeterminate
+    m_bSceneTransitionAuto(false),// was missing: left indeterminate
+    m_bPreviewFlipChanged(false),
+    m_bVideoFlipChanged(false),
+    m_bSnapshotFlipChanged(false),
+    m_bFixedFrameRateSet(false),
+    m_bHDREnabled(false),
+    m_bLocalHDREnabled(false),
+    m_bAVTimerEnabled(false),
+    m_bDISEnabled(false),         // was missing: left indeterminate
+    m_MobiMask(0),                // was missing: left indeterminate
+    m_AdjustFPS(NULL),
+    m_bHDR1xFrameEnabled(false),
+    m_HDRSceneEnabled(false),
+    m_bHDRThumbnailProcessNeeded(false),
+    m_bHDR1xExtraBufferNeeded(true),
+    m_bHDROutputCropEnabled(false),
+    m_tempMap(),
+    m_bAFBracketingOn(false),
+    m_bReFocusOn(false),
+    m_bChromaFlashOn(false),
+    m_bOptiZoomOn(false),
+    m_bSceneSelection(false),
+    m_SelectedScene(CAM_SCENE_MODE_MAX),
+    m_bSeeMoreOn(false),
+    m_bStillMoreOn(false),
+    m_bHighQualityNoiseReductionMode(false),
+    m_bHfrMode(false),
+    m_bSensorHDREnabled(false),
+    m_bRdiMode(false),
+    m_bSecureMode(false),
+    m_bAeBracketingEnabled(false),
+    mFlashValue(CAM_FLASH_MODE_OFF),
+    mFlashDaemonValue(CAM_FLASH_MODE_OFF),
+    mHfrMode(CAM_HFR_MODE_OFF),
+    m_bHDRModeSensor(true),
+    mOfflineRAW(false),
+    m_bTruePortraitOn(false),
+    m_bIsLowMemoryDevice(false),
+    mCds_mode(CAM_CDS_MODE_OFF),
+    mParmEffect(CAM_EFFECT_MODE_OFF),
+    m_LLCaptureEnabled(FALSE),
+    m_LowLightLevel(CAM_LOW_LIGHT_OFF),
+    m_bLtmForSeeMoreEnabled(false),
+    m_expTime(0),
+    m_isoValue(0),
+    m_ManualCaptureMode(CAM_MANUAL_CAPTURE_TYPE_OFF),
+    m_dualLedCalibration(0),
+    m_bInstantAEC(false),
+    m_bInstantCapture(false),
+    mAecFrameBound(0),
+    mAecSkipDisplayFrameBound(0)
+{
+    memset(&m_LiveSnapshotSize, 0, sizeof(m_LiveSnapshotSize));
+    memset(&m_default_fps_range, 0, sizeof(m_default_fps_range));
+    memset(&m_hfrFpsRange, 0, sizeof(m_hfrFpsRange));
+    memset(&m_stillmore_config, 0, sizeof(cam_still_more_t));
+    memset(&m_relCamSyncInfo, 0, sizeof(cam_sync_related_sensors_event_info_t));
+    mTotalPPCount = 0;
+    mZoomLevel = 0;
+    mParmZoomLevel = 0;
+    mCurPPCount = 0;
+    mRotation = 0;
+    mJpegRotation = 0;
+    mBufBatchCnt = 0;
+    mVideoBatchSize = 0;
+    m_bOEMFeatEnabled = isOEMFeat1PropEnabled();
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraParameters
+ *
+ * DESCRIPTION: deconstructor of QCameraParameters
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraParameters::~QCameraParameters()
+{
+    deinit();   // release any parameter state acquired in init(), if any
+}
+
+/*===========================================================================
+ * FUNCTION   : createSizesString
+ *
+ * DESCRIPTION: build a comma-separated "WxH" list from an array of
+ *              dimensions, e.g. "640x480,320x240"
+ *
+ * PARAMETERS :
+ *   @sizes   : array of dimensions
+ *   @len     : size of dimension array
+ *
+ * RETURN     : string obj
+ *==========================================================================*/
+String8 QCameraParameters::createSizesString(const cam_dimension_t *sizes, size_t len)
+{
+    String8 result;
+    char entry[32];
+
+    for (size_t idx = 0; idx < len; idx++) {
+        if (idx > 0) {
+            result.append(",");
+        }
+        snprintf(entry, sizeof(entry), "%dx%d",
+                sizes[idx].width, sizes[idx].height);
+        result.append(entry);
+    }
+    return result;
+}
+
+/*===========================================================================
+ * FUNCTION   : createValuesString
+ *
+ * DESCRIPTION: build a comma-separated string of the map descriptions whose
+ *              enum values appear in the given values array
+ *
+ * PARAMETERS :
+ *   @values  : array of values
+ *   @len     : size of values array
+ *   @map     : map contains the mapping between values and enums
+ *   @map_len : size of the map
+ *
+ * RETURN     : string obj
+ *==========================================================================*/
+template <typename valuesType, class mapType> String8 createValuesString(
+        const valuesType *values, size_t len, const mapType *map, size_t map_len)
+{
+    String8 out;
+    int appended = 0;
+
+    for (size_t v = 0; v < len; v++) {
+        // Scan for the first map entry matching this value that also has a
+        // non-NULL description; entries with NULL desc are skipped.
+        for (size_t m = 0; m < map_len; m++) {
+            if (map[m].val != values[v]) {
+                continue;
+            }
+            if (NULL != map[m].desc) {
+                if (appended > 0) {
+                    out.append(",");
+                }
+                out.append(map[m].desc);
+                appended++;
+                break;
+            }
+        }
+    }
+    return out;
+}
+
+/*===========================================================================
+ * FUNCTION   : createValuesStringFromMap
+ *
+ * DESCRIPTION: build a comma-separated string of every non-NULL description
+ *              in the map, in map order
+ *
+ * PARAMETERS :
+ *   @map     : map contains the mapping between values and enums
+ *   @map_len : size of the map
+ *
+ * RETURN     : string obj
+ *==========================================================================*/
+template <class mapType> String8 createValuesStringFromMap(
+        const mapType *map, size_t map_len)
+{
+    String8 out;
+
+    for (size_t idx = 0; idx < map_len; idx++) {
+        if (NULL == map[idx].desc) {
+            continue;   // skip entries with no description
+        }
+        if (idx > 0) {
+            out.append(",");
+        }
+        out.append(map[idx].desc);
+    }
+    return out;
+}
+
+/*===========================================================================
+ * FUNCTION   : createZoomRatioValuesString
+ *
+ * DESCRIPTION: create string obj contains array of zoom ratio values
+ *
+ * PARAMETERS :
+ *   @zoomRatios : array of zoom ratios (unsigned)
+ *   @length     : size of the array
+ *
+ * RETURN     : string obj
+ *==========================================================================*/
+String8 QCameraParameters::createZoomRatioValuesString(uint32_t *zoomRatios,
+        size_t length)
+{
+    String8 str;
+    char buffer[32] = {0};
+
+    if(length > 0){
+        // Use %u: the entries are uint32_t; %d with an unsigned argument is
+        // a format/argument type mismatch.
+        snprintf(buffer, sizeof(buffer), "%u", zoomRatios[0]);
+        str.append(buffer);
+    }
+
+    for (size_t i = 1; i < length; i++) {
+        memset(buffer, 0, sizeof(buffer));
+        snprintf(buffer, sizeof(buffer), ",%u", zoomRatios[i]);
+        str.append(buffer);
+    }
+    return str;
+}
+
+/*===========================================================================
+ * FUNCTION   : createHfrValuesString
+ *
+ * DESCRIPTION: create string obj contains array of hfr values from map when
+ *              matched from input hfr values; the "off" value is always
+ *              appended last
+ *
+ * PARAMETERS :
+ *   @values  : array of hfr info
+ *   @len     : size of the array
+ *   @map     : map of hfr string value and enum
+ *   @map_len : size of map
+ *
+ * RETURN     : string obj
+ *==========================================================================*/
+String8 QCameraParameters::createHfrValuesString(const cam_hfr_info_t *values,
+        size_t len, const QCameraMap<cam_hfr_mode_t> *map, size_t map_len)
+{
+    String8 str;
+    bool empty = true;
+
+    // Translate each supported HFR mode into its string form.
+    for (size_t i = 0; i < len; i++) {
+        for (size_t j = 0; j < map_len; j++) {
+            if ((map[j].val != (int)values[i].mode) || (NULL == map[j].desc)) {
+                continue;
+            }
+            if (!empty) {
+                str.append(",");
+            }
+            str.append(map[j].desc);
+            empty = false;
+            break; //loop j
+        }
+    }
+    // "off" is always advertised, even when no HFR mode matched.
+    if (!empty) {
+        str.append(",");
+    }
+    str.append(VIDEO_HFR_OFF);
+    return str;
+}
+
+/*===========================================================================
+ * FUNCTION   : createHfrSizesString
+ *
+ * DESCRIPTION: create string obj contains array of hfr sizes
+ *
+ * PARAMETERS :
+ *   @values  : array of hfr info
+ *   @len     : size of the array
+ *
+ * RETURN     : string obj (e.g. "1920x1080,1280x720")
+ *==========================================================================*/
+String8 QCameraParameters::createHfrSizesString(const cam_hfr_info_t *values, size_t len)
+{
+    String8 sizes;
+    char entry[32];
+
+    // Single loop: every entry after the first is prefixed with a comma.
+    for (size_t i = 0; i < len; i++) {
+        snprintf(entry, sizeof(entry), (i == 0) ? "%dx%d" : ",%dx%d",
+                 values[i].dim[0].width, values[i].dim[0].height);
+        sizes.append(entry);
+    }
+    return sizes;
+}
+
+/*===========================================================================
+ * FUNCTION   : createFpsString
+ *
+ * DESCRIPTION: create a comma-separated string of every whole-number FPS
+ *              rate that lies inside the supported float fps range
+ *
+ * PARAMETERS :
+ *   @fps     : default fps range
+ *
+ * RETURN     : string obj (may be empty if no integer fits in the range)
+ *==========================================================================*/
+String8 QCameraParameters::createFpsString(cam_fps_range_t &fps)
+{
+    char buffer[32];
+    String8 fpsValues;
+
+    // Truncate the float bounds, then nudge min up when truncation dropped a
+    // fraction (ceil) and guard max the same way, so only whole rates fully
+    // inside [min_fps, max_fps] are advertised.
+    int min_fps = int(fps.min_fps);
+    int max_fps = int(fps.max_fps);
+
+    if (min_fps < fps.min_fps){
+        min_fps++;
+    }
+    if (max_fps > fps.max_fps) {
+        max_fps--;
+    }
+    // First entry has no leading comma; the interval may contain no integer.
+    if (min_fps <= max_fps) {
+        snprintf(buffer, sizeof(buffer), "%d", min_fps);
+        fpsValues.append(buffer);
+    }
+
+    for (int i = min_fps+1; i <= max_fps; i++) {
+        snprintf(buffer, sizeof(buffer), ",%d", i);
+        fpsValues.append(buffer);
+    }
+
+    return fpsValues;
+}
+
+/*===========================================================================
+ * FUNCTION   : createFpsRangeString
+ *
+ * DESCRIPTION: create string obj contains array of FPS ranges
+ *
+ * PARAMETERS :
+ *   @fps     : array of fps ranges
+ *   @len     : size of the array
+ *   @default_fps_index : reference to index of default fps range; set to the
+ *                        entry with the widest (max - min) span.
+ *                        NOTE(review): left untouched when len == 0 --
+ *                        callers should pre-initialize it.
+ *
+ * RETURN     : string obj, e.g. "(7500,30000),(30000,30000)"
+ *==========================================================================*/
+String8 QCameraParameters::createFpsRangeString(const cam_fps_range_t* fps,
+        size_t len, int &default_fps_index)
+{
+    String8 str;
+    char buffer[32];
+    int max_range = 0;
+    int min_fps, max_fps;
+
+    if (len > 0) {
+        // x1000: rates are published in milli-fps, matching the Android
+        // Camera.Parameters preview-fps-range convention.
+        min_fps = int(fps[0].min_fps * 1000);
+        max_fps = int(fps[0].max_fps * 1000);
+        max_range = max_fps - min_fps;
+        default_fps_index = 0;
+        snprintf(buffer, sizeof(buffer), "(%d,%d)", min_fps, max_fps);
+        str.append(buffer);
+    }
+    for (size_t i = 1; i < len; i++) {
+        min_fps = int(fps[i].min_fps * 1000);
+        max_fps = int(fps[i].max_fps * 1000);
+        // Track the widest span as the default range.
+        if (max_range < (max_fps - min_fps)) {
+            max_range = max_fps - min_fps;
+            default_fps_index = (int)i;
+        }
+        snprintf(buffer, sizeof(buffer), ",(%d,%d)", min_fps, max_fps);
+        str.append(buffer);
+    }
+    return str;
+}
+
+/*===========================================================================
+ * FUNCTION   : lookupAttr
+ *
+ * DESCRIPTION: forward lookup -- find the value bound to a given name
+ *
+ * PARAMETERS :
+ *   @arr     : map contains <name, value>
+ *   @len     : size of the map
+ *   @name    : name to be looked up (NULL tolerated)
+ *
+ * RETURN     : valid value if found
+ *              NAME_NOT_FOUND if not found
+ *==========================================================================*/
+template <class mapType> int lookupAttr(const mapType *arr,
+        size_t len, const char *name)
+{
+    // A NULL name can never match anything.
+    if (NULL == name) {
+        return NAME_NOT_FOUND;
+    }
+    for (size_t idx = 0; idx < len; idx++) {
+        if (strcmp(arr[idx].desc, name) == 0) {
+            return arr[idx].val;
+        }
+    }
+    return NAME_NOT_FOUND;
+}
+
+/*===========================================================================
+ * FUNCTION   : lookupNameByValue
+ *
+ * DESCRIPTION: reverse lookup -- find the name bound to a given value
+ *
+ * PARAMETERS :
+ *   @arr     : map contains <name, value>
+ *   @len     : size of the map
+ *   @value   : value to be looked up
+ *
+ * RETURN     : name str of the first matching entry, or NULL if not found
+ *==========================================================================*/
+template <class mapType> const char *lookupNameByValue(const mapType *arr,
+        size_t len, int value)
+{
+    for (size_t idx = 0; idx < len; idx++) {
+        if (value == arr[idx].val) {
+            // First match wins; its desc is returned even if NULL.
+            return arr[idx].desc;
+        }
+    }
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : setPreviewSize
+ *
+ * DESCRIPTION: set preview size from user setting; flags a preview restart
+ *              when the size actually changes
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPreviewSize(const QCameraParameters& params)
+{
+    int width = 0, height = 0;
+    int old_width = 0, old_height = 0;
+    params.getPreviewSize(&width, &height);
+    CameraParameters::getPreviewSize(&old_width, &old_height);
+
+    // Validate the preview size against the capability table.
+    for (size_t i = 0; i < m_pCapability->preview_sizes_tbl_cnt; ++i) {
+        if (width ==  m_pCapability->preview_sizes_tbl[i].width
+           && height ==  m_pCapability->preview_sizes_tbl[i].height) {
+            // check if need to restart preview in case of preview size change
+            if (width != old_width || height != old_height) {
+                LOGI("Requested preview size %d x %d", width, height);
+                m_bNeedRestart = true;
+            }
+            // set the new value
+            CameraParameters::setPreviewSize(width, height);
+            return NO_ERROR;
+        }
+    }
+    // Unsupported size: the secondary camera of a related-camera pair falls
+    // back to a property-configured size (or the first table entry) instead
+    // of failing, so it can keep tracking the main camera.
+    if (m_relCamSyncInfo.mode == CAM_MODE_SECONDARY) {
+        char prop[PROPERTY_VALUE_MAX];
+        // set prop to configure aux preview size
+        property_get("persist.camera.aux.preview.size", prop, "0");
+        parse_pair(prop, &width, &height, 'x', NULL);
+        bool foundMatch = false;
+        for (size_t i = 0; i < m_pCapability->preview_sizes_tbl_cnt; ++i) {
+            if (width ==  m_pCapability->preview_sizes_tbl[i].width &&
+                    height ==  m_pCapability->preview_sizes_tbl[i].height) {
+               foundMatch = true;
+            }
+        }
+        if (!foundMatch) {
+            width = m_pCapability->preview_sizes_tbl[0].width;
+            height = m_pCapability->preview_sizes_tbl[0].height;
+        }
+        // check if need to restart preview in case of preview size change
+        if (width != old_width || height != old_height) {
+            m_bNeedRestart = true;
+        }
+        CameraParameters::setPreviewSize(width, height);
+        LOGH("Secondary Camera: preview size %d x %d", width, height);
+        return NO_ERROR;
+    }
+
+    LOGE("Invalid preview size requested: %dx%d", width, height);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setPictureSize
+ *
+ * DESCRIPTION: set picture size from user setting; validates against either
+ *              the capability table or the reprocess-scale table, and flags
+ *              a restart when ZSL/recording is active and the size changed
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPictureSize(const QCameraParameters& params)
+{
+    int width, height;
+    params.getPictureSize(&width, &height);
+    int old_width, old_height;
+    CameraParameters::getPictureSize(&old_width, &old_height);
+
+    // Validate the picture size
+    if(!m_reprocScaleParam.isScaleEnabled()){
+        for (size_t i = 0; i < m_pCapability->picture_sizes_tbl_cnt; ++i) {
+            if (width ==  m_pCapability->picture_sizes_tbl[i].width
+               && height ==  m_pCapability->picture_sizes_tbl[i].height) {
+                // check if need to restart preview in case of picture size change
+                if ((m_bZslMode || m_bRecordingHint) &&
+                    (width != old_width || height != old_height)) {
+                    LOGI("Requested picture size %d x %d", width, height);
+                    m_bNeedRestart = true;
+                }
+                // set the new value
+                CameraParameters::setPictureSize(width, height);
+                // Update View angles based on Picture Aspect ratio
+                updateViewAngles();
+                return NO_ERROR;
+            }
+        }
+    }else{
+        //should use scaled picture size table to validate
+        if(m_reprocScaleParam.setValidatePicSize(width, height) == NO_ERROR){
+            // check if need to restart preview in case of picture size change
+            if ((m_bZslMode || m_bRecordingHint) &&
+                (width != old_width || height != old_height)) {
+                m_bNeedRestart = true;
+            }
+            // set the new value
+            char val[32];
+            snprintf(val, sizeof(val), "%dx%d", width, height);
+            updateParamEntry(KEY_PICTURE_SIZE, val);
+            LOGH("%s", val);
+            // Update View angles based on Picture Aspect ratio
+            updateViewAngles();
+            return NO_ERROR;
+        }
+    }
+    // Unsupported size: the secondary camera of a related-camera pair falls
+    // back to a property-configured size (or the first table entry) instead
+    // of failing.
+    if (m_relCamSyncInfo.mode == CAM_MODE_SECONDARY) {
+        char prop[PROPERTY_VALUE_MAX];
+        // set prop to configure aux picture size
+        property_get("persist.camera.aux.picture.size", prop, "0");
+        parse_pair(prop, &width, &height, 'x', NULL);
+        bool foundMatch = false;
+        for (size_t i = 0; i < m_pCapability->picture_sizes_tbl_cnt; ++i) {
+            if (width ==  m_pCapability->picture_sizes_tbl[i].width &&
+                    height ==  m_pCapability->picture_sizes_tbl[i].height) {
+               foundMatch = true;
+            }
+        }
+        if (!foundMatch) {
+            width = m_pCapability->picture_sizes_tbl[0].width;
+            height = m_pCapability->picture_sizes_tbl[0].height;
+        }
+        // check if need to restart preview in case of preview size change
+        if (width != old_width || height != old_height) {
+            m_bNeedRestart = true;
+        }
+        char val[32];
+        snprintf(val, sizeof(val), "%dx%d", width, height);
+        set(KEY_PICTURE_SIZE, val);
+        LOGH("Secondary Camera: picture size %s", val);
+        return NO_ERROR;
+    }
+    LOGE("Invalid picture size requested: %dx%d", width, height);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : updateViewAngles
+ *
+ * DESCRIPTION: Update the Horizontal & Vertical view angles based on the
+ *              aspect ratios of the current picture size and the maximum
+ *              snapshot size
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::updateViewAngles()
+{
+    double stillAspectRatio, maxPictureAspectRatio;
+    int stillWidth, stillHeight, maxWidth, maxHeight;
+    // The crop factors from the full sensor array to the still picture crop region
+    double horizCropFactor = 1.f,vertCropFactor = 1.f;
+    float horizViewAngle, vertViewAngle, maxHfov, maxVfov;
+
+    // Get current Picture & max Snapshot sizes
+    getPictureSize(&stillWidth, &stillHeight);
+    maxWidth  = m_pCapability->picture_sizes_tbl[0].width;
+    maxHeight = m_pCapability->picture_sizes_tbl[0].height;
+
+    // Get default maximum FOV from corresponding sensor driver
+    maxHfov = m_pCapability->hor_view_angle;
+    maxVfov = m_pCapability->ver_view_angle;
+
+    stillAspectRatio = (double)stillWidth/stillHeight;
+    maxPictureAspectRatio = (double)maxWidth/maxHeight;
+    LOGD("Stillwidth: %d, height: %d", stillWidth, stillHeight);
+    LOGD("Max width: %d, height: %d", maxWidth, maxHeight);
+    LOGD("still aspect: %f, Max Pic Aspect: %f",
+            stillAspectRatio, maxPictureAspectRatio);
+
+    // A narrower-than-max aspect ratio crops horizontally; a wider one crops
+    // vertically. Only one factor drops below 1.0.
+    if (stillAspectRatio < maxPictureAspectRatio)
+        horizCropFactor = stillAspectRatio/maxPictureAspectRatio;
+    else
+        vertCropFactor = maxPictureAspectRatio/stillAspectRatio;
+
+    LOGD("horizCropFactor %f, vertCropFactor %f",
+             horizCropFactor, vertCropFactor);
+
+    // Now derive the final FOV's based on field of view formula is i.e,
+    // angle of view = 2 * arctangent ( d / 2f )
+    // where d is the physical sensor dimension of interest, and f is
+    // the focal length. This only applies to rectilinear sensors, for focusing
+    // at distances >> f, etc.
+    // Here d/2f is nothing but the Maximum Horizontal or Veritical FOV
+    horizViewAngle = (180/PI)*2*atan(horizCropFactor*tan((maxHfov/2)*(PI/180)));
+    // Fix: the vertical angle must be scaled by the *vertical* crop factor.
+    // The previous code reused horizCropFactor here, leaving vertCropFactor
+    // computed but never used (horizontal cropping does not shrink the
+    // vertical field of view).
+    vertViewAngle = (180/PI)*2*atan(vertCropFactor*tan((maxVfov/2)*(PI/180)));
+
+    setFloat(QCameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, horizViewAngle);
+    setFloat(QCameraParameters::KEY_VERTICAL_VIEW_ANGLE, vertViewAngle);
+    LOGH("Final horizViewAngle %f, vertViewAngle %f",
+            horizViewAngle, vertViewAngle);
+}
+
+/*===========================================================================
+ * FUNCTION   : setVideoSize
+ *
+ * DESCRIPTION: set video size from user setting; falls back to the preview
+ *              dimensions when the app did not request a record size
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setVideoSize(const QCameraParameters& params)
+{
+    const char *str= NULL;
+    int width, height;
+    str = params.get(KEY_VIDEO_SIZE);
+    int old_width, old_height;
+    CameraParameters::getVideoSize(&old_width, &old_height);
+    if(!str) {
+        //If application didn't set this parameter string, use the values from
+        //getPreviewSize() as video dimensions.
+        params.getPreviewSize(&width, &height);
+        LOGW("No Record Size requested, use the preview dimensions");
+    } else {
+        params.getVideoSize(&width, &height);
+    }
+
+    // Validate the video size against the capability table.
+    for (size_t i = 0; i < m_pCapability->video_sizes_tbl_cnt; ++i) {
+        if (width ==  m_pCapability->video_sizes_tbl[i].width
+                && height ==  m_pCapability->video_sizes_tbl[i].height) {
+            // check if need to restart preview in case of video size change
+            if (m_bRecordingHint &&
+               (width != old_width || height != old_height)) {
+                m_bNeedRestart = true;
+            }
+
+            // set the new value
+            LOGH("Requested video size %d x %d", width, height);
+            CameraParameters::setVideoSize(width, height);
+            return NO_ERROR;
+        }
+    }
+    // Unsupported size: the secondary camera of a related-camera pair falls
+    // back to the first supported entry instead of failing.
+    if (m_relCamSyncInfo.mode == CAM_MODE_SECONDARY) {
+        // Set the default preview size for secondary camera
+        width = m_pCapability->video_sizes_tbl[0].width;
+        height = m_pCapability->video_sizes_tbl[0].height;
+        // check if need to restart preview in case of preview size change
+        if (width != old_width || height != old_height) {
+            m_bNeedRestart = true;
+        }
+
+        CameraParameters::setVideoSize(width, height);
+        LOGH("Secondary Camera: video size %d x %d",
+                 width, height);
+        return NO_ERROR;
+    }
+
+    LOGE("Error !! Invalid video size requested: %dx%d", width, height);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : getLiveSnapshotSize
+ *
+ * DESCRIPTION: get live snapshot size
+ *
+ * PARAMETERS : dim - Update dim with the liveshot size
+ *
+ * NOTE       : not a pure getter -- when recording at 4K2K it may clamp and
+ *              persist m_LiveSnapshotSize to the current video resolution
+ *              before reporting it
+ *==========================================================================*/
+void QCameraParameters::getLiveSnapshotSize(cam_dimension_t &dim)
+{
+    if(is4k2kVideoResolution()) {
+        // We support maximum 8M liveshot @4K2K video resolution
+        cam_dimension_t resolution = {0, 0};
+        CameraParameters::getVideoSize(&resolution.width, &resolution.height);
+        // Clamp the stored liveshot size to the video resolution.
+        if((m_LiveSnapshotSize.width > resolution.width) ||
+                (m_LiveSnapshotSize.height > resolution.height)) {
+            m_LiveSnapshotSize.width = resolution.width;
+            m_LiveSnapshotSize.height = resolution.height;
+        }
+    }
+    dim = m_LiveSnapshotSize;
+    LOGH("w x h: %d x %d", dim.width, dim.height);
+}
+
+/*===========================================================================
+ * FUNCTION   : setLiveSnapshotSize
+ *
+ * DESCRIPTION: set live snapshot size; starts from the requested picture
+ *              size, then adjusts for 4K2K recording, video-HDR, HSR/HFR
+ *              restrictions, aspect-ratio matching, and finally an optional
+ *              property override
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setLiveSnapshotSize(const QCameraParameters& params)
+{
+    char value[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.opt.livepic", value, "1");
+    bool useOptimal = atoi(value) > 0 ? true : false;
+    bool vHdrOn;
+    int32_t liveSnapWidth = 0, liveSnapHeight = 0;
+    // use picture size from user setting
+    params.getPictureSize(&m_LiveSnapshotSize.width, &m_LiveSnapshotSize.height);
+
+    size_t livesnapshot_sizes_tbl_cnt =
+            m_pCapability->livesnapshot_sizes_tbl_cnt;
+    cam_dimension_t *livesnapshot_sizes_tbl =
+            &m_pCapability->livesnapshot_sizes_tbl[0];
+
+    if(is4k2kVideoResolution()) {
+        // We support maximum 8M liveshot @4K2K video resolution
+        cam_dimension_t resolution = {0, 0};
+        CameraParameters::getVideoSize(&resolution.width, &resolution.height);
+        if((m_LiveSnapshotSize.width > resolution.width) ||
+                (m_LiveSnapshotSize.height > resolution.height)) {
+            m_LiveSnapshotSize.width = resolution.width;
+            m_LiveSnapshotSize.height = resolution.height;
+        }
+    }
+
+    // check if HFR is enabled
+    const char *hfrStr = params.get(KEY_QC_VIDEO_HIGH_FRAME_RATE);
+    cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
+    const char *hsrStr = params.get(KEY_QC_VIDEO_HIGH_SPEED_RECORDING);
+
+    const char *vhdrStr = params.get(KEY_QC_VIDEO_HDR);
+    vHdrOn = (vhdrStr != NULL && (0 == strcmp(vhdrStr,"on"))) ? true : false;
+    // Video HDR restricts the candidate table to the vHDR-specific sizes.
+    if (vHdrOn) {
+        livesnapshot_sizes_tbl_cnt = m_pCapability->vhdr_livesnapshot_sizes_tbl_cnt;
+        livesnapshot_sizes_tbl = &m_pCapability->vhdr_livesnapshot_sizes_tbl[0];
+    }
+    // NOTE: HSR takes precedence over HFR when both are set (else-if).
+    if ((hsrStr != NULL) && strcmp(hsrStr, "off")) {
+        int32_t hsr = lookupAttr(HFR_MODES_MAP, PARAM_MAP_SIZE(HFR_MODES_MAP), hsrStr);
+        if ((hsr != NAME_NOT_FOUND) && (hsr > CAM_HFR_MODE_OFF)) {
+            // if HSR is enabled, change live snapshot size
+            for (size_t i = 0; i < m_pCapability->hfr_tbl_cnt; i++) {
+                if (m_pCapability->hfr_tbl[i].mode == hsr) {
+                    livesnapshot_sizes_tbl_cnt =
+                            m_pCapability->hfr_tbl[i].livesnapshot_sizes_tbl_cnt;
+                    livesnapshot_sizes_tbl =
+                            &m_pCapability->hfr_tbl[i].livesnapshot_sizes_tbl[0];
+                    hfrMode = m_pCapability->hfr_tbl[i].mode;
+                    break;
+                }
+            }
+        }
+    } else if ((hfrStr != NULL) && strcmp(hfrStr, "off")) {
+        int32_t hfr = lookupAttr(HFR_MODES_MAP, PARAM_MAP_SIZE(HFR_MODES_MAP), hfrStr);
+        if ((hfr != NAME_NOT_FOUND) && (hfr > CAM_HFR_MODE_OFF)) {
+            // if HFR is enabled, change live snapshot size
+            for (size_t i = 0; i < m_pCapability->hfr_tbl_cnt; i++) {
+                if (m_pCapability->hfr_tbl[i].mode == hfr) {
+                    livesnapshot_sizes_tbl_cnt =
+                            m_pCapability->hfr_tbl[i].livesnapshot_sizes_tbl_cnt;
+                    livesnapshot_sizes_tbl =
+                            &m_pCapability->hfr_tbl[i].livesnapshot_sizes_tbl[0];
+                    hfrMode = m_pCapability->hfr_tbl[i].mode;
+                    break;
+                }
+            }
+        }
+    }
+
+    if (useOptimal || hfrMode != CAM_HFR_MODE_OFF || vHdrOn) {
+        bool found = false;
+
+        // first check if picture size is within the list of supported sizes
+        for (size_t i = 0; i < livesnapshot_sizes_tbl_cnt; ++i) {
+            if (m_LiveSnapshotSize.width == livesnapshot_sizes_tbl[i].width &&
+                m_LiveSnapshotSize.height == livesnapshot_sizes_tbl[i].height) {
+                found = true;
+                break;
+            }
+        }
+
+        if (!found) {
+            // use optimal live snapshot size from supported list,
+            // that has same preview aspect ratio
+            int width = 0, height = 0;
+            params.getPreviewSize(&width, &height);
+
+            double previewAspectRatio = (double)width / height;
+            for (size_t i = 0; i < livesnapshot_sizes_tbl_cnt; ++i) {
+                double ratio = (double)livesnapshot_sizes_tbl[i].width /
+                                livesnapshot_sizes_tbl[i].height;
+                if (fabs(previewAspectRatio - ratio) <= ASPECT_TOLERANCE) {
+                    m_LiveSnapshotSize = livesnapshot_sizes_tbl[i];
+                    found = true;
+                    break;
+                }
+            }
+
+            if (!found && ((hfrMode != CAM_HFR_MODE_OFF) || vHdrOn)) {
+                // Cannot find matching aspect ration from supported live snapshot list
+                // choose the max dim from preview and video size
+                LOGD("Cannot find matching aspect ratio, choose max of preview or video size");
+                params.getVideoSize(&m_LiveSnapshotSize.width, &m_LiveSnapshotSize.height);
+                // NOTE(review): "&&" keeps the video size unless preview beats
+                // it in *both* dimensions -- confirm this matches the "max dim"
+                // intent stated above.
+                if (m_LiveSnapshotSize.width < width && m_LiveSnapshotSize.height < height) {
+                    m_LiveSnapshotSize.width = width;
+                    m_LiveSnapshotSize.height = height;
+                }
+            }
+        }
+    }
+    //To read liveshot resolution from setprop instead of matching aspect ratio.
+    //The setprop resolution format should be WxH.
+    //e.g: adb shell setprop persist.camera.liveshot.size 1280x720
+    memset(value, 0, PROPERTY_VALUE_MAX);
+    property_get("persist.camera.liveshot.size", value, "");
+    if (strlen(value) > 0) {
+        char *saveptr = NULL;
+        char *token = strtok_r(value, "x", &saveptr);
+        if (token != NULL) {
+            liveSnapWidth = atoi(token);
+        }
+        token = strtok_r(NULL, "x", &saveptr);
+        if (token != NULL) {
+            liveSnapHeight = atoi(token);
+        }
+        if ((liveSnapWidth!=0) && (liveSnapHeight!=0)) {
+            // Property override is honored only if it names a supported
+            // picture size.
+            for (size_t i = 0; i < m_pCapability->picture_sizes_tbl_cnt; ++i) {
+                if (liveSnapWidth ==  m_pCapability->picture_sizes_tbl[i].width
+                        && liveSnapHeight ==  m_pCapability->picture_sizes_tbl[i].height) {
+                   m_LiveSnapshotSize.width = liveSnapWidth;
+                   m_LiveSnapshotSize.height = liveSnapHeight;
+                   break;
+                }
+            }
+        }
+    }
+    LOGH("live snapshot size %d x %d",
+          m_LiveSnapshotSize.width, m_LiveSnapshotSize.height);
+
+    return NO_ERROR;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : setRawSize
+ *
+ * DESCRIPTION: set raw capture size (header previously said "live snapshot
+ *              size" by copy-paste)
+ *
+ * PARAMETERS :
+ *   @dim     : raw dimension to store
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRawSize(cam_dimension_t &dim)
+{
+    m_rawSize = dim;
+    return NO_ERROR;
+}
+/*===========================================================================
+ * FUNCTION   : setPreviewFormat
+ *
+ * DESCRIPTION: set preview format from user setting; may substitute UBWC
+ *              internally while preserving the app-requested format
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPreviewFormat(const QCameraParameters& params)
+{
+    const char *str = params.getPreviewFormat();
+    int32_t previewFormat = lookupAttr(PREVIEW_FORMATS_MAP,
+            PARAM_MAP_SIZE(PREVIEW_FORMATS_MAP), str);
+    if (previewFormat != NAME_NOT_FOUND) {
+        if (isUBWCEnabled()) {
+            char prop[PROPERTY_VALUE_MAX];
+            int pFormat;
+            memset(prop, 0, sizeof(prop));
+            // UBWC preview is opt-out: the property defaults to enabled.
+            property_get("persist.camera.preview.ubwc", prop, "1");
+
+            pFormat = atoi(prop);
+            if (pFormat == 1) {
+                // Stream UBWC internally; mAppPreviewFormat keeps the format
+                // the application actually requested.
+                mPreviewFormat = CAM_FORMAT_YUV_420_NV12_UBWC;
+                mAppPreviewFormat = (cam_format_t)previewFormat;
+            } else {
+                mPreviewFormat = (cam_format_t)previewFormat;
+                mAppPreviewFormat = (cam_format_t)previewFormat;
+            }
+        } else {
+            mPreviewFormat = (cam_format_t)previewFormat;
+            mAppPreviewFormat = (cam_format_t)previewFormat;
+        }
+        CameraParameters::setPreviewFormat(str);
+        LOGH("format %d\n", mPreviewFormat);
+        return NO_ERROR;
+    }
+    LOGE("Invalid preview format value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setPictureFormat
+ *
+ * DESCRIPTION: set picture format from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPictureFormat(const QCameraParameters& params)
+{
+    const char *str = params.getPictureFormat();
+    // Any format not present in PICTURE_TYPES_MAP (including NULL) is rejected.
+    int32_t pictureFormat = lookupAttr(PICTURE_TYPES_MAP, PARAM_MAP_SIZE(PICTURE_TYPES_MAP), str);
+    if (pictureFormat != NAME_NOT_FOUND) {
+        mPictureFormat = pictureFormat;
+
+        CameraParameters::setPictureFormat(str);
+        LOGH("format %d\n", mPictureFormat);
+        return NO_ERROR;
+    }
+    LOGE("Invalid picture format value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setJpegThumbnailSize
+ *
+ * DESCRIPTION: set jpeg thumbnail size from user setting; only sizes listed
+ *              in THUMBNAIL_SIZES_MAP are accepted
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setJpegThumbnailSize(const QCameraParameters& params)
+{
+    int width = params.getInt(KEY_JPEG_THUMBNAIL_WIDTH);
+    int height = params.getInt(KEY_JPEG_THUMBNAIL_HEIGHT);
+
+    LOGD("requested jpeg thumbnail size %d x %d", width, height);
+    // sizeof on the map yields the element count only because
+    // THUMBNAIL_SIZES_MAP is a real array in this translation unit.
+    int sizes_cnt = sizeof(THUMBNAIL_SIZES_MAP) / sizeof(cam_dimension_t);
+    // Validate thumbnail size
+    for (int i = 0; i < sizes_cnt; i++) {
+        if (width == THUMBNAIL_SIZES_MAP[i].width &&
+                height == THUMBNAIL_SIZES_MAP[i].height) {
+           set(KEY_JPEG_THUMBNAIL_WIDTH, width);
+           set(KEY_JPEG_THUMBNAIL_HEIGHT, height);
+           return NO_ERROR;
+        }
+    }
+    LOGE("error: setting jpeg thumbnail size (%d, %d)", width, height);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setBurstLEDOnPeriod
+ *
+ * DESCRIPTION: set burst LED on period; valid range is (0, 800] ms, with a
+ *              sysprop fallback and a final default of 300 ms
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setBurstLEDOnPeriod(const QCameraParameters& params)
+{
+    int nBurstLEDOnPeriod = params.getInt(KEY_QC_SNAPSHOT_BURST_LED_ON_PERIOD);
+    //Check if the LED ON period is within limits
+    if ((nBurstLEDOnPeriod <= 0) || (nBurstLEDOnPeriod > 800)) {
+        // if burst led on period is not set in parameters,
+        // read from sys prop
+        char prop[PROPERTY_VALUE_MAX];
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.camera.led.on.period", prop, "0");
+        nBurstLEDOnPeriod = atoi(prop);
+        // Default to 300ms when neither the params nor the property supply a
+        // usable value.
+        if (nBurstLEDOnPeriod <= 0) {
+            nBurstLEDOnPeriod = 300;
+        }
+    }
+
+    set(KEY_QC_SNAPSHOT_BURST_LED_ON_PERIOD, nBurstLEDOnPeriod);
+    m_nBurstLEDOnPeriod = nBurstLEDOnPeriod;
+    LOGH("Burst LED on period  %u", m_nBurstLEDOnPeriod);
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_BURST_LED_ON_PERIOD,
+            (uint32_t)nBurstLEDOnPeriod)) {
+        return BAD_VALUE;
+    }
+
+    return NO_ERROR;
+}
+
+
+
+/*===========================================================================
+ * FUNCTION   : setRetroActiveBurstNum
+ *
+ * DESCRIPTION: set retro active burst num
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRetroActiveBurstNum(
+        const QCameraParameters& params)
+{
+    // Requested number of retro-active burst snapshots per shutter.
+    int32_t nBurstNum = params.getInt(KEY_QC_NUM_RETRO_BURST_PER_SHUTTER);
+    // Log the previous value distinctly; the original message was identical
+    // to the post-update log below, making traces ambiguous.
+    LOGH("previous m_nRetroBurstNum = %d", m_nRetroBurstNum);
+    if (nBurstNum <= 0) {
+        // if burst number is not set in parameters,
+        // read from sys prop
+        char prop[PROPERTY_VALUE_MAX];
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.camera.retro.number", prop, "0");
+        nBurstNum = atoi(prop);
+        if (nBurstNum < 0) {
+            // Clamp a nonsensical negative property value to "disabled".
+            nBurstNum = 0;
+        }
+    }
+
+    set(KEY_QC_NUM_RETRO_BURST_PER_SHUTTER, nBurstNum);
+
+    m_nRetroBurstNum = nBurstNum;
+    LOGH("m_nRetroBurstNum = %d", m_nRetroBurstNum);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setJpegQuality
+ *
+ * DESCRIPTION: set jpeg encoding quality from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setJpegQuality(const QCameraParameters& params)
+{
+    int32_t rc = NO_ERROR;
+    // Main image quality must lie in [0, 100].
+    int quality = params.getInt(KEY_JPEG_QUALITY);
+    if (quality >= 0 && quality <= 100) {
+        set(KEY_JPEG_QUALITY, quality);
+    } else {
+        LOGE("Invalid jpeg quality=%d", quality);
+        rc = BAD_VALUE;
+    }
+
+    // Thumbnail quality is validated independently; a failure in either
+    // check is reported through the single BAD_VALUE return code.
+    quality = params.getInt(KEY_JPEG_THUMBNAIL_QUALITY);
+    if (quality >= 0 && quality <= 100) {
+        set(KEY_JPEG_THUMBNAIL_QUALITY, quality);
+    } else {
+        LOGE("Invalid jpeg thumbnail quality=%d", quality);
+        rc = BAD_VALUE;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setOrientation
+ *
+ * DESCRIPTION: set orientation from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setOrientation(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_ORIENTATION);
+
+    // An absent key is not an error; validate only when the app set it.
+    if (str != NULL) {
+        if (strcmp(str, portrait) == 0 || strcmp(str, landscape) == 0) {
+            // Camera service needs this to decide if the preview frames and raw
+            // pictures should be rotated.
+            set(KEY_QC_ORIENTATION, str);
+        } else {
+            LOGE("Invalid orientation value: %s", str);
+            return BAD_VALUE;
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAutoExposure
+ *
+ * DESCRIPTION: set auto exposure value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAutoExposure(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_AUTO_EXPOSURE);
+    const char *prev_str = get(KEY_QC_AUTO_EXPOSURE);
+    // Forward to the string-based overload only when the value changed,
+    // avoiding redundant backend updates.
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setAutoExposure(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setPreviewFpsRange
+ *
+ * DESCRIPTION: set preview FPS range from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPreviewFpsRange(const QCameraParameters& params)
+{
+    int minFps,maxFps;
+    int prevMinFps, prevMaxFps, vidMinFps, vidMaxFps;
+    int rc = NO_ERROR;
+    bool found = false, updateNeeded = false;
+
+    CameraParameters::getPreviewFpsRange(&prevMinFps, &prevMaxFps);
+    params.getPreviewFpsRange(&minFps, &maxFps);
+
+    LOGH("FpsRange Values:(%d, %d)", prevMinFps, prevMaxFps);
+    LOGH("Requested FpsRange Values:(%d, %d)", minFps, maxFps);
+
+    //first check if we need to change fps because of HFR mode change
+    updateNeeded = UpdateHFRFrameRate(params);
+    if (updateNeeded) {
+        // An HFR/HSR mode change requires a stream restart.
+        m_bNeedRestart = true;
+        rc = setHighFrameRate(mHfrMode);
+        if (rc != NO_ERROR) goto end;
+    }
+    LOGH("UpdateHFRFrameRate %d", updateNeeded);
+
+    vidMinFps = (int)m_hfrFpsRange.video_min_fps;
+    vidMaxFps = (int)m_hfrFpsRange.video_max_fps;
+
+    if(minFps == prevMinFps && maxFps == prevMaxFps) {
+        if ( m_bFixedFrameRateSet ) {
+            // App requested a fixed frame rate; values are kept in
+            // fps * 1000 units to match the capability table below.
+            minFps = params.getPreviewFrameRate() * 1000;
+            maxFps = params.getPreviewFrameRate() * 1000;
+            m_bFixedFrameRateSet = false;
+        } else if (!updateNeeded) {
+            LOGH("No change in FpsRange");
+            rc = NO_ERROR;
+            goto end;
+        }
+    }
+    for(size_t i = 0; i < m_pCapability->fps_ranges_tbl_cnt; i++) {
+        // if the value is in the supported list
+        if (minFps >= m_pCapability->fps_ranges_tbl[i].min_fps * 1000 &&
+                maxFps <= m_pCapability->fps_ranges_tbl[i].max_fps * 1000) {
+            found = true;
+            // %zu matches the size_t loop index; the previous %d was a
+            // varargs type mismatch on LP64 builds.
+            LOGH("FPS i=%zu : minFps = %d, maxFps = %d"
+                    " vidMinFps = %d, vidMaxFps = %d",
+                     i, minFps, maxFps,
+                    (int)m_hfrFpsRange.video_min_fps,
+                    (int)m_hfrFpsRange.video_max_fps);
+            if ((0.0f >= m_hfrFpsRange.video_min_fps) ||
+                    (0.0f >= m_hfrFpsRange.video_max_fps)) {
+                // No HFR override active: video fps follows preview fps.
+                vidMinFps = minFps;
+                vidMaxFps = maxFps;
+            }
+            else {
+                vidMinFps = (int)m_hfrFpsRange.video_min_fps;
+                vidMaxFps = (int)m_hfrFpsRange.video_max_fps;
+            }
+
+            setPreviewFpsRange(minFps, maxFps, vidMinFps, vidMaxFps);
+            break;
+        }
+    }
+    if(found == false){
+        LOGE("error: FPS range value not supported");
+        rc = BAD_VALUE;
+    }
+end:
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : UpdateHFRFrameRate
+ *
+ * DESCRIPTION: set preview FPS range based on HFR setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : bool true/false
+ *                  true -if HAL needs to overwrite FPS range set by app, false otherwise.
+ *==========================================================================*/
+
+bool QCameraParameters::UpdateHFRFrameRate(const QCameraParameters& params)
+{
+    bool updateNeeded = false;
+    int min_fps, max_fps;
+    int32_t hfrMode = CAM_HFR_MODE_OFF;
+    int32_t newHfrMode = CAM_HFR_MODE_OFF;
+
+    int parm_minfps,parm_maxfps;
+    int prevMinFps, prevMaxFps;
+    CameraParameters::getPreviewFpsRange(&prevMinFps, &prevMaxFps);
+    params.getPreviewFpsRange(&parm_minfps, &parm_maxfps);
+    LOGH("CameraParameters - : minFps = %d, maxFps = %d ",
+                 prevMinFps, prevMaxFps);
+    LOGH("Requested params - : minFps = %d, maxFps = %d ",
+                 parm_minfps, parm_maxfps);
+
+    const char *hfrStr = params.get(KEY_QC_VIDEO_HIGH_FRAME_RATE);
+    const char *hsrStr = params.get(KEY_QC_VIDEO_HIGH_SPEED_RECORDING);
+
+    const char *prev_hfrStr = CameraParameters::get(KEY_QC_VIDEO_HIGH_FRAME_RATE);
+    const char *prev_hsrStr = CameraParameters::get(KEY_QC_VIDEO_HIGH_SPEED_RECORDING);
+
+    // Mirror any changed HFR/HSR strings into our own parameter set.
+    if ((hfrStr != NULL) && (prev_hfrStr != NULL) && strcmp(hfrStr, prev_hfrStr)) {
+        updateParamEntry(KEY_QC_VIDEO_HIGH_FRAME_RATE, hfrStr);
+    }
+
+    if ((hsrStr != NULL) && (prev_hsrStr != NULL) && strcmp(hsrStr, prev_hsrStr)) {
+        updateParamEntry(KEY_QC_VIDEO_HIGH_SPEED_RECORDING, hsrStr);
+
+    }
+
+    // check if HFR is enabled
+    // HFR takes precedence over HSR when both are non-"off".
+    if ((hfrStr != NULL) && strcmp(hfrStr, "off")) {
+        hfrMode = lookupAttr(HFR_MODES_MAP, PARAM_MAP_SIZE(HFR_MODES_MAP), hfrStr);
+        if (NAME_NOT_FOUND != hfrMode) newHfrMode = hfrMode;
+    }
+    // check if HSR is enabled
+    else if ((hsrStr != NULL) && strcmp(hsrStr, "off")) {
+        hfrMode = lookupAttr(HFR_MODES_MAP, PARAM_MAP_SIZE(HFR_MODES_MAP), hsrStr);
+        if (NAME_NOT_FOUND != hfrMode) newHfrMode = hfrMode;
+    }
+    LOGH("prevHfrMode - %d, currentHfrMode = %d ",
+                 mHfrMode, newHfrMode);
+
+    if (mHfrMode != newHfrMode) {
+        updateNeeded = true;
+        mHfrMode = newHfrMode;
+        // Map the HFR mode to a fixed video fps in fps * 1000 units.
+        switch (mHfrMode) {
+            case CAM_HFR_MODE_60FPS:
+                min_fps = 60000;
+                max_fps = 60000;
+                break;
+            case CAM_HFR_MODE_90FPS:
+                min_fps = 90000;
+                max_fps = 90000;
+                break;
+            case CAM_HFR_MODE_120FPS:
+                min_fps = 120000;
+                max_fps = 120000;
+                break;
+            case CAM_HFR_MODE_150FPS:
+                min_fps = 150000;
+                max_fps = 150000;
+                break;
+            case CAM_HFR_MODE_180FPS:
+                min_fps = 180000;
+                max_fps = 180000;
+                break;
+            case CAM_HFR_MODE_210FPS:
+                min_fps = 210000;
+                max_fps = 210000;
+                break;
+            case CAM_HFR_MODE_240FPS:
+                min_fps = 240000;
+                max_fps = 240000;
+                break;
+            case CAM_HFR_MODE_480FPS:
+                min_fps = 480000;
+                max_fps = 480000;
+                break;
+            case CAM_HFR_MODE_OFF:
+            default:
+                // Set Video Fps to zero
+                min_fps = 0;
+                max_fps = 0;
+                break;
+        }
+        m_hfrFpsRange.video_min_fps = (float)min_fps;
+        m_hfrFpsRange.video_max_fps = (float)max_fps;
+
+        LOGH("HFR mode (%d) Set video FPS : minFps = %d, maxFps = %d ",
+                 mHfrMode, min_fps, max_fps);
+    }
+
+    // Remember if HFR mode is ON
+    if ((mHfrMode > CAM_HFR_MODE_OFF) && (mHfrMode < CAM_HFR_MODE_MAX)) {
+        LOGH("HFR mode is ON");
+        m_bHfrMode = true;
+    } else {
+        m_hfrFpsRange.video_min_fps = 0;
+        m_hfrFpsRange.video_max_fps = 0;
+        m_bHfrMode = false;
+        LOGH("HFR mode is OFF");
+    }
+    m_hfrFpsRange.min_fps = (float)parm_minfps;
+    m_hfrFpsRange.max_fps = (float)parm_maxfps;
+
+    // Above 120fps, frames are delivered in batches; the batch size is the
+    // ratio of video fps to the requested preview fps.
+    if (m_bHfrMode && (mHfrMode > CAM_HFR_MODE_120FPS)
+            && (parm_maxfps != 0)) {
+        //Configure buffer batch count to use batch mode for higher fps
+        setBufBatchCount((int8_t)(m_hfrFpsRange.video_max_fps / parm_maxfps));
+    } else {
+        //Reset batch count and update KEY for encoder
+        setBufBatchCount(0);
+    }
+    return updateNeeded;
+}
+
+/*===========================================================================
+ * FUNCTION   : setPreviewFrameRate
+ *
+ * DESCRIPTION: set preview frame rate from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPreviewFrameRate(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_PREVIEW_FRAME_RATE);
+    const char *prev_str = get(KEY_PREVIEW_FRAME_RATE);
+
+    if ( str ) {
+        // NOTE(review): when prev_str is NULL the new value is silently
+        // ignored -- confirm this is intended.
+        if ( prev_str &&
+             strcmp(str, prev_str)) {
+            LOGD("Requested Fixed Frame Rate %s", str);
+            updateParamEntry(KEY_PREVIEW_FRAME_RATE, str);
+            // Flag consumed by setPreviewFpsRange() to honor the fixed rate.
+            m_bFixedFrameRateSet = true;
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setEffect
+ *
+ * DESCRIPTION: set effect value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setEffect(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_EFFECT);
+    const char *prev_str = get(KEY_EFFECT);
+
+    // A non-"none" persist property overrides the app-requested effect.
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.effect", prop, "none");
+
+    if (strcmp(prop, "none")) {
+        if ((prev_str == NULL) ||
+                (strcmp(prop, prev_str) != 0) ||
+                (m_bUpdateEffects == true)) {
+            // m_bUpdateEffects forces a re-apply even without a value change.
+            m_bUpdateEffects = false;
+            return setEffect(prop);
+        }
+    } else if (str != NULL) {
+        if ((prev_str == NULL) ||
+                (strcmp(str, prev_str) != 0) ||
+                (m_bUpdateEffects == true)) {
+            m_bUpdateEffects = false;
+            return setEffect(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFocusMode
+ *
+ * DESCRIPTION: set focus mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFocusMode(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_FOCUS_MODE);
+    const char *prev_str = get(KEY_FOCUS_MODE);
+    // Forward to the string-based overload only when the mode changed.
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setFocusMode(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFocusPosition
+ *
+ * DESCRIPTION: set focus position from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t  QCameraParameters::setFocusPosition(const QCameraParameters& params)
+{
+    const char *focus_str = params.get(KEY_FOCUS_MODE);
+    const char *prev_focus_str = get(KEY_FOCUS_MODE);
+
+    if (NULL == focus_str) {
+        return NO_ERROR;
+    }
+
+    LOGD("current focus mode: %s", focus_str);
+    // Manual focus position only applies in manual-position focus mode.
+    if (strcmp(focus_str, FOCUS_MODE_MANUAL_POSITION)) {
+        LOGH(", dont set focus pos to back-end!");
+        return NO_ERROR;
+    }
+
+    const char *pos = params.get(KEY_QC_MANUAL_FOCUS_POSITION);
+    const char *prev_pos = get(KEY_QC_MANUAL_FOCUS_POSITION);
+    const char *type = params.get(KEY_QC_MANUAL_FOCUS_POS_TYPE);
+    const char *prev_type = get(KEY_QC_MANUAL_FOCUS_POS_TYPE);
+
+    // focus_str is already known non-NULL here (checked above); push to the
+    // backend only when position, type, or focus mode changed.
+    if ((pos != NULL) && (type != NULL) && (focus_str != NULL)) {
+        if (prev_pos  == NULL || (strcmp(pos, prev_pos) != 0) ||
+            prev_type == NULL || (strcmp(type, prev_type) != 0) ||
+            prev_focus_str == NULL || (strcmp(focus_str, prev_focus_str) != 0)) {
+            return setFocusPosition(type, pos);
+        }
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setBrightness
+ *
+ * DESCRIPTION: set brightness control value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setBrightness(const QCameraParameters& params)
+{
+    // Nothing to do when the app did not supply a brightness setting.
+    // (Check hoisted before the getInt calls it guards; log typo
+    // "Brigtness" fixed.)
+    if(params.get(KEY_QC_BRIGHTNESS) == NULL) {
+       LOGH("Brightness not set by App ");
+       return NO_ERROR;
+    }
+
+    int currentBrightness = getInt(KEY_QC_BRIGHTNESS);
+    int brightness = params.getInt(KEY_QC_BRIGHTNESS);
+    if (currentBrightness !=  brightness) {
+        // Validate against the sensor capability range before applying.
+        if (brightness >= m_pCapability->brightness_ctrl.min_value &&
+            brightness <= m_pCapability->brightness_ctrl.max_value) {
+            LOGD("new brightness value : %d ", brightness);
+            return setBrightness(brightness);
+        } else {
+            LOGE("invalid value %d out of (%d, %d)",
+                   brightness,
+                  m_pCapability->brightness_ctrl.min_value,
+                  m_pCapability->brightness_ctrl.max_value);
+            return BAD_VALUE;
+        }
+    } else {
+        LOGD("No brightness value changed.");
+        return NO_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getBrightness
+ *
+ * DESCRIPTION: get brightness control value from user setting
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int type of current brightness value
+ *==========================================================================*/
+int QCameraParameters::getBrightness()
+{
+    // Current brightness level as stored in the parameter set.
+    const int brightness = getInt(KEY_QC_BRIGHTNESS);
+    return brightness;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSharpness
+ *
+ * DESCRIPTION: set sharpness control value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSharpness(const QCameraParameters& params)
+{
+    // Misspelled local "shaprness" renamed; log typo fixed below.
+    int sharpness = params.getInt(KEY_QC_SHARPNESS);
+    int prev_sharp = getInt(KEY_QC_SHARPNESS);
+
+    // Nothing to do when the app did not supply a sharpness setting.
+    if(params.get(KEY_QC_SHARPNESS) == NULL) {
+       LOGH("Sharpness not set by App ");
+       return NO_ERROR;
+    }
+    if (prev_sharp != sharpness) {
+        // Validate against the sensor capability range before applying.
+        if((sharpness >= m_pCapability->sharpness_ctrl.min_value) &&
+           (sharpness <= m_pCapability->sharpness_ctrl.max_value)) {
+            LOGD("new sharpness value : %d ", sharpness);
+            return setSharpness(sharpness);
+        } else {
+            LOGE("invalid value %d out of (%d, %d)",
+                   sharpness,
+                  m_pCapability->sharpness_ctrl.min_value,
+                  m_pCapability->sharpness_ctrl.max_value);
+            return BAD_VALUE;
+        }
+    } else {
+        LOGD("No value change in sharpness");
+        return NO_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setSkinToneEnhancement
+ *
+ * DESCRIPTION: set skin tone enhancement factor from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSkinToneEnhancement(const QCameraParameters& params)
+{
+    int sceFactor = params.getInt(KEY_QC_SCE_FACTOR);
+    int prev_sceFactor = getInt(KEY_QC_SCE_FACTOR);
+
+    // Nothing to do when the app did not supply the SCE factor.
+    if(params.get(KEY_QC_SCE_FACTOR) == NULL) {
+       LOGH("Skintone enhancement not set by App ");
+       return NO_ERROR;
+    }
+    if (prev_sceFactor != sceFactor) {
+        // Validate against the sensor capability range before applying.
+        if((sceFactor >= m_pCapability->sce_ctrl.min_value) &&
+           (sceFactor <= m_pCapability->sce_ctrl.max_value)) {
+            LOGD("new Skintone Enhancement value : %d ", sceFactor);
+            return setSkinToneEnhancement(sceFactor);
+        } else {
+            LOGE("invalid value %d out of (%d, %d)",
+                   sceFactor,
+                  m_pCapability->sce_ctrl.min_value,
+                  m_pCapability->sce_ctrl.max_value);
+            return BAD_VALUE;
+        }
+    } else {
+        LOGD("No value change in skintone enhancement factor");
+        return NO_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setSaturation
+ *
+ * DESCRIPTION: set saturation control value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSaturation(const QCameraParameters& params)
+{
+    int saturation = params.getInt(KEY_QC_SATURATION);
+    int prev_sat = getInt(KEY_QC_SATURATION);
+
+    // Nothing to do when the app did not supply a saturation setting.
+    if(params.get(KEY_QC_SATURATION) == NULL) {
+       LOGH("Saturation not set by App ");
+       return NO_ERROR;
+    }
+    if (prev_sat !=  saturation) {
+        // Validate against the sensor capability range before applying.
+        if((saturation >= m_pCapability->saturation_ctrl.min_value) &&
+           (saturation <= m_pCapability->saturation_ctrl.max_value)) {
+            LOGD("new saturation value : %d ", saturation);
+            return setSaturation(saturation);
+        } else {
+            LOGE("invalid value %d out of (%d, %d)",
+                   saturation,
+                  m_pCapability->saturation_ctrl.min_value,
+                  m_pCapability->saturation_ctrl.max_value);
+            return BAD_VALUE;
+        }
+    } else {
+        LOGD("No value change in saturation factor");
+        return NO_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setContrast
+ *
+ * DESCRIPTION: set contrast control value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setContrast(const QCameraParameters& params)
+{
+    // Nothing to do when the app did not supply a contrast setting.
+    if(params.get(KEY_QC_CONTRAST) == NULL) {
+       LOGH("Contrast not set by App ");
+       return NO_ERROR;
+    }
+
+    int newContrast = params.getInt(KEY_QC_CONTRAST);
+    int curContrast = getInt(KEY_QC_CONTRAST);
+    if (newContrast == curContrast) {
+        LOGD("No value change in contrast");
+        return NO_ERROR;
+    }
+
+    // Reject values outside the sensor capability range.
+    if ((newContrast < m_pCapability->contrast_ctrl.min_value) ||
+            (newContrast > m_pCapability->contrast_ctrl.max_value)) {
+        LOGE("invalid value %d out of (%d, %d)",
+               newContrast,
+              m_pCapability->contrast_ctrl.min_value,
+              m_pCapability->contrast_ctrl.max_value);
+        return BAD_VALUE;
+    }
+
+    LOGD("new contrast value : %d ", newContrast);
+    return setContrast(newContrast);
+}
+
+/*===========================================================================
+ * FUNCTION   : setExposureCompensation
+ *
+ * DESCRIPTION: set exposure compensation value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setExposureCompensation(const QCameraParameters & params)
+{
+    int expComp = params.getInt(KEY_EXPOSURE_COMPENSATION);
+    int prev_expComp = getInt(KEY_EXPOSURE_COMPENSATION);
+
+    // Nothing to do when the app did not supply an exposure compensation.
+    if(params.get(KEY_EXPOSURE_COMPENSATION) == NULL) {
+       LOGH("Exposure compensation not set by App ");
+       return NO_ERROR;
+    }
+    if (prev_expComp != expComp) {
+        // Validate against the sensor capability range before applying.
+        if((expComp >= m_pCapability->exposure_compensation_min) &&
+           (expComp <= m_pCapability->exposure_compensation_max)) {
+            LOGD("new Exposure Compensation value : %d ", expComp);
+            return setExposureCompensation(expComp);
+        } else {
+            LOGE("invalid value %d out of (%d, %d)",
+                   expComp,
+                  m_pCapability->exposure_compensation_min,
+                  m_pCapability->exposure_compensation_max);
+            return BAD_VALUE;
+        }
+    } else {
+        LOGD("No value change in Exposure Compensation");
+        return NO_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setWhiteBalance
+ *
+ * DESCRIPTION: set white balance value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setWhiteBalance(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_WHITE_BALANCE);
+    const char *prev_str = get(KEY_WHITE_BALANCE);
+    // Forward to the string-based overload only when the value changed.
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setWhiteBalance(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setManualWhiteBalance
+ *
+ * DESCRIPTION: set manual white balance from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t  QCameraParameters::setManualWhiteBalance(const QCameraParameters& params)
+{
+    int32_t rc = NO_ERROR;
+    const char *wb_str = params.get(KEY_WHITE_BALANCE);
+    const char *prev_wb_str = get(KEY_WHITE_BALANCE);
+    LOGD("current wb mode: %s", wb_str);
+
+    // Manual CCT/gains only apply when the WB mode is "manual".
+    if (wb_str != NULL) {
+        if (strcmp(wb_str, WHITE_BALANCE_MANUAL)) {
+            LOGD("dont set cct to back-end.");
+            return NO_ERROR;
+        }
+    }
+
+    const char *value = params.get(KEY_QC_MANUAL_WB_VALUE);
+    const char *prev_value = get(KEY_QC_MANUAL_WB_VALUE);
+    const char *type = params.get(KEY_QC_MANUAL_WB_TYPE);
+    const char *prev_type = get(KEY_QC_MANUAL_WB_TYPE);
+
+    // Apply only when value, type or WB mode changed; otherwise rc stays
+    // NO_ERROR and nothing is pushed to the backend.
+    if ((value != NULL) && (type != NULL) && (wb_str != NULL)) {
+        if (prev_value  == NULL || (strcmp(value, prev_value) != 0) ||
+            prev_type == NULL || (strcmp(type, prev_type) != 0) ||
+            prev_wb_str == NULL || (strcmp(wb_str, prev_wb_str) != 0)) {
+            updateParamEntry(KEY_QC_MANUAL_WB_TYPE, type);
+            updateParamEntry(KEY_QC_MANUAL_WB_VALUE, value);
+            // Type selects between color-temperature and per-channel gains.
+            int32_t wb_type = atoi(type);
+            if (wb_type == CAM_MANUAL_WB_MODE_CCT) {
+                rc = setWBManualCCT(value);
+            } else if (wb_type == CAM_MANUAL_WB_MODE_GAIN) {
+                rc = setManualWBGains(value);
+            } else {
+                rc = BAD_VALUE;
+            }
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAntibanding
+ *
+ * DESCRIPTION: set antibanding value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAntibanding(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_ANTIBANDING);
+    const char *prev_str = get(KEY_ANTIBANDING);
+    // Forward to the string-based overload only when the value changed.
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setAntibanding(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setStatsDebugMask
+ *
+ * DESCRIPTION: get the value from persist file in Stats module that will
+ *              control funtionality in the module
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setStatsDebugMask()
+{
+    uint32_t mask = 0;
+    char value[PROPERTY_VALUE_MAX];
+
+    // The stats debug mask is driven purely by a persist property; it is
+    // not an app-settable parameter.
+    property_get("persist.camera.stats.debug.mask", value, "0");
+    mask = (uint32_t)atoi(value);
+
+    // %u matches the unsigned mask (previous %d was a signed/unsigned
+    // format mismatch).
+    LOGH("ctrl mask :%u", mask);
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_STATS_DEBUG_MASK, mask)) {
+        return BAD_VALUE;
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setPAAF
+ *
+ * DESCRIPTION: get the value from persist file in Stats module that will
+ *              control the preview assisted AF in the module
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPAAF()
+{
+    uint32_t paaf = 0;
+    char value[PROPERTY_VALUE_MAX];
+
+    // Preview-assisted AF defaults to enabled ("1"); the property exists
+    // to disable it for debugging.
+    property_get("persist.camera.stats.af.paaf", value, "1");
+    paaf = (uint32_t)atoi(value);
+
+    LOGH("PAAF is: %s", paaf ? "ON": "OFF");
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_STATS_AF_PAAF, paaf)) {
+        return BAD_VALUE;
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSceneDetect
+ *
+ * DESCRIPTION: set scene detect value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSceneDetect(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_SCENE_DETECT);
+    const char *prev_str = get(KEY_QC_SCENE_DETECT);
+    // Forward to the string-based overload only when the value changed.
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setSceneDetect(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setVideoHDR
+ *
+ * DESCRIPTION: set video HDR value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setVideoHDR(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_VIDEO_HDR);
+    const char *prev_str = get(KEY_QC_VIDEO_HDR);
+    // Forward to the string-based overload only when the value changed.
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setVideoHDR(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setVtEnable
+ *
+ * DESCRIPTION: set vt Time Stamp enable from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setVtEnable(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_VT_ENABLE);
+    const char *prev_str = get(KEY_QC_VT_ENABLE);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setVtEnable(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFaceRecognition
+ *
+ * DESCRIPTION: set face recognition mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFaceRecognition(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_FACE_RECOGNITION);
+    const char *prev_str = get(KEY_QC_FACE_RECOGNITION);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            uint32_t maxFaces = (uint32_t)params.getInt(KEY_QC_MAX_NUM_REQUESTED_FACES);
+            return setFaceRecognition(str, maxFaces);
+        }
+    }
+    return NO_ERROR;
+}
+
/*===========================================================================
 * FUNCTION   : setZoom
 *
 * DESCRIPTION: set zoom value from user setting; validates the requested
 *              level against the capability table and skips redundant
 *              updates
 *
 * PARAMETERS :
 *   @params  : user setting parameters
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCameraParameters::setZoom(const QCameraParameters& params)
{
    // Zoom needs both the capability flag and a non-empty ratio table.
    if ((m_pCapability->zoom_supported == 0 ||
         m_pCapability->zoom_ratio_tbl_cnt == 0)) {
        LOGH("no zoom support");
        return NO_ERROR;
    }

    int zoomLevel = params.getInt(KEY_ZOOM);
    // NOTE(review): the requested level is cached before validation, so
    // mParmZoomLevel may briefly hold an out-of-range value — confirm this
    // is intended by its consumers.
    mParmZoomLevel = zoomLevel;
    // Valid levels are indices into the zoom ratio table.
    if ((zoomLevel < 0) || (zoomLevel >= (int)m_pCapability->zoom_ratio_tbl_cnt)) {
        LOGE("invalid value %d out of (%d, %d)",
               zoomLevel,
              0, m_pCapability->zoom_ratio_tbl_cnt-1);
        return BAD_VALUE;
    }

    // Skip the backend update when the level has not changed.
    int prevZoomLevel = getInt(KEY_ZOOM);
    if (prevZoomLevel == zoomLevel) {
        LOGD("No value change in zoom %d %d", prevZoomLevel, zoomLevel);
        return NO_ERROR;
    }

    // Delegate to the int overload that programs the parameter batch.
    return setZoom(zoomLevel);
}
+
/*===========================================================================
 * FUNCTION   : setISOValue
 *
 * DESCRIPTION: set ISO value from user setting; in manual capture mode a
 *              "persist.camera.iso" setprop override takes precedence over
 *              the app-supplied value
 *
 * PARAMETERS :
 *   @params  : user setting parameters
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t  QCameraParameters::setISOValue(const QCameraParameters& params)
{
    const char *str = params.get(KEY_QC_ISO_MODE);
    const char *prev_str = get(KEY_QC_ISO_MODE);

    if(getManualCaptureMode()) {
        char iso_val[PROPERTY_VALUE_MAX];

        // Manual mode: the setprop value, when present, wins over params.
        property_get("persist.camera.iso", iso_val, "");
        if (strlen(iso_val) > 0) {
            if (prev_str == NULL ||
                    strcmp(iso_val, prev_str) != 0) {
                return setISOValue(iso_val);
            }
        }
    } else if (str != NULL) {
        // Normal path: push only a changed app-supplied value.
        if (prev_str == NULL ||
            strcmp(str, prev_str) != 0) {
            return setISOValue(str);
        }
    }
    return NO_ERROR;
}
+
+/*===========================================================================
+ * FUNCTION   : setContinuousISO
+ *
+ * DESCRIPTION: set ISO value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t  QCameraParameters::setContinuousISO(const char *isoValue)
+{
+    char iso[PROPERTY_VALUE_MAX];
+    int32_t continous_iso = 0;
+
+    // Check if continuous ISO is set through setproperty
+    property_get("persist.camera.continuous.iso", iso, "");
+    if (strlen(iso) > 0) {
+        continous_iso = atoi(iso);
+    } else {
+        continous_iso = atoi(isoValue);
+    }
+
+    if ((continous_iso >= 0) &&
+            (continous_iso <= m_pCapability->sensitivity_range.max_sensitivity)) {
+        LOGH("Setting continuous ISO value %d", continous_iso);
+        updateParamEntry(KEY_QC_CONTINUOUS_ISO, isoValue);
+
+        cam_intf_parm_manual_3a_t iso_settings;
+        memset(&iso_settings, 0, sizeof(cam_intf_parm_manual_3a_t));
+        iso_settings.previewOnly = FALSE;
+        iso_settings.value = continous_iso;
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_ISO, iso_settings)) {
+            return BAD_VALUE;
+        }
+        return NO_ERROR;
+    }
+    LOGE("Invalid iso value: %d", continous_iso);
+    return BAD_VALUE;
+}
+
/*===========================================================================
 * FUNCTION   : setExposureTime
 *
 * DESCRIPTION: set exposure time from user setting; in manual capture mode
 *              a "persist.camera.exposure.time" setprop can supply the value
 *              when the app did not
 *
 * PARAMETERS :
 *   @params  : user setting parameters
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t  QCameraParameters::setExposureTime(const QCameraParameters& params)
{
    const char *str = params.get(KEY_QC_EXPOSURE_TIME);
    const char *prev_str = get(KEY_QC_EXPOSURE_TIME);
    if (str != NULL) {
        // App-supplied value: push only when it actually changed.
        if (prev_str == NULL ||
                strcmp(str, prev_str) != 0) {
            return setExposureTime(str);
        }
    } else if(getManualCaptureMode()) {
        char expTime[PROPERTY_VALUE_MAX];

        // Manual mode fallback: take the value from the setprop, if set.
        property_get("persist.camera.exposure.time", expTime, "");
        if (strlen(expTime) > 0) {
            if (prev_str == NULL ||
                    strcmp(expTime, prev_str) != 0) {
                return setExposureTime(expTime);
            }
        }
    }

    return NO_ERROR;
}
+
+/*===========================================================================
+ * FUNCTION   : setVideoRotation
+ *
+ * DESCRIPTION: set rotation value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setVideoRotation(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_VIDEO_ROTATION);
+    if(str != NULL) {
+        int value = lookupAttr(VIDEO_ROTATION_MODES_MAP,
+                PARAM_MAP_SIZE(VIDEO_ROTATION_MODES_MAP), str);
+        if (value != NAME_NOT_FOUND) {
+            updateParamEntry(KEY_QC_VIDEO_ROTATION, str);
+            LOGL("setVideoRotation:   %d: ", str, value);
+        } else {
+            LOGE("Invalid rotation value: %d", value);
+            return BAD_VALUE;
+        }
+
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setRotation
+ *
+ * DESCRIPTION: set rotation value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRotation(const QCameraParameters& params)
+{
+    int32_t rotation = params.getInt(KEY_ROTATION);
+    if (rotation != -1) {
+        if (rotation == 0 || rotation == 90 ||
+            rotation == 180 || rotation == 270) {
+            set(KEY_ROTATION, rotation);
+
+            ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_META_JPEG_ORIENTATION,
+                    rotation);
+            mRotation = rotation;
+        } else {
+            LOGE("Invalid rotation value: %d", rotation);
+            return BAD_VALUE;
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFlash
+ *
+ * DESCRIPTION: set flash mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFlash(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_FLASH_MODE);
+    const char *prev_str = get(KEY_FLASH_MODE);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setFlash(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAecLock
+ *
+ * DESCRIPTION: set AEC lock value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAecLock(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_AUTO_EXPOSURE_LOCK);
+    const char *prev_str = get(KEY_AUTO_EXPOSURE_LOCK);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setAecLock(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAwbLock
+ *
+ * DESCRIPTION: set AWB lock from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAwbLock(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_AUTO_WHITEBALANCE_LOCK);
+    const char *prev_str = get(KEY_AUTO_WHITEBALANCE_LOCK);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setAwbLock(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAutoHDR
+ *
+ * DESCRIPTION: Enable/disable auto HDR
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAutoHDR(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_AUTO_HDR_ENABLE);
+    const char *prev_str = get(KEY_QC_AUTO_HDR_ENABLE);
+    char prop[PROPERTY_VALUE_MAX];
+
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.auto.hdr.enable", prop, VALUE_DISABLE);
+    if (str != NULL) {
+       if (prev_str == NULL ||
+           strcmp(str, prev_str) != 0) {
+           LOGH("Auto HDR set to: %s", str);
+           return updateParamEntry(KEY_QC_AUTO_HDR_ENABLE, str);
+       }
+    } else {
+       if (prev_str == NULL ||
+           strcmp(prev_str, prop) != 0 ) {
+           LOGH("Auto HDR set to: %s", prop);
+           updateParamEntry(KEY_QC_AUTO_HDR_ENABLE, prop);
+       }
+    }
+
+       return NO_ERROR;
+}
+
+/*===========================================================================
+* FUNCTION   : isAutoHDREnabled
+*
+* DESCRIPTION: Query auto HDR status
+*
+* PARAMETERS : None
+*
+* RETURN     : bool true/false
+*==========================================================================*/
+bool QCameraParameters::isAutoHDREnabled()
+{
+    const char *str = get(KEY_QC_AUTO_HDR_ENABLE);
+    if (str != NULL) {
+        int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+                PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), str);
+        if (value == NAME_NOT_FOUND) {
+            LOGE("Invalid Auto HDR value %s", str);
+            return false;
+        }
+
+        LOGH("Auto HDR status is: %d", value);
+        return value ? true : false;
+    }
+
+    LOGH("Auto HDR status not set!");
+    return false;
+}
+
+/*===========================================================================
+ * FUNCTION   : setMCEValue
+ *
+ * DESCRIPTION: set memory color enhancement value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setMCEValue(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_MEMORY_COLOR_ENHANCEMENT);
+    const char *prev_str = get(KEY_QC_MEMORY_COLOR_ENHANCEMENT);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setMCEValue(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setDISValue
+ *
+ * DESCRIPTION: enable/disable DIS from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setDISValue(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_DIS);
+    const char *prev_str = get(KEY_QC_DIS);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setDISValue(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setLensShadeValue
+ *
+ * DESCRIPTION: set lens shade value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setLensShadeValue(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_LENSSHADE);
+    const char *prev_str = get(KEY_QC_LENSSHADE);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setLensShadeValue(str);
+        }
+    }
+    return NO_ERROR;
+}
+
/*===========================================================================
 * FUNCTION   : setFocusAreas
 *
 * DESCRIPTION: set focus areas from user setting; ignored entirely on the
 *              secondary camera of a dual-camera pair
 *
 * PARAMETERS :
 *   @params  : user setting parameters
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCameraParameters::setFocusAreas(const QCameraParameters& params)
{
    const char *str = params.get(KEY_FOCUS_AREAS);

    // In dual-camera operation the secondary follows the primary, so
    // app-specified focus areas are dropped here.
    if (getRelatedCamSyncInfo()->mode == CAM_MODE_SECONDARY) {
        // Ignore focus areas for secondary camera
        LOGH("Ignore focus areas for secondary camera!! ");
        return NO_ERROR;
    }
    if (str != NULL) {
        // A zero maximum means the sensor advertises no AF-area support.
        int max_num_af_areas = getInt(KEY_MAX_NUM_FOCUS_AREAS);
        if(max_num_af_areas == 0) {
            LOGE("max num of AF area is 0, cannot set focus areas");
            return BAD_VALUE;
        }

        // Forward only a changed area list to the string overload.
        const char *prev_str = get(KEY_FOCUS_AREAS);
        if (prev_str == NULL ||
            strcmp(str, prev_str) != 0) {
            return setFocusAreas(str);
        }
    }
    return NO_ERROR;
}
+
/*===========================================================================
 * FUNCTION   : setMeteringAreas
 *
 * DESCRIPTION: set metering areas from user setting
 *
 * PARAMETERS :
 *   @params  : user setting parameters
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCameraParameters::setMeteringAreas(const QCameraParameters& params)
{
    const char *str = params.get(KEY_METERING_AREAS);
    if (str != NULL) {
        // A zero maximum means metering areas are unsupported on this sensor.
        int max_num_mtr_areas = getInt(KEY_MAX_NUM_METERING_AREAS);
        if(max_num_mtr_areas == 0) {
            LOGE("max num of metering areas is 0, cannot set focus areas");
            return BAD_VALUE;
        }

        // Re-apply even an unchanged value when a restart is pending, so the
        // areas survive the stream reconfiguration.
        const char *prev_str = get(KEY_METERING_AREAS);
        if (prev_str == NULL ||
            strcmp(str, prev_str) != 0 ||
            (m_bNeedRestart == true)) {
            return setMeteringAreas(str);
        }
    }
    return NO_ERROR;
}
+
/*===========================================================================
 * FUNCTION   : setSceneMode
 *
 * DESCRIPTION: set scene mode from user setting; also arbitrates between
 *              HDR and video recording (mutually exclusive) and flags a
 *              stream restart when the HDR state changes
 *
 * PARAMETERS :
 *   @params  : user setting parameters
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCameraParameters::setSceneMode(const QCameraParameters& params)
{
    const char *str = params.get(KEY_SCENE_MODE);
    const char *prev_str = get(KEY_SCENE_MODE);
    LOGH("str - %s, prev_str - %s", str, prev_str);

    // HDR & Recording are mutually exclusive and so disable HDR if recording hint is set.
    // m_bLocalHDREnabled remembers that HDR was forced off here so it can be
    // restored once recording stops.
    if (m_bRecordingHint_new && m_bHDREnabled) {
        LOGH("Disable the HDR and set it to Auto");
        str = SCENE_MODE_AUTO;
        m_bLocalHDREnabled = true;
    } else if (!m_bRecordingHint_new && m_bLocalHDREnabled) {
        LOGH("Restore the HDR from Auto scene mode");
        str = SCENE_MODE_HDR;
        m_bLocalHDREnabled = false;
    }

    if (str != NULL) {
        if (prev_str == NULL ||
            strcmp(str, prev_str) != 0) {

            // Remember a transition into AUTO so other code can react to it.
            if(strcmp(str, SCENE_MODE_AUTO) == 0) {
                m_bSceneTransitionAuto = true;
            }
            if (strcmp(str, SCENE_MODE_HDR) == 0) {

                // If HDR is set from client  and the feature is not enabled in the backend, ignore it.
                // Prefer sensor (in-hardware) HDR when the backend supports it
                // and the current size is valid for it; otherwise fall back to
                // software multi-frame HDR.
                if (m_bHDRModeSensor && isSupportedSensorHdrSize(params)) {
                    m_bSensorHDREnabled = true;
                    LOGH("Sensor HDR mode Enabled");
                } else {
                    m_bHDREnabled = true;
                    LOGH("S/W HDR Enabled");
                }
            } else {
                // Leaving HDR: tear down sensor HDR explicitly; that needs a
                // restart to reprogram the sensor.
                m_bHDREnabled = false;
                if (m_bSensorHDREnabled) {
                    m_bSensorHDREnabled = false;
                    m_bNeedRestart = true;
                    setSensorSnapshotHDR("off");
                }
            }

            // Any transition into sensor HDR, into S/W HDR, or out of HDR
            // requires a stream restart.
            if (m_bSensorHDREnabled) {
                setSensorSnapshotHDR("on");
                m_bNeedRestart = true;
            } else if ((m_bHDREnabled) ||
                ((prev_str != NULL) && (strcmp(prev_str, SCENE_MODE_HDR) == 0))) {
                LOGH("scene mode changed between HDR and non-HDR, need restart");
                m_bNeedRestart = true;
            }

            return setSceneMode(str);
        }
    }
    return NO_ERROR;
}
+
+/*===========================================================================
+ * FUNCTION   : setSelectableZoneAf
+ *
+ * DESCRIPTION: set selectable zone auto focus value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSelectableZoneAf(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_SELECTABLE_ZONE_AF);
+    const char *prev_str = get(KEY_QC_SELECTABLE_ZONE_AF);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setSelectableZoneAf(str);
+        }
+    }
+    return NO_ERROR;
+}
+
/*===========================================================================
 * FUNCTION   : setAEBracket
 *
 * DESCRIPTION: set AE bracket from user setting; forced off while HDR is
 *              active, and the burst-exposure list can come either from the
 *              app or from the "persist.capture.burst.exposures" setprop
 *
 * PARAMETERS :
 *   @params  : user setting parameters
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCameraParameters::setAEBracket(const QCameraParameters& params)
{
    // HDR owns the exposure bracketing hardware; AE bracket must stay off.
    if (isHDREnabled()) {
        LOGH("scene mode is HDR, overwrite AE bracket setting to off");
        return setAEBracket(AE_BRACKET_OFF);
    }

    // Burst exposure list: app value wins, then the setprop; otherwise the
    // key is removed so stale values do not linger.
    const char *expStr = params.get(KEY_QC_CAPTURE_BURST_EXPOSURE);
    if (NULL != expStr && strlen(expStr) > 0) {
        set(KEY_QC_CAPTURE_BURST_EXPOSURE, expStr);
    } else {
        char prop[PROPERTY_VALUE_MAX];
        memset(prop, 0, sizeof(prop));
        property_get("persist.capture.burst.exposures", prop, "");
        if (strlen(prop) > 0) {
            set(KEY_QC_CAPTURE_BURST_EXPOSURE, prop);
        } else {
            remove(KEY_QC_CAPTURE_BURST_EXPOSURE);
        }
    }

    // Forward only a changed bracket mode to the string overload.
    const char *str = params.get(KEY_QC_AE_BRACKET_HDR);
    const char *prev_str = get(KEY_QC_AE_BRACKET_HDR);
    if (str != NULL) {
        if (prev_str == NULL ||
            strcmp(str, prev_str) != 0) {
            return setAEBracket(str);
        }
    }
    return NO_ERROR;
}
+
+/*===========================================================================
+ * FUNCTION   : setAFBracket
+ *
+ * DESCRIPTION: set AF bracket from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAFBracket(const QCameraParameters& params)
+{
+    if ((m_pCapability->qcom_supported_feature_mask &
+            (CAM_QCOM_FEATURE_REFOCUS | CAM_QCOM_FEATURE_UBIFOCUS)) == 0) {
+        LOGH("AF Bracketing is not supported");
+        return NO_ERROR;
+    }
+    const char *str = params.get(KEY_QC_AF_BRACKET);
+    const char *prev_str = get(KEY_QC_AF_BRACKET);
+    LOGH("str =%s & prev_str =%s", str, prev_str);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            m_bNeedRestart = true;
+            return setAFBracket(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setReFocus
+ *
+ * DESCRIPTION: set refocus from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setReFocus(const QCameraParameters& params)
+{
+    if ((m_pCapability->qcom_supported_feature_mask &
+            (CAM_QCOM_FEATURE_REFOCUS | CAM_QCOM_FEATURE_UBIFOCUS)) == 0) {
+        LOGD("AF Bracketing is not supported");
+        return NO_ERROR;
+    }
+    const char *str = params.get(KEY_QC_RE_FOCUS);
+    const char *prev_str = get(KEY_QC_RE_FOCUS);
+    LOGH("str =%s & prev_str =%s", str, prev_str);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            m_bNeedRestart = true;
+            return setReFocus(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setChromaFlash
+ *
+ * DESCRIPTION: set chroma flash from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setChromaFlash(const QCameraParameters& params)
+{
+    if ((m_pCapability->qcom_supported_feature_mask &
+        CAM_QCOM_FEATURE_CHROMA_FLASH) == 0) {
+        LOGH("Chroma Flash is not supported");
+        return NO_ERROR;
+    }
+    const char *str = params.get(KEY_QC_CHROMA_FLASH);
+    const char *prev_str = get(KEY_QC_CHROMA_FLASH);
+    LOGH("str =%s & prev_str =%s", str, prev_str);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            m_bNeedRestart = true;
+            return setChromaFlash(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setOptiZoom
+ *
+ * DESCRIPTION: set opti zoom from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setOptiZoom(const QCameraParameters& params)
+{
+    if ((m_pCapability->qcom_supported_feature_mask &
+        CAM_QCOM_FEATURE_OPTIZOOM) == 0){
+        LOGH("Opti Zoom is not supported");
+        return NO_ERROR;
+    }
+    const char *str = params.get(KEY_QC_OPTI_ZOOM);
+    const char *prev_str = get(KEY_QC_OPTI_ZOOM);
+    LOGH("str =%s & prev_str =%s", str, prev_str);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            m_bNeedRestart = true;
+            return setOptiZoom(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setTruePortrait
+ *
+ * DESCRIPTION: set true portrait from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setTruePortrait(const QCameraParameters& params)
+{
+    if ((m_pCapability->qcom_supported_feature_mask &
+            CAM_QCOM_FEATURE_TRUEPORTRAIT) == 0) {
+        LOGD("True Portrait is not supported");
+        return NO_ERROR;
+    }
+    const char *str = params.get(KEY_QC_TRUE_PORTRAIT);
+    const char *prev_str = get(KEY_QC_TRUE_PORTRAIT);
+    LOGH("str =%s & prev_str =%s", str, prev_str);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setTruePortrait(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setHDRMode
+ *
+ * DESCRIPTION: set HDR mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setHDRMode(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_HDR_MODE);
+    const char *prev_str = get(KEY_QC_HDR_MODE);
+    uint32_t supported_hdr_modes = m_pCapability->qcom_supported_feature_mask &
+          (CAM_QCOM_FEATURE_SENSOR_HDR | CAM_QCOM_FEATURE_HDR);
+
+    LOGH("str =%s & prev_str =%s", str, prev_str);
+    if (str != NULL) {
+        if ((CAM_QCOM_FEATURE_SENSOR_HDR == supported_hdr_modes) &&
+                (strncmp(str, HDR_MODE_SENSOR, strlen(HDR_MODE_SENSOR)))) {
+            LOGH("Only sensor HDR is supported");
+            return NO_ERROR;
+        } else if  ((CAM_QCOM_FEATURE_HDR == supported_hdr_modes) &&
+                (strncmp(str, HDR_MODE_SENSOR, strlen(HDR_MODE_MULTI_FRAME)))) {
+            LOGH("Only multi frame HDR is supported");
+            return NO_ERROR;
+        } else if (!supported_hdr_modes) {
+            LOGH("HDR is not supported");
+            return NO_ERROR;
+        }
+        if (prev_str == NULL ||
+                strcmp(str, prev_str) != 0) {
+            return setHDRMode(str);
+        }
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setHDRNeed1x
+ *
+ * DESCRIPTION: set HDR need 1x from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setHDRNeed1x(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_HDR_NEED_1X);
+    const char *prev_str = get(KEY_QC_HDR_NEED_1X);
+
+    LOGH("str =%s & prev_str =%s", str, prev_str);
+    if (str != NULL) {
+        if (m_bHDRModeSensor) {
+            LOGH("Only multi frame HDR supports 1x frame");
+            return NO_ERROR;
+        }
+        if ((prev_str == NULL) || (strcmp(str, prev_str) != 0)) {
+            return setHDRNeed1x(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSeeMore
+ *
+ * DESCRIPTION: set see more (llvd) from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSeeMore(const QCameraParameters& params)
+{
+    if ((m_pCapability->qcom_supported_feature_mask &
+            CAM_QCOM_FEATURE_LLVD) == 0) {
+        LOGD("See more is not supported");
+        return NO_ERROR;
+    }
+    const char *str = params.get(KEY_QC_SEE_MORE);
+    const char *prev_str = get(KEY_QC_SEE_MORE);
+    LOGH("str =%s & prev_str =%s", str, prev_str);
+    if (str != NULL) {
+        if (prev_str == NULL || strcmp(str, prev_str) != 0) {
+            m_bNeedRestart = true;
+            return setSeeMore(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setNoiseReductionMode
+ *
+ * DESCRIPTION: set noise reduction mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setNoiseReductionMode(const QCameraParameters& params)
+{
+    // Software TNR must be advertised by the capability mask.
+    if (!(m_pCapability->qcom_supported_feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
+        LOGD("SW TNR is not supported");
+        return NO_ERROR;
+    }
+    const char *newVal = params.get(KEY_QC_NOISE_REDUCTION_MODE);
+    const char *curVal = get(KEY_QC_NOISE_REDUCTION_MODE);
+    LOGH("str =%s & prev_str =%s", newVal, curVal);
+    if ((newVal == NULL) ||
+            ((curVal != NULL) && (strcmp(newVal, curVal) == 0))) {
+        return NO_ERROR;   // unset or unchanged
+    }
+    // A mode change requires a stream restart before it takes effect.
+    m_bNeedRestart = true;
+    return setNoiseReductionMode(newVal);
+}
+
+/*===========================================================================
+ * FUNCTION   : setStillMore
+ *
+ * DESCRIPTION: set stillmore from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setStillMore(const QCameraParameters& params)
+{
+    // StillMore must be advertised in the capability mask.
+    if (!(m_pCapability->qcom_supported_feature_mask &
+            CAM_QCOM_FEATURE_STILLMORE)) {
+        LOGD("Stillmore is not supported");
+        return NO_ERROR;
+    }
+    const char *newVal = params.get(KEY_QC_STILL_MORE);
+    const char *curVal = get(KEY_QC_STILL_MORE);
+    LOGH("str =%s & prev_str =%s", newVal, curVal);
+    if ((newVal == NULL) ||
+            ((curVal != NULL) && (strcmp(newVal, curVal) == 0))) {
+        return NO_ERROR;   // unset or unchanged
+    }
+    // StillMore toggling requires a preview restart.
+    m_bNeedRestart = true;
+    return setStillMore(newVal);
+}
+
+/*===========================================================================
+ * FUNCTION   : setRedeyeReduction
+ *
+ * DESCRIPTION: set red eye reduction setting from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRedeyeReduction(const QCameraParameters& params)
+{
+    // Delegate to the string overload only when the caller supplied a value
+    // that differs from the one currently stored.
+    const char *newVal = params.get(KEY_QC_REDEYE_REDUCTION);
+    const char *curVal = get(KEY_QC_REDEYE_REDUCTION);
+    bool unchanged = (newVal == NULL) ||
+            ((curVal != NULL) && (strcmp(newVal, curVal) == 0));
+    return unchanged ? NO_ERROR : setRedeyeReduction(newVal);
+}
+
+/*===========================================================================
+ * FUNCTION   : setGpsLocation
+ *
+ * DESCRIPTION: set GPS location information from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setGpsLocation(const QCameraParameters& params)
+{
+    // Mirror every GPS-related key from the incoming user settings into our
+    // own table: store the value when present, drop any stale entry when the
+    // caller omitted it. All nine keys follow the identical pattern.
+    const char *gpsKeys[] = {
+        KEY_GPS_PROCESSING_METHOD,
+        KEY_GPS_LATITUDE,
+        KEY_QC_GPS_LATITUDE_REF,
+        KEY_GPS_LONGITUDE,
+        KEY_QC_GPS_LONGITUDE_REF,
+        KEY_QC_GPS_ALTITUDE_REF,
+        KEY_GPS_ALTITUDE,
+        KEY_QC_GPS_STATUS,
+        KEY_GPS_TIMESTAMP
+    };
+
+    for (size_t i = 0; i < sizeof(gpsKeys) / sizeof(gpsKeys[0]); i++) {
+        const char *val = params.get(gpsKeys[i]);
+        if (val != NULL) {
+            set(gpsKeys[i], val);
+        } else {
+            remove(gpsKeys[i]);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setNumOfSnapshot
+ *
+ * DESCRIPTION: set number of snapshot per shutter from user setting
+ *              (exposures-per-shutter from AE bracketing/HDR, multiplied by
+ *              any burst outputs such as UbiFocus refocus frames)
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setNumOfSnapshot()
+{
+    int nBurstNum = 1;  // burst multiplier (e.g. UbiFocus refocus outputs)
+    int nExpnum = 0;    // exposures per shutter (AE bracketing / HDR)
+
+    const char *bracket_str = get(KEY_QC_AE_BRACKET_HDR);
+    if (bracket_str != NULL && strlen(bracket_str) > 0) {
+        int value = lookupAttr(BRACKETING_MODES_MAP, PARAM_MAP_SIZE(BRACKETING_MODES_MAP),
+                bracket_str);
+        switch (value) {
+        case CAM_EXP_BRACKETING_ON:
+            {
+                // Exposure bracketing: one snapshot per comma-separated
+                // exposure value in KEY_QC_CAPTURE_BURST_EXPOSURE.
+                nExpnum = 0;
+                const char *str_val = get(KEY_QC_CAPTURE_BURST_EXPOSURE);
+                if ((str_val != NULL) && (strlen(str_val) > 0)) {
+                    char prop[PROPERTY_VALUE_MAX];
+                    memset(prop, 0, sizeof(prop));
+                    // Tokenize a local copy, since strtok_r mutates its input.
+                    strlcpy(prop, str_val, PROPERTY_VALUE_MAX);
+                    char *saveptr = NULL;
+                    char *token = strtok_r(prop, ",", &saveptr);
+                    while (token != NULL) {
+                        token = strtok_r(NULL, ",", &saveptr);
+                        nExpnum++;
+                    }
+                }
+                if (nExpnum == 0) {
+                    nExpnum = 1;
+                }
+            }
+            break;
+        default:
+            // Bracketing off (or HDR mode): single exposure plus any extra
+            // HDR output buffers (e.g. the 1x frame).
+            nExpnum = 1 + getNumOfExtraHDROutBufsIfNeeded();
+            break;
+        }
+    }
+    // NOTE(review): if KEY_QC_AE_BRACKET_HDR is unset or empty, nExpnum stays
+    // 0 and the snapshot count set below becomes 0 — presumably the key is
+    // always initialized by default-parameter setup; confirm before relying
+    // on this path.
+
+    if (isUbiRefocus()) {
+        nBurstNum = m_pCapability->refocus_af_bracketing_need.output_count + 1;
+    }
+
+    LOGH("nBurstNum = %d, nExpnum = %d", nBurstNum, nExpnum);
+    set(KEY_QC_NUM_SNAPSHOT_PER_SHUTTER, nBurstNum * nExpnum);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setRecordingHint
+ *
+ * DESCRIPTION: set recording hint value from user setting; also re-applies
+ *              face detection and DIS settings that depend on the hint
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRecordingHint(const QCameraParameters& params)
+{
+    const char * str = params.get(KEY_RECORDING_HINT);
+    const char *prev_str = get(KEY_RECORDING_HINT);
+    if (str != NULL) {
+        if (prev_str == NULL || strcmp(str, prev_str) != 0) {
+            // Only "true"/"false" are accepted; anything else is rejected.
+            int32_t value = lookupAttr(TRUE_FALSE_MODES_MAP, PARAM_MAP_SIZE(TRUE_FALSE_MODES_MAP),
+                    str);
+            if(value != NAME_NOT_FOUND){
+                updateParamEntry(KEY_RECORDING_HINT, str);
+                setRecordingHintValue(value);
+                // Face detection is disabled while recording unless
+                // FD-in-video mode is active.
+                if (getFaceDetectionOption() == true) {
+                    if (!fdModeInVideo()) {
+                        setFaceDetection(value > 0 ? false : true, false);
+                    } else {
+                        setFaceDetection(true, false);
+                    }
+                }
+                // DIS depends on the recording hint, so push it again.
+                if (m_bDISEnabled) {
+                    LOGH("Setting DIS value again");
+                    setDISValue(VALUE_ENABLE);
+                }
+                return NO_ERROR;
+            } else {
+                LOGE("Invalid recording hint value: %s", str);
+                return BAD_VALUE;
+            }
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setNoDisplayMode
+ *
+ * DESCRIPTION: set no display mode from user setting; a secondary (aux)
+ *              camera is forced into no-display mode, otherwise the value
+ *              comes from the key or the persist.camera.no-display property
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setNoDisplayMode(const QCameraParameters& params)
+{
+    const char *str_val  = params.get(KEY_QC_NO_DISPLAY_MODE);
+    const char *prev_str = get(KEY_QC_NO_DISPLAY_MODE);
+    char prop[PROPERTY_VALUE_MAX];
+    LOGD("str_val: %s, prev_str: %s", str_val, prev_str);
+
+    // Aux Camera Mode, set no display mode
+    if (m_relCamSyncInfo.mode == CAM_MODE_SECONDARY) {
+        if (!m_bNoDisplayMode) {
+            set(KEY_QC_NO_DISPLAY_MODE, 1);
+            m_bNoDisplayMode = true;
+            m_bNeedRestart = true;
+        }
+        return NO_ERROR;
+    }
+
+    // A changed key value wins; switching display mode needs a restart.
+    if(str_val && strlen(str_val) > 0) {
+        if (prev_str == NULL || strcmp(str_val, prev_str) != 0) {
+            m_bNoDisplayMode = atoi(str_val);
+            set(KEY_QC_NO_DISPLAY_MODE, str_val);
+            m_bNeedRestart = true;
+        }
+    } else {
+        // No key supplied: fall back to the system property (default 0).
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.camera.no-display", prop, "0");
+        m_bNoDisplayMode = atoi(prop);
+    }
+    LOGH("Param m_bNoDisplayMode = %d", m_bNoDisplayMode);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setZslMode
+ *
+ * DESCRIPTION: set ZSL mode from user setting; if ZSL is being forced on
+ *              (m_bForceZslMode), the user value is ignored
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setZslMode(const QCameraParameters& params)
+{
+    const char *str_val  = params.get(KEY_QC_ZSL);
+    const char *prev_val  = get(KEY_QC_ZSL);
+    int32_t rc = NO_ERROR;
+
+    if(m_bForceZslMode) {
+        if (!m_bZslMode) {
+            // Force ZSL mode to ON
+            set(KEY_QC_ZSL, VALUE_ON);
+            setZslMode(TRUE);
+            LOGH("ZSL Mode forced to be enabled");
+        }
+    } else if (str_val != NULL) {
+        if (prev_val == NULL || strcmp(str_val, prev_val) != 0) {
+            // Accept only known on/off values; delegate to the bool overload.
+            int32_t value = lookupAttr(ON_OFF_MODES_MAP, PARAM_MAP_SIZE(ON_OFF_MODES_MAP),
+                    str_val);
+            if (value != NAME_NOT_FOUND) {
+                set(KEY_QC_ZSL, str_val);
+                rc = setZslMode(value);
+                // ZSL mode changed, need restart preview
+                m_bNeedRestart = true;
+            } else {
+                LOGE("Invalid ZSL mode value: %s", str_val);
+                rc = BAD_VALUE;
+            }
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setZslMode
+ *
+ * DESCRIPTION: set ZSL mode from user setting and push it to the parameter
+ *              batch; forced-ZSL overrides the requested value
+ *
+ * PARAMETERS :
+ *   @value  : ZSL mode value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setZslMode(bool value)
+{
+    int32_t rc = NO_ERROR;
+    if(m_bForceZslMode) {
+        if (!m_bZslMode) {
+            // Force ZSL mode to ON
+            set(KEY_QC_ZSL, VALUE_ON);
+            m_bZslMode_new = true;
+            m_bZslMode = true;
+            m_bNeedRestart = true;
+
+            // Renamed from "value": the old inner declaration shadowed the
+            // bool parameter of the same name.
+            int32_t forceVal = m_bForceZslMode;
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_ZSL_MODE, forceVal)) {
+                rc = BAD_VALUE;
+            }
+
+            LOGI("ZSL Mode forced to be enabled");
+        }
+    } else {
+        // Update the pending state first so the log reflects the value being
+        // applied (previously it printed the stale, pre-update state).
+        m_bZslMode_new = (value > 0)? true : false;
+        LOGI("ZSL Mode  -> %s", m_bZslMode_new ? "Enabled" : "Disabled");
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_ZSL_MODE, value)) {
+            rc = BAD_VALUE;
+        }
+    }
+    LOGH("enabled: %d rc = %d", m_bZslMode_new, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : updateZSLModeValue
+ *
+ * DESCRIPTION: update zsl mode value locally and to daemon
+ *
+ * PARAMETERS :
+ *   @value   : zsl mode value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateZSLModeValue(bool value)
+{
+    int32_t rc = NO_ERROR;
+    // Batch sequence: init table -> stage ZSL entry -> commit to backend.
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        LOGE("Failed to initialize group update table");
+        return BAD_TYPE;
+    }
+
+    rc = setZslMode(value);
+    if (rc != NO_ERROR) {
+        // Message fixed: was the garbled "Failed to ZSL value".
+        LOGE("Failed to set ZSL value");
+        return rc;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        // Message fixed: was "Failed to update recording hint", a copy-paste
+        // from the recording-hint updater; this function commits ZSL mode.
+        LOGE("Failed to commit ZSL mode change");
+        return rc;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setWaveletDenoise
+ *
+ * DESCRIPTION: set wavelet denoise value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setWaveletDenoise(const QCameraParameters& params)
+{
+    // Hand the new wavelet-denoise value to the string overload only when it
+    // differs from the currently stored setting.
+    const char *newVal = params.get(KEY_QC_DENOISE);
+    const char *curVal = get(KEY_QC_DENOISE);
+    bool unchanged = (newVal == NULL) ||
+            ((curVal != NULL) && (strcmp(newVal, curVal) == 0));
+    return unchanged ? NO_ERROR : setWaveletDenoise(newVal);
+}
+
+/*===========================================================================
+ * FUNCTION   : setTemporalDenoise
+ *
+ * DESCRIPTION: set temporal denoise (TNR) state from user setting, falling
+ *              back to system properties when no UI setting is present;
+ *              pushes the result to the parameter batch only on change
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setTemporalDenoise(const QCameraParameters& params)
+{
+    if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_CPP_TNR) == 0) {
+        LOGH("TNR is not supported");
+        return NO_ERROR;
+    }
+
+    const char *str = params.get(KEY_QC_TNR_MODE);
+    const char *prev_str = get(KEY_QC_TNR_MODE);
+    const char *video_str = params.get(KEY_QC_VIDEO_TNR_MODE);
+    const char *video_prev_str = get(KEY_QC_VIDEO_TNR_MODE);
+    char video_value[PROPERTY_VALUE_MAX];
+    char preview_value[PROPERTY_VALUE_MAX];
+    // Snapshot current state so the batch entry is only added on a change.
+    bool prev_video_tnr = m_bTNRVideoOn;
+    bool prev_preview_tnr = m_bTNRPreviewOn;
+    bool prev_snap_tnr = m_bTNRSnapshotOn;
+
+    // persist.camera.tnr_cds chooses whether CDS may stay on alongside TNR.
+    char value[PROPERTY_VALUE_MAX];
+    memset(value, 0, sizeof(value));
+    property_get("persist.camera.tnr_cds", value, "0");
+    uint8_t tnr_cds = (uint8_t)atoi(value);
+
+    // Recording: honor the video TNR key (video TNR implies preview TNR).
+    // An unchanged key short-circuits the whole update.
+    if (m_bRecordingHint_new == true) {
+        if (video_str) {
+            if ((video_prev_str == NULL) || (strcmp(video_str, video_prev_str) != 0)) {
+                if (!strcmp(video_str, VALUE_ON)) {
+                    m_bTNRVideoOn = true;
+                    m_bTNRPreviewOn = true;
+                } else {
+                    m_bTNRVideoOn = false;
+                    m_bTNRPreviewOn = false;
+                }
+                updateParamEntry(KEY_QC_VIDEO_TNR_MODE, video_str);
+            } else {
+                return NO_ERROR;
+            }
+        }
+    } else {
+        // Preview-only: honor the preview TNR key.
+        if (str) {
+            if ((prev_str == NULL) || (strcmp(str, prev_str) != 0)) {
+                if (!strcmp(str, VALUE_ON)) {
+                    m_bTNRPreviewOn = true;
+                } else {
+                    m_bTNRPreviewOn = false;
+                }
+                updateParamEntry(KEY_QC_TNR_MODE, str);
+            } else {
+                return NO_ERROR;
+            }
+        }
+    }
+
+    //Read setprops only if UI is not present or disabled.
+    if ((m_bRecordingHint_new == true)
+            && ((video_str == NULL)
+            || (strcmp(video_str, VALUE_ON)))) {
+        memset(video_value, 0, sizeof(video_value));
+        property_get("persist.camera.tnr.video", video_value, VALUE_OFF);
+        if (!strcmp(video_value, VALUE_ON)) {
+            m_bTNRVideoOn = true;
+        } else {
+            m_bTNRVideoOn = false;
+        }
+        updateParamEntry(KEY_QC_VIDEO_TNR_MODE, video_value);
+
+        memset(preview_value, 0, sizeof(preview_value));
+        property_get("persist.camera.tnr.preview", preview_value, VALUE_OFF);
+        if (!strcmp(preview_value, VALUE_ON)) {
+            m_bTNRPreviewOn = true;
+        } else {
+            m_bTNRPreviewOn = false;
+        }
+        updateParamEntry(KEY_QC_TNR_MODE, preview_value);
+    } else if ((m_bRecordingHint_new != true)
+            && ((str == NULL) || (strcmp(str, VALUE_ON)))) {
+        memset(preview_value, 0, sizeof(preview_value));
+        property_get("persist.camera.tnr.preview", preview_value, VALUE_OFF);
+        if (!strcmp(preview_value, VALUE_ON)) {
+            m_bTNRPreviewOn = true;
+        } else {
+            m_bTNRPreviewOn = false;
+        }
+        updateParamEntry(KEY_QC_TNR_MODE, preview_value);
+    }
+
+    // Snapshot TNR is controlled purely by a system property.
+    memset(value, 0, sizeof(value));
+    property_get("persist.camera.tnr.snapshot", value, VALUE_OFF);
+    if (!strcmp(value, VALUE_ON)) {
+        m_bTNRSnapshotOn = true;
+        LOGD("TNR enabled for SNAPSHOT stream");
+    } else {
+        m_bTNRSnapshotOn = false;
+    }
+
+    cam_denoise_param_t temp;
+    memset(&temp, 0, sizeof(temp));
+    if (m_bTNRVideoOn || m_bTNRPreviewOn || m_bTNRSnapshotOn) {
+        temp.denoise_enable = 1;
+        temp.process_plates = getDenoiseProcessPlate(
+                CAM_INTF_PARM_TEMPORAL_DENOISE);
+
+        // Unless tnr_cds allows concurrency, CDS is forced OFF while TNR is
+        // active.
+        if (!tnr_cds) {
+            int32_t cds_mode = lookupAttr(CDS_MODES_MAP,
+                    PARAM_MAP_SIZE(CDS_MODES_MAP), CDS_MODE_OFF);
+
+            if (cds_mode != NAME_NOT_FOUND) {
+                updateParamEntry(KEY_QC_VIDEO_CDS_MODE, CDS_MODE_OFF);
+                if (m_bTNRPreviewOn) {
+                    updateParamEntry(KEY_QC_CDS_MODE, CDS_MODE_OFF);
+                }
+                if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+                        CAM_INTF_PARM_CDS_MODE, cds_mode)) {
+                    LOGE("Failed CDS MODE to update table");
+                    return BAD_VALUE;
+                }
+                LOGD("CDS is set to = %s when TNR is enabled",
+                         CDS_MODE_OFF);
+                mCds_mode = cds_mode;
+            } else {
+                LOGE("Invalid argument for video CDS MODE %d",
+                         cds_mode);
+            }
+        } else {
+            LOGH("Enabled TNR with CDS");
+        }
+    }
+
+    // Only stage the denoise entry when some TNR state actually changed.
+    if ((m_bTNRVideoOn != prev_video_tnr)
+            || (m_bTNRPreviewOn != prev_preview_tnr)
+            || (prev_snap_tnr != m_bTNRSnapshotOn)) {
+        LOGD("TNR enabled = %d, plates = %d",
+                temp.denoise_enable, temp.process_plates);
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+                CAM_INTF_PARM_TEMPORAL_DENOISE, temp)) {
+            return BAD_VALUE;
+        }
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setCameraMode
+ *
+ * DESCRIPTION: set camera mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setCameraMode(const QCameraParameters& params)
+{
+    // Pass-through: store the camera-mode key when supplied, otherwise clear
+    // any previously stored value.
+    const char *mode = params.get(KEY_QC_CAMERA_MODE);
+    if (mode == NULL) {
+        remove(KEY_QC_CAMERA_MODE);
+    } else {
+        set(KEY_QC_CAMERA_MODE, mode);
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSceneSelectionMode
+ *
+ * DESCRIPTION: set scene selection mode from user setting; enabling is only
+ *              permitted while ZSL is active, and any change requires a
+ *              stream restart
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSceneSelectionMode(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_SCENE_SELECTION);
+    const char *prev_str = get(KEY_QC_SCENE_SELECTION);
+    if (NULL != str) {
+        if ((NULL == prev_str) || (strcmp(str, prev_str) != 0)) {
+            int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+                    PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), str);
+            if (value != NAME_NOT_FOUND) {
+                LOGD("Setting selection value %s", str);
+                if (value && m_bZslMode_new) {
+                    // Enable: allowed only in ZSL mode.
+                    updateParamEntry(KEY_QC_SCENE_SELECTION, str);
+                    m_bNeedRestart = true;
+                    m_bSceneSelection = true;
+                } else if (!value) {
+                    // Disable: always allowed.
+                    updateParamEntry(KEY_QC_SCENE_SELECTION, str);
+                    m_bNeedRestart = true;
+                    m_bSceneSelection = false;
+                } else {
+                    // Enable requested outside ZSL mode: rejected.
+                    LOGE("Trying to enable scene selection in non ZSL mode!!!");
+                    return BAD_VALUE;
+                }
+            } else {
+                LOGE("Trying to configure invalid scene selection value: %s",
+                        str);
+                return BAD_VALUE;
+            }
+        }
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSelectedScene
+ *
+ * DESCRIPTION: select specific scene
+ *
+ * PARAMETERS :
+ *   @scene   : scene mode
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSelectedScene(cam_scene_mode_type scene)
+{
+    // Record the selected scene under the selection lock shared with the
+    // getter.
+    Mutex::Autolock lock(m_SceneSelectLock);
+    m_SelectedScene = scene;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getSelectedScene
+ *
+ * DESCRIPTION: get selected scene
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : currently selected scene
+ *==========================================================================*/
+cam_scene_mode_type QCameraParameters::getSelectedScene()
+{
+    // Read the selected scene under the same lock used by the setter.
+    Mutex::Autolock lock(m_SceneSelectLock);
+    return m_SelectedScene;
+}
+
+/*==========================================================
+ * FUNCTION   : setRdiMode
+ *
+ * DESCRIPTION: set Rdi mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *===========================================================*/
+int32_t QCameraParameters::setRdiMode(const QCameraParameters& params)
+{
+    // RDI mode comes either from the app-supplied key or, failing that, from
+    // the persist.camera.rdi.mode system property.
+    const char *reqVal = params.get(KEY_QC_RDI_MODE);
+    const char *curVal = get(KEY_QC_RDI_MODE);
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.rdi.mode", prop, VALUE_DISABLE);
+
+    bool keyChanged = (reqVal != NULL) &&
+            ((curVal == NULL) || (strcmp(reqVal, curVal) != 0));
+    if (keyChanged) {
+        LOGD("RDI mode set to %s", reqVal);
+        setRdiMode(reqVal);
+    } else if ((curVal == NULL) || (strcmp(curVal, prop) != 0)) {
+        LOGD("RDI mode set to prop: %s", prop);
+        setRdiMode(prop);
+    }
+    return NO_ERROR;
+}
+
+/*==========================================================
+ * FUNCTION   : setSecureMode
+ *
+ * DESCRIPTION: set secure mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *===========================================================*/
+
+int32_t QCameraParameters::setSecureMode(const QCameraParameters& params)
+{
+    // Secure mode comes either from the app-supplied key or, failing that,
+    // from the persist.camera.secure.mode system property.
+    const char *reqVal = params.get(KEY_QC_SECURE_MODE);
+    const char *curVal = get(KEY_QC_SECURE_MODE);
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.secure.mode", prop, VALUE_DISABLE);
+
+    bool keyChanged = (reqVal != NULL) &&
+            ((curVal == NULL) || (strcmp(reqVal, curVal) != 0));
+    if (keyChanged) {
+        LOGD("Secure mode set to KEY: %s", reqVal);
+        setSecureMode(reqVal);
+    } else if ((curVal == NULL) || (strcmp(curVal, prop) != 0)) {
+        LOGD("Secure mode set to prop: %s", prop);
+        setSecureMode(prop);
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setZslAttributes
+ *
+ * DESCRIPTION: set ZSL related attributes (burst interval, lookback count,
+ *              queue depth) from user setting, with system-property defaults
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setZslAttributes(const QCameraParameters& params)
+{
+    // TODO: may switch to pure param instead of sysprop
+    char prop[PROPERTY_VALUE_MAX];
+
+    const char *str = params.get(KEY_QC_ZSL_BURST_INTERVAL);
+    if (str != NULL) {
+        set(KEY_QC_ZSL_BURST_INTERVAL, str);
+    } else {
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.camera.zsl.interval", prop, "1");
+        set(KEY_QC_ZSL_BURST_INTERVAL, prop);
+        LOGH("burst interval: %s", prop);
+    }
+
+    str = params.get(KEY_QC_ZSL_BURST_LOOKBACK);
+    if (str != NULL) {
+        set(KEY_QC_ZSL_BURST_LOOKBACK, str);
+    } else {
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.camera.zsl.backlookcnt", prop, "2");
+        uint32_t look_back_cnt = atoi(prop);
+        // Frame-sync needs extra buffers on top of the property value.
+        if (m_bFrameSyncEnabled) {
+            look_back_cnt += EXTRA_FRAME_SYNC_BUFFERS;
+        }
+        set(KEY_QC_ZSL_BURST_LOOKBACK, look_back_cnt);
+        // NOTE(review): this logs the raw property string, not the
+        // frame-sync-adjusted look_back_cnt actually stored above.
+        LOGH("look back count: %s", prop);
+    }
+
+    str = params.get(KEY_QC_ZSL_QUEUE_DEPTH);
+    if (str != NULL) {
+        set(KEY_QC_ZSL_QUEUE_DEPTH, str);
+    } else {
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.camera.zsl.queuedepth", prop, "2");
+        uint32_t queue_depth = atoi(prop);
+        // Frame-sync needs extra buffers on top of the property value.
+        if (m_bFrameSyncEnabled) {
+            queue_depth += EXTRA_FRAME_SYNC_BUFFERS;
+        }
+        set(KEY_QC_ZSL_QUEUE_DEPTH, queue_depth);
+        // NOTE(review): logs the raw property string, not the adjusted depth.
+        LOGH("queue depth: %s", prop);
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFlip
+ *
+ * DESCRIPTION: set preview/ video/ picture flip mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFlip(const QCameraParameters& params)
+{
+    if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_FLIP) == 0) {
+        LOGH("flip is not supported.");
+        return NO_ERROR;
+    }
+
+    // Preview, video and snapshot flip all follow the same pattern: accept
+    // the new value only when it is a known flip mode and differs from the
+    // stored one, and raise the corresponding "changed" flag.
+    struct FlipEntry {
+        const char *key;
+        bool QCameraParameters::*changedFlag;
+    };
+    const FlipEntry entries[] = {
+        { KEY_QC_PREVIEW_FLIP, &QCameraParameters::m_bPreviewFlipChanged },
+        { KEY_QC_VIDEO_FLIP, &QCameraParameters::m_bVideoFlipChanged },
+        { KEY_QC_SNAPSHOT_PICTURE_FLIP, &QCameraParameters::m_bSnapshotFlipChanged },
+    };
+
+    for (size_t i = 0; i < sizeof(entries) / sizeof(entries[0]); i++) {
+        const char *newVal = params.get(entries[i].key);
+        const char *curVal = get(entries[i].key);
+        if (newVal == NULL) {
+            continue;
+        }
+        if ((curVal != NULL) && (strcmp(newVal, curVal) == 0)) {
+            continue;
+        }
+        if (lookupAttr(FLIP_MODES_MAP, PARAM_MAP_SIZE(FLIP_MODES_MAP),
+                newVal) != NAME_NOT_FOUND) {
+            set(entries[i].key, newVal);
+            this->*(entries[i].changedFlag) = true;
+        }
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSnapshotFDReq
+ *
+ * DESCRIPTION: set requirement of Face Detection Metadata in Snapshot mode.
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSnapshotFDReq(const QCameraParameters& params)
+{
+    // Use the app-supplied setting when present; otherwise fall back to the
+    // persist.camera.snapshot.fd system property (default "0").
+    const char *reqVal = params.get(KEY_QC_SNAPSHOT_FD_DATA);
+    if (reqVal == NULL) {
+        char prop[PROPERTY_VALUE_MAX];
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.camera.snapshot.fd", prop, "0");
+        set(KEY_QC_SNAPSHOT_FD_DATA, prop);
+    } else {
+        set(KEY_QC_SNAPSHOT_FD_DATA, reqVal);
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setMobicat
+ *
+ * DESCRIPTION: set Mobicat on/off based on the persist.camera.mobicat
+ *              system property; the user-setting parameter is unused
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters (ignored)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setMobicat(const QCameraParameters& )
+{
+    char value [PROPERTY_VALUE_MAX];
+    property_get("persist.camera.mobicat", value, "0");
+    int32_t ret = NO_ERROR;
+    uint8_t enableMobi = (uint8_t)atoi(value);
+
+    if (enableMobi) {
+        tune_cmd_t tune_cmd;
+        tune_cmd.type = 2;
+        tune_cmd.module = 0;
+        tune_cmd.value = 1;
+        // Send the tuning command to both VFE and PP. Record a failure from
+        // either, but attempt both so the two stay consistent (previously a
+        // VFE failure returned early, skipping the PP command and leaving
+        // m_MobiMask un-updated while PP failure fell through).
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_SET_VFE_COMMAND, tune_cmd)) {
+            ret = BAD_VALUE;
+        }
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_SET_PP_COMMAND, tune_cmd)) {
+            ret = BAD_VALUE;
+        }
+    }
+    // Always reflect the requested state in the mask.
+    m_MobiMask = enableMobi;
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : setLongshotParam
+ *
+ * DESCRIPTION: set Longshot on/off from the user parameters.
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setLongshotParam(const QCameraParameters& params)
+{
+    const char *newVal = params.get(KEY_QC_LONG_SHOT);
+    if (newVal == NULL) {
+        // Nothing requested by the app; keep current setting.
+        return NO_ERROR;
+    }
+
+    const char *curVal = get(KEY_QC_LONG_SHOT);
+    if (curVal != NULL && strcmp(newVal, curVal) == 0) {
+        // Unchanged; no work to do.
+        return NO_ERROR;
+    }
+
+    set(KEY_QC_LONG_SHOT, newVal);
+    if (curVal && !strcmp(newVal, "off") && !strcmp(curVal, "on")) {
+        // We restart here, to reset the FPS and no
+        // of buffers as per the requirement of single snapshot usecase.
+        // Here restart happens when continuous shot is changed to off from on.
+        // In case of continuous shot on, restart is taken care when actual
+        // longshot command is triggered through sendCommand.
+        m_bNeedRestart = true;
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : checkFeatureConcurrency
+ *
+ * DESCRIPTION: check if there is a feature concurrency issue with advanced
+ *              camera features
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::checkFeatureConcurrency()
+{
+    uint32_t featMask = 0;
+
+    // Build a bitmask of every advanced feature currently enabled.
+    if (isStillMoreEnabled())    featMask |= CAM_QCOM_FEATURE_STILLMORE;
+    if (isHDREnabled())          featMask |= CAM_QCOM_FEATURE_HDR;
+    if (isChromaFlashEnabled())  featMask |= CAM_QCOM_FEATURE_CHROMA_FLASH;
+    if (isUbiFocusEnabled())     featMask |= CAM_QCOM_FEATURE_UBIFOCUS;
+    if (isTruePortraitEnabled()) featMask |= CAM_QCOM_FEATURE_TRUEPORTRAIT;
+    if (isOptiZoomEnabled())     featMask |= CAM_QCOM_FEATURE_OPTIZOOM;
+    if (isUbiRefocus())          featMask |= CAM_QCOM_FEATURE_REFOCUS;
+
+    // Longshot is incompatible with every advanced feature.
+    if (m_bLongshotEnabled && featMask) {
+        LOGE("Failed Longshot mode bit 0x%x", featMask);
+        return BAD_TYPE;
+    }
+
+    int32_t rc = NO_ERROR;
+    if (m_bRecordingHint_new) {
+        // While recording, StillMore is tolerated; anything else fails.
+        featMask &= ~CAM_QCOM_FEATURE_STILLMORE;
+        if (featMask) {
+            LOGE("Failed recording mode bit 0x%x", featMask);
+            rc = BAD_TYPE;
+        }
+    } else if (m_bZslMode_new) {
+        /* ZSL mode check if 2 bits are set */
+        if (featMask & (featMask - 1)) {
+            LOGE("Failed ZSL mode bit 0x%x", featMask);
+            rc = BAD_TYPE;
+        }
+    } else { /* non-ZSL mode */
+        // HDR is tolerated in non-ZSL mode; any remaining bit fails.
+        featMask &= ~CAM_QCOM_FEATURE_HDR;
+        if (featMask) {
+            LOGE("Failed non-ZSL mode bit 0x%x", featMask);
+            rc = BAD_TYPE;
+        }
+    }
+    LOGI("Advance feature enabled 0x%x", featMask);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : updateParameters
+ *
+ * DESCRIPTION: update parameters from user setting. Applies every setter in
+ *              a fixed order inside one batch update; on a setter failure
+ *              the last non-zero code is remembered but the remaining
+ *              (independent) setters are still applied.
+ *
+ * PARAMETERS :
+ *   @p       : user setting parameters, flattened into a String8
+ *   @needRestart : [output] if preview need restart upon setting changes
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateParameters(const String8& p,
+        bool &needRestart)
+{
+    int32_t final_rc = NO_ERROR;
+    int32_t rc;
+    // Setters below may flip this flag; it is reported back via needRestart.
+    m_bNeedRestart = false;
+    QCameraParameters params(p);
+
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        LOGE("Failed to initialize group update table");
+        rc = BAD_TYPE;
+        goto UPDATE_PARAM_DONE;
+    }
+
+    // Apply each user setting; keep going on failure so independent
+    // settings still take effect, but remember the last error code.
+    if ((rc = setPreviewSize(params)))                  final_rc = rc;
+    if ((rc = setVideoSize(params)))                    final_rc = rc;
+    if ((rc = setPictureSize(params)))                  final_rc = rc;
+    if ((rc = setPreviewFormat(params)))                final_rc = rc;
+    if ((rc = setPictureFormat(params)))                final_rc = rc;
+    if ((rc = setJpegQuality(params)))                  final_rc = rc;
+    if ((rc = setOrientation(params)))                  final_rc = rc;
+    if ((rc = setRotation(params)))                     final_rc = rc;
+    if ((rc = setVideoRotation(params)))                final_rc = rc;
+    if ((rc = setNoDisplayMode(params)))                final_rc = rc;
+    if ((rc = setZslMode(params)))                      final_rc = rc;
+    if ((rc = setZslAttributes(params)))                final_rc = rc;
+    if ((rc = setCameraMode(params)))                   final_rc = rc;
+    if ((rc = setSceneSelectionMode(params)))           final_rc = rc;
+    if ((rc = setRecordingHint(params)))                final_rc = rc;
+    if ((rc = setRdiMode(params)))                      final_rc = rc;
+    if ((rc = setSecureMode(params)))                   final_rc = rc;
+    if ((rc = setPreviewFrameRate(params)))             final_rc = rc;
+    if ((rc = setPreviewFpsRange(params)))              final_rc = rc;
+    if ((rc = setAutoExposure(params)))                 final_rc = rc;
+    if ((rc = setEffect(params)))                       final_rc = rc;
+    if ((rc = setBrightness(params)))                   final_rc = rc;
+    if ((rc = setZoom(params)))                         final_rc = rc;
+    if ((rc = setSharpness(params)))                    final_rc = rc;
+    if ((rc = setSaturation(params)))                   final_rc = rc;
+    if ((rc = setContrast(params)))                     final_rc = rc;
+    if ((rc = setFocusMode(params)))                    final_rc = rc;
+    if ((rc = setISOValue(params)))                     final_rc = rc;
+    if ((rc = setContinuousISO(params)))                final_rc = rc;
+    if ((rc = setExposureTime(params)))                 final_rc = rc;
+    if ((rc = setSkinToneEnhancement(params)))          final_rc = rc;
+    if ((rc = setFlash(params)))                        final_rc = rc;
+    if ((rc = setAecLock(params)))                      final_rc = rc;
+    if ((rc = setAwbLock(params)))                      final_rc = rc;
+    if ((rc = setLensShadeValue(params)))               final_rc = rc;
+    if ((rc = setMCEValue(params)))                     final_rc = rc;
+    if ((rc = setDISValue(params)))                     final_rc = rc;
+    if ((rc = setAntibanding(params)))                  final_rc = rc;
+    if ((rc = setExposureCompensation(params)))         final_rc = rc;
+    if ((rc = setWhiteBalance(params)))                 final_rc = rc;
+    if ((rc = setHDRMode(params)))                      final_rc = rc;
+    if ((rc = setHDRNeed1x(params)))                    final_rc = rc;
+    if ((rc = setManualWhiteBalance(params)))           final_rc = rc;
+    if ((rc = setSceneMode(params)))                    final_rc = rc;
+    if ((rc = setFocusAreas(params)))                   final_rc = rc;
+    if ((rc = setFocusPosition(params)))                final_rc = rc;
+    if ((rc = setMeteringAreas(params)))                final_rc = rc;
+    if ((rc = setSelectableZoneAf(params)))             final_rc = rc;
+    if ((rc = setRedeyeReduction(params)))              final_rc = rc;
+    if ((rc = setAEBracket(params)))                    final_rc = rc;
+    if ((rc = setAutoHDR(params)))                      final_rc = rc;
+    if ((rc = setGpsLocation(params)))                  final_rc = rc;
+    if ((rc = setWaveletDenoise(params)))               final_rc = rc;
+    if ((rc = setFaceRecognition(params)))              final_rc = rc;
+    if ((rc = setFlip(params)))                         final_rc = rc;
+    if ((rc = setVideoHDR(params)))                     final_rc = rc;
+    if ((rc = setVtEnable(params)))                     final_rc = rc;
+    if ((rc = setAFBracket(params)))                    final_rc = rc;
+    if ((rc = setReFocus(params)))                      final_rc = rc;
+    if ((rc = setChromaFlash(params)))                  final_rc = rc;
+    if ((rc = setTruePortrait(params)))                 final_rc = rc;
+    if ((rc = setOptiZoom(params)))                     final_rc = rc;
+    if ((rc = setBurstLEDOnPeriod(params)))             final_rc = rc;
+    if ((rc = setRetroActiveBurstNum(params)))          final_rc = rc;
+    if ((rc = setSnapshotFDReq(params)))                final_rc = rc;
+    if ((rc = setTintlessValue(params)))                final_rc = rc;
+    if ((rc = setCDSMode(params)))                      final_rc = rc;
+    if ((rc = setTemporalDenoise(params)))              final_rc = rc;
+    if ((rc = setCacheVideoBuffers(params)))            final_rc = rc;
+    if ((rc = setInitialExposureIndex(params)))         final_rc = rc;
+    if ((rc = setInstantCapture(params)))               final_rc = rc;
+    if ((rc = setInstantAEC(params)))                   final_rc = rc;
+
+    // update live snapshot size after all other parameters are set
+    if ((rc = setLiveSnapshotSize(params)))             final_rc = rc;
+    if ((rc = setJpegThumbnailSize(params)))            final_rc = rc;
+    if ((rc = setStatsDebugMask()))                     final_rc = rc;
+    if ((rc = setPAAF()))                               final_rc = rc;
+    if ((rc = setMobicat(params)))                      final_rc = rc;
+    if ((rc = setSeeMore(params)))                      final_rc = rc;
+    if ((rc = setStillMore(params)))                    final_rc = rc;
+    if ((rc = setCustomParams(params)))                 final_rc = rc;
+    if ((rc = setNoiseReductionMode(params)))           final_rc = rc;
+
+    if ((rc = setLongshotParam(params)))                final_rc = rc;
+    if ((rc = setDualLedCalibration(params)))           final_rc = rc;
+
+    setVideoBatchSize();
+    setLowLightCapture();
+
+    if ((rc = updateFlash(false)))                      final_rc = rc;
+
+#ifdef TARGET_TS_MAKEUP
+    // NOTE(review): in the three assignments below, 'rc' is stale from
+    // updateFlash() above — 'final_rc = rc' looks unintentional (set()
+    // returns void, so there is no new status to record). Verify intent.
+    if (params.get(KEY_TS_MAKEUP) != NULL) {
+        set(KEY_TS_MAKEUP,params.get(KEY_TS_MAKEUP));
+        final_rc = rc;
+    }
+    if (params.get(KEY_TS_MAKEUP_WHITEN) != NULL) {
+        set(KEY_TS_MAKEUP_WHITEN,params.get(KEY_TS_MAKEUP_WHITEN));
+        final_rc = rc;
+    }
+    if (params.get(KEY_TS_MAKEUP_CLEAN) != NULL) {
+        set(KEY_TS_MAKEUP_CLEAN,params.get(KEY_TS_MAKEUP_CLEAN));
+        final_rc = rc;
+    }
+#endif
+
+    if ((rc = setAdvancedCaptureMode()))                final_rc = rc;
+UPDATE_PARAM_DONE:
+    // Report whether any setter requested a preview restart.
+    needRestart = m_bNeedRestart;
+    return final_rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : commitParameters
+ *
+ * DESCRIPTION: commit parameter changes to backend
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::commitParameters()
+{
+    // Push all batched SET entries to the backend in one shot.
+    int32_t rc = commitSetBatch();
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : initDefaultParameters
+ *
+ * DESCRIPTION: initialize default parameters for the first time
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::initDefaultParameters()
+{
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        LOGE("Failed to initialize group update table");
+        return BAD_TYPE;
+    }
+    int32_t hal_version = CAM_HAL_V1;
+    ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_HAL_VERSION, hal_version);
+
+    /*************************Initialize Values******************************/
+    // Set read only parameters from camera capability
+    set(KEY_SMOOTH_ZOOM_SUPPORTED,
+        m_pCapability->smooth_zoom_supported? VALUE_TRUE : VALUE_FALSE);
+    set(KEY_ZOOM_SUPPORTED,
+        m_pCapability->zoom_supported? VALUE_TRUE : VALUE_FALSE);
+    set(KEY_VIDEO_SNAPSHOT_SUPPORTED,
+        m_pCapability->video_snapshot_supported? VALUE_TRUE : VALUE_FALSE);
+    set(KEY_VIDEO_STABILIZATION_SUPPORTED,
+        m_pCapability->video_stablization_supported? VALUE_TRUE : VALUE_FALSE);
+    set(KEY_AUTO_EXPOSURE_LOCK_SUPPORTED,
+        m_pCapability->auto_exposure_lock_supported? VALUE_TRUE : VALUE_FALSE);
+    set(KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED,
+        m_pCapability->auto_wb_lock_supported? VALUE_TRUE : VALUE_FALSE);
+    set(KEY_MAX_NUM_DETECTED_FACES_HW, m_pCapability->max_num_roi);
+    set(KEY_MAX_NUM_DETECTED_FACES_SW, m_pCapability->max_num_roi);
+    set(KEY_QC_MAX_NUM_REQUESTED_FACES, m_pCapability->max_num_roi);
+    // Set focal length, horizontal view angle, and vertical view angle
+    setFloat(KEY_FOCAL_LENGTH, m_pCapability->focal_length);
+    setFloat(KEY_HORIZONTAL_VIEW_ANGLE, m_pCapability->hor_view_angle);
+    setFloat(KEY_VERTICAL_VIEW_ANGLE, m_pCapability->ver_view_angle);
+    set(QCameraParameters::KEY_FOCUS_DISTANCES, "Infinity,Infinity,Infinity");
+    set(KEY_QC_AUTO_HDR_SUPPORTED,
+        (m_pCapability->auto_hdr_supported)? VALUE_TRUE : VALUE_FALSE);
+    // Set supported preview sizes
+    if (m_pCapability->preview_sizes_tbl_cnt > 0 &&
+        m_pCapability->preview_sizes_tbl_cnt <= MAX_SIZES_CNT) {
+        String8 previewSizeValues = createSizesString(
+                m_pCapability->preview_sizes_tbl, m_pCapability->preview_sizes_tbl_cnt);
+        set(KEY_SUPPORTED_PREVIEW_SIZES, previewSizeValues.string());
+        LOGH("supported preview sizes: %s", previewSizeValues.string());
+        // Set default preview size
+        CameraParameters::setPreviewSize(m_pCapability->preview_sizes_tbl[0].width,
+                                         m_pCapability->preview_sizes_tbl[0].height);
+    } else {
+        LOGW("supported preview sizes cnt is 0 or exceeds max!!!");
+    }
+
+    // Set supported video sizes
+    if (m_pCapability->video_sizes_tbl_cnt > 0 &&
+        m_pCapability->video_sizes_tbl_cnt <= MAX_SIZES_CNT) {
+        String8 videoSizeValues = createSizesString(
+                m_pCapability->video_sizes_tbl, m_pCapability->video_sizes_tbl_cnt);
+        set(KEY_SUPPORTED_VIDEO_SIZES, videoSizeValues.string());
+        LOGH("supported video sizes: %s", videoSizeValues.string());
+        // Set default video size
+        CameraParameters::setVideoSize(m_pCapability->video_sizes_tbl[0].width,
+                                       m_pCapability->video_sizes_tbl[0].height);
+
+        //Set preferred Preview size for video
+        String8 vSize = createSizesString(&m_pCapability->preview_sizes_tbl[0], 1);
+        set(KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, vSize.string());
+    } else {
+        LOGW("supported video sizes cnt is 0 or exceeds max!!!");
+    }
+
+    // Set supported picture sizes
+    if (m_pCapability->picture_sizes_tbl_cnt > 0 &&
+        m_pCapability->picture_sizes_tbl_cnt <= MAX_SIZES_CNT) {
+        String8 pictureSizeValues = createSizesString(
+                m_pCapability->picture_sizes_tbl, m_pCapability->picture_sizes_tbl_cnt);
+        set(KEY_SUPPORTED_PICTURE_SIZES, pictureSizeValues.string());
+        LOGH("supported pic sizes: %s", pictureSizeValues.string());
+        // Set default picture size to the smallest resolution
+        CameraParameters::setPictureSize(
+           m_pCapability->picture_sizes_tbl[m_pCapability->picture_sizes_tbl_cnt-1].width,
+           m_pCapability->picture_sizes_tbl[m_pCapability->picture_sizes_tbl_cnt-1].height);
+    } else {
+        LOGW("supported picture sizes cnt is 0 or exceeds max!!!");
+    }
+
+    // Need check if scale should be enabled
+    if (m_pCapability->scale_picture_sizes_cnt > 0 &&
+        m_pCapability->scale_picture_sizes_cnt <= MAX_SCALE_SIZES_CNT){
+        //get scale size, enable scaling. And re-set picture size table with scale sizes
+        m_reprocScaleParam.setScaleEnable(true);
+        int rc_s = m_reprocScaleParam.setScaleSizeTbl(
+            m_pCapability->scale_picture_sizes_cnt, m_pCapability->scale_picture_sizes,
+            m_pCapability->picture_sizes_tbl_cnt, m_pCapability->picture_sizes_tbl);
+        if(rc_s == NO_ERROR){
+            cam_dimension_t *totalSizeTbl = m_reprocScaleParam.getTotalSizeTbl();
+            size_t totalSizeCnt = m_reprocScaleParam.getTotalSizeTblCnt();
+            String8 pictureSizeValues = createSizesString(totalSizeTbl, totalSizeCnt);
+            set(KEY_SUPPORTED_PICTURE_SIZES, pictureSizeValues.string());
+            LOGH("scaled supported pic sizes: %s", pictureSizeValues.string());
+        }else{
+            m_reprocScaleParam.setScaleEnable(false);
+            LOGW("reset scaled picture size table failed.");
+        }
+    }else{
+        m_reprocScaleParam.setScaleEnable(false);
+    }
+
+    // Set supported thumbnail sizes
+    String8 thumbnailSizeValues = createSizesString(
+            THUMBNAIL_SIZES_MAP,
+            PARAM_MAP_SIZE(THUMBNAIL_SIZES_MAP));
+    set(KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, thumbnailSizeValues.string());
+    // Set default thumnail size
+    set(KEY_JPEG_THUMBNAIL_WIDTH, THUMBNAIL_SIZES_MAP[0].width);
+    set(KEY_JPEG_THUMBNAIL_HEIGHT, THUMBNAIL_SIZES_MAP[0].height);
+
+    // Set supported livesnapshot sizes
+    if (m_pCapability->livesnapshot_sizes_tbl_cnt > 0 &&
+        m_pCapability->livesnapshot_sizes_tbl_cnt <= MAX_SIZES_CNT) {
+        String8 liveSnpashotSizeValues = createSizesString(
+                m_pCapability->livesnapshot_sizes_tbl,
+                m_pCapability->livesnapshot_sizes_tbl_cnt);
+        set(KEY_QC_SUPPORTED_LIVESNAPSHOT_SIZES, liveSnpashotSizeValues.string());
+        LOGD("supported live snapshot sizes: %s", liveSnpashotSizeValues.string());
+        m_LiveSnapshotSize =
+            m_pCapability->livesnapshot_sizes_tbl[m_pCapability->livesnapshot_sizes_tbl_cnt-1];
+    }
+
+    // Set supported preview formats
+    String8 previewFormatValues = createValuesString(
+            m_pCapability->supported_preview_fmts,
+            m_pCapability->supported_preview_fmt_cnt,
+            PREVIEW_FORMATS_MAP,
+            PARAM_MAP_SIZE(PREVIEW_FORMATS_MAP));
+    set(KEY_SUPPORTED_PREVIEW_FORMATS, previewFormatValues.string());
+    // Set default preview format
+    CameraParameters::setPreviewFormat(PIXEL_FORMAT_YUV420SP);
+
+    // Set default Video Format as OPAQUE
+    // Internally both Video and Camera subsystems use NV21_VENUS
+    set(KEY_VIDEO_FRAME_FORMAT, PIXEL_FORMAT_ANDROID_OPAQUE);
+
+    // Set supported picture formats
+    String8 pictureTypeValues(PIXEL_FORMAT_JPEG);
+    String8 str = createValuesString(
+            m_pCapability->supported_raw_fmts,
+            m_pCapability->supported_raw_fmt_cnt,
+            PICTURE_TYPES_MAP,
+            PARAM_MAP_SIZE(PICTURE_TYPES_MAP));
+    if (str.string() != NULL) {
+        pictureTypeValues.append(",");
+        pictureTypeValues.append(str);
+    }
+
+    set(KEY_SUPPORTED_PICTURE_FORMATS, pictureTypeValues.string());
+    // Set default picture Format
+    CameraParameters::setPictureFormat(PIXEL_FORMAT_JPEG);
+    // Set raw image size
+    char raw_size_str[32];
+    snprintf(raw_size_str, sizeof(raw_size_str), "%dx%d",
+             m_pCapability->raw_dim[0].width, m_pCapability->raw_dim[0].height);
+    set(KEY_QC_RAW_PICUTRE_SIZE, raw_size_str);
+    LOGD("KEY_QC_RAW_PICUTRE_SIZE: w: %d, h: %d ",
+       m_pCapability->raw_dim[0].width, m_pCapability->raw_dim[0].height);
+
+    //set default jpeg quality and thumbnail quality
+    set(KEY_JPEG_QUALITY, 85);
+    set(KEY_JPEG_THUMBNAIL_QUALITY, 85);
+
+    // Set FPS ranges
+    if (m_pCapability->fps_ranges_tbl_cnt > 0 &&
+        m_pCapability->fps_ranges_tbl_cnt <= MAX_SIZES_CNT) {
+        int default_fps_index = 0;
+        String8 fpsRangeValues = createFpsRangeString(m_pCapability->fps_ranges_tbl,
+                                                      m_pCapability->fps_ranges_tbl_cnt,
+                                                      default_fps_index);
+        set(KEY_SUPPORTED_PREVIEW_FPS_RANGE, fpsRangeValues.string());
+
+        int min_fps =
+            int(m_pCapability->fps_ranges_tbl[default_fps_index].min_fps * 1000);
+        int max_fps =
+            int(m_pCapability->fps_ranges_tbl[default_fps_index].max_fps * 1000);
+        m_default_fps_range = m_pCapability->fps_ranges_tbl[default_fps_index];
+        //Set video fps same as preview fps
+        setPreviewFpsRange(min_fps, max_fps, min_fps, max_fps);
+
+        // Set legacy preview fps
+        String8 fpsValues = createFpsString(m_pCapability->fps_ranges_tbl[default_fps_index]);
+        set(KEY_SUPPORTED_PREVIEW_FRAME_RATES, fpsValues.string());
+        LOGH("supported fps rates: %s", fpsValues.string());
+        CameraParameters::setPreviewFrameRate(int(m_pCapability->fps_ranges_tbl[default_fps_index].max_fps));
+    } else {
+        LOGW("supported fps ranges cnt is 0 or exceeds max!!!");
+    }
+
+    // Set supported focus modes
+    if (m_pCapability->supported_focus_modes_cnt > 0) {
+        String8 focusModeValues = createValuesString(
+                m_pCapability->supported_focus_modes,
+                m_pCapability->supported_focus_modes_cnt,
+                FOCUS_MODES_MAP,
+                PARAM_MAP_SIZE(FOCUS_MODES_MAP));
+        set(KEY_SUPPORTED_FOCUS_MODES, focusModeValues);
+
+        // Set default focus mode and update corresponding parameter buf
+        const char *focusMode = lookupNameByValue(FOCUS_MODES_MAP,
+                PARAM_MAP_SIZE(FOCUS_MODES_MAP),
+                m_pCapability->supported_focus_modes[0]);
+        if (focusMode != NULL) {
+            setFocusMode(focusMode);
+        } else {
+            setFocusMode(FOCUS_MODE_FIXED);
+        }
+    } else {
+        LOGW("supported focus modes cnt is 0!!!");
+    }
+
+    // Set focus areas
+    if (m_pCapability->max_num_focus_areas > MAX_ROI) {
+        m_pCapability->max_num_focus_areas = MAX_ROI;
+    }
+    set(KEY_MAX_NUM_FOCUS_AREAS, m_pCapability->max_num_focus_areas);
+    if (m_pCapability->max_num_focus_areas > 0) {
+        setFocusAreas(DEFAULT_CAMERA_AREA);
+    }
+
+    // Set metering areas
+    if (m_pCapability->max_num_metering_areas > MAX_ROI) {
+        m_pCapability->max_num_metering_areas = MAX_ROI;
+    }
+    set(KEY_MAX_NUM_METERING_AREAS, m_pCapability->max_num_metering_areas);
+    if (m_pCapability->max_num_metering_areas > 0) {
+        setMeteringAreas(DEFAULT_CAMERA_AREA);
+    }
+
+    // set focus position, we should get them from m_pCapability
+    m_pCapability->min_focus_pos[CAM_MANUAL_FOCUS_MODE_INDEX] = 0;
+    m_pCapability->max_focus_pos[CAM_MANUAL_FOCUS_MODE_INDEX] = 1023;
+    set(KEY_QC_MIN_FOCUS_POS_INDEX,
+            (int) m_pCapability->min_focus_pos[CAM_MANUAL_FOCUS_MODE_INDEX]);
+    set(KEY_QC_MAX_FOCUS_POS_INDEX,
+            (int) m_pCapability->max_focus_pos[CAM_MANUAL_FOCUS_MODE_INDEX]);
+
+    m_pCapability->min_focus_pos[CAM_MANUAL_FOCUS_MODE_DAC_CODE] = 0;
+    m_pCapability->max_focus_pos[CAM_MANUAL_FOCUS_MODE_DAC_CODE] = 1023;
+    set(KEY_QC_MIN_FOCUS_POS_DAC,
+            (int) m_pCapability->min_focus_pos[CAM_MANUAL_FOCUS_MODE_DAC_CODE]);
+    set(KEY_QC_MAX_FOCUS_POS_DAC,
+            (int) m_pCapability->max_focus_pos[CAM_MANUAL_FOCUS_MODE_DAC_CODE]);
+
+    m_pCapability->min_focus_pos[CAM_MANUAL_FOCUS_MODE_RATIO] = 0;
+    m_pCapability->max_focus_pos[CAM_MANUAL_FOCUS_MODE_RATIO] = 100;
+    set(KEY_QC_MIN_FOCUS_POS_RATIO,
+            (int) m_pCapability->min_focus_pos[CAM_MANUAL_FOCUS_MODE_RATIO]);
+    set(KEY_QC_MAX_FOCUS_POS_RATIO,
+            (int) m_pCapability->max_focus_pos[CAM_MANUAL_FOCUS_MODE_RATIO]);
+
+    m_pCapability->min_focus_pos[CAM_MANUAL_FOCUS_MODE_DIOPTER] = 0;
+    if (m_pCapability->min_focus_distance > 0) {
+        m_pCapability->max_focus_pos[CAM_MANUAL_FOCUS_MODE_DIOPTER] =
+                m_pCapability->min_focus_distance;
+    } else {
+        m_pCapability->max_focus_pos[CAM_MANUAL_FOCUS_MODE_DIOPTER] = 0;
+    }
+    setFloat(KEY_QC_MIN_FOCUS_POS_DIOPTER,
+            m_pCapability->min_focus_pos[CAM_MANUAL_FOCUS_MODE_DIOPTER]);
+    setFloat(KEY_QC_MAX_FOCUS_POS_DIOPTER,
+            m_pCapability->max_focus_pos[CAM_MANUAL_FOCUS_MODE_DIOPTER]);
+
+    //set supported manual focus modes
+    String8 manualFocusModes(VALUE_OFF);
+    if (m_pCapability->supported_focus_modes_cnt > 1 &&
+        m_pCapability->min_focus_distance > 0) {
+        manualFocusModes.append(",");
+        manualFocusModes.append(KEY_QC_FOCUS_SCALE_MODE);
+        manualFocusModes.append(",");
+        manualFocusModes.append(KEY_QC_FOCUS_DIOPTER_MODE);
+    }
+    set(KEY_QC_SUPPORTED_MANUAL_FOCUS_MODES, manualFocusModes.string());
+
+    // Set Saturation
+    set(KEY_QC_MIN_SATURATION, m_pCapability->saturation_ctrl.min_value);
+    set(KEY_QC_MAX_SATURATION, m_pCapability->saturation_ctrl.max_value);
+    set(KEY_QC_SATURATION_STEP, m_pCapability->saturation_ctrl.step);
+    setSaturation(m_pCapability->saturation_ctrl.def_value);
+
+    // Set Sharpness
+    set(KEY_QC_MIN_SHARPNESS, m_pCapability->sharpness_ctrl.min_value);
+    set(KEY_QC_MAX_SHARPNESS, m_pCapability->sharpness_ctrl.max_value);
+    set(KEY_QC_SHARPNESS_STEP, m_pCapability->sharpness_ctrl.step);
+    setSharpness(m_pCapability->sharpness_ctrl.def_value);
+
+    // Set Contrast
+    set(KEY_QC_MIN_CONTRAST, m_pCapability->contrast_ctrl.min_value);
+    set(KEY_QC_MAX_CONTRAST, m_pCapability->contrast_ctrl.max_value);
+    set(KEY_QC_CONTRAST_STEP, m_pCapability->contrast_ctrl.step);
+    setContrast(m_pCapability->contrast_ctrl.def_value);
+
+    // Set SCE factor
+    set(KEY_QC_MIN_SCE_FACTOR, m_pCapability->sce_ctrl.min_value); // -100
+    set(KEY_QC_MAX_SCE_FACTOR, m_pCapability->sce_ctrl.max_value); // 100
+    set(KEY_QC_SCE_FACTOR_STEP, m_pCapability->sce_ctrl.step);     // 10
+    setSkinToneEnhancement(m_pCapability->sce_ctrl.def_value);     // 0
+
+    // Set Brightness
+    set(KEY_QC_MIN_BRIGHTNESS, m_pCapability->brightness_ctrl.min_value); // 0
+    set(KEY_QC_MAX_BRIGHTNESS, m_pCapability->brightness_ctrl.max_value); // 6
+    set(KEY_QC_BRIGHTNESS_STEP, m_pCapability->brightness_ctrl.step);     // 1
+    setBrightness(m_pCapability->brightness_ctrl.def_value);
+
+    // Set Auto exposure
+    String8 autoExposureValues = createValuesString(
+            m_pCapability->supported_aec_modes,
+            m_pCapability->supported_aec_modes_cnt,
+            AUTO_EXPOSURE_MAP,
+            PARAM_MAP_SIZE(AUTO_EXPOSURE_MAP));
+    set(KEY_QC_SUPPORTED_AUTO_EXPOSURE, autoExposureValues.string());
+    setAutoExposure(AUTO_EXPOSURE_FRAME_AVG);
+
+    // Set Exposure Compensation
+    set(KEY_MAX_EXPOSURE_COMPENSATION, m_pCapability->exposure_compensation_max); // 12
+    set(KEY_MIN_EXPOSURE_COMPENSATION, m_pCapability->exposure_compensation_min); // -12
+    setFloat(KEY_EXPOSURE_COMPENSATION_STEP, m_pCapability->exposure_compensation_step); // 1/6
+    setExposureCompensation(m_pCapability->exposure_compensation_default); // 0
+
+    // Set Instant AEC modes
+    String8 instantAECModes = createValuesString(
+            m_pCapability->supported_instant_aec_modes,
+            m_pCapability->supported_instant_aec_modes_cnt,
+            INSTANT_AEC_MODES_MAP,
+            PARAM_MAP_SIZE(INSTANT_AEC_MODES_MAP));
+    set(KEY_QC_INSTANT_AEC_SUPPORTED_MODES, instantAECModes.string());
+
+    // Set Instant Capture modes
+    String8 instantCaptureModes = createValuesString(
+            m_pCapability->supported_instant_aec_modes,
+            m_pCapability->supported_instant_aec_modes_cnt,
+            INSTANT_CAPTURE_MODES_MAP,
+            PARAM_MAP_SIZE(INSTANT_CAPTURE_MODES_MAP));
+    set(KEY_QC_INSTANT_CAPTURE_SUPPORTED_MODES, instantCaptureModes.string());
+
+
+    // Set Antibanding
+    String8 antibandingValues = createValuesString(
+            m_pCapability->supported_antibandings,
+            m_pCapability->supported_antibandings_cnt,
+            ANTIBANDING_MODES_MAP,
+            PARAM_MAP_SIZE(ANTIBANDING_MODES_MAP));
+    set(KEY_SUPPORTED_ANTIBANDING, antibandingValues);
+    setAntibanding(ANTIBANDING_OFF);
+
+    // Set Effect
+    String8 effectValues = createValuesString(
+            m_pCapability->supported_effects,
+            m_pCapability->supported_effects_cnt,
+            EFFECT_MODES_MAP,
+            PARAM_MAP_SIZE(EFFECT_MODES_MAP));
+
+    if (m_pCapability->supported_effects_cnt > 0) {
+        set(KEY_SUPPORTED_EFFECTS, effectValues);
+    } else {
+        LOGW("Color effects are not available");
+        set(KEY_SUPPORTED_EFFECTS, EFFECT_NONE);
+    }
+    setEffect(EFFECT_NONE);
+
+    // Set WhiteBalance
+    String8 whitebalanceValues = createValuesString(
+            m_pCapability->supported_white_balances,
+            m_pCapability->supported_white_balances_cnt,
+            WHITE_BALANCE_MODES_MAP,
+            PARAM_MAP_SIZE(WHITE_BALANCE_MODES_MAP));
+    set(KEY_SUPPORTED_WHITE_BALANCE, whitebalanceValues);
+    setWhiteBalance(WHITE_BALANCE_AUTO);
+
+    // set supported wb cct, we should get them from m_pCapability
+    m_pCapability->min_wb_cct = 2000;
+    m_pCapability->max_wb_cct = 8000;
+    set(KEY_QC_MIN_WB_CCT, m_pCapability->min_wb_cct);
+    set(KEY_QC_MAX_WB_CCT, m_pCapability->max_wb_cct);
+
+    // set supported wb rgb gains, ideally we should get them from m_pCapability
+    //but for now hardcode.
+    m_pCapability->min_wb_gain = 1.0;
+    m_pCapability->max_wb_gain = 4.0;
+    setFloat(KEY_QC_MIN_WB_GAIN, m_pCapability->min_wb_gain);
+    setFloat(KEY_QC_MAX_WB_GAIN, m_pCapability->max_wb_gain);
+
+    //set supported manual wb modes
+    String8 manualWBModes(VALUE_OFF);
+    if(m_pCapability->sensor_type.sens_type != CAM_SENSOR_YUV) {
+        manualWBModes.append(",");
+        manualWBModes.append(KEY_QC_WB_CCT_MODE);
+        manualWBModes.append(",");
+        manualWBModes.append(KEY_QC_WB_GAIN_MODE);
+    }
+    set(KEY_QC_SUPPORTED_MANUAL_WB_MODES, manualWBModes.string());
+
+    // Set Flash mode
+    if(m_pCapability->supported_flash_modes_cnt > 0) {
+       String8 flashValues = createValuesString(
+               m_pCapability->supported_flash_modes,
+               m_pCapability->supported_flash_modes_cnt,
+               FLASH_MODES_MAP,
+               PARAM_MAP_SIZE(FLASH_MODES_MAP));
+       set(KEY_SUPPORTED_FLASH_MODES, flashValues);
+       setFlash(FLASH_MODE_OFF);
+    } else {
+        LOGW("supported flash modes cnt is 0!!!");
+    }
+
+    // Set Scene Mode
+    String8 sceneModeValues = createValuesString(
+            m_pCapability->supported_scene_modes,
+            m_pCapability->supported_scene_modes_cnt,
+            SCENE_MODES_MAP,
+            PARAM_MAP_SIZE(SCENE_MODES_MAP));
+    set(KEY_SUPPORTED_SCENE_MODES, sceneModeValues);
+    setSceneMode(SCENE_MODE_AUTO);
+
+    // Set CDS Mode
+    String8 cdsModeValues = createValuesStringFromMap(
+            CDS_MODES_MAP,
+            PARAM_MAP_SIZE(CDS_MODES_MAP));
+    set(KEY_QC_SUPPORTED_CDS_MODES, cdsModeValues);
+
+    // Set video CDS Mode
+    String8 videoCdsModeValues = createValuesStringFromMap(
+            CDS_MODES_MAP,
+            PARAM_MAP_SIZE(CDS_MODES_MAP));
+    set(KEY_QC_SUPPORTED_VIDEO_CDS_MODES, videoCdsModeValues);
+
+    // Set TNR Mode
+    String8 tnrModeValues = createValuesStringFromMap(
+            ON_OFF_MODES_MAP,
+            PARAM_MAP_SIZE(ON_OFF_MODES_MAP));
+    set(KEY_QC_SUPPORTED_TNR_MODES, tnrModeValues);
+
+    // Set video TNR Mode
+    String8 videoTnrModeValues = createValuesStringFromMap(
+            ON_OFF_MODES_MAP,
+            PARAM_MAP_SIZE(ON_OFF_MODES_MAP));
+    set(KEY_QC_SUPPORTED_VIDEO_TNR_MODES, videoTnrModeValues);
+
+    // Set ISO Mode
+    String8 isoValues = createValuesString(
+            m_pCapability->supported_iso_modes,
+            m_pCapability->supported_iso_modes_cnt,
+            ISO_MODES_MAP,
+            PARAM_MAP_SIZE(ISO_MODES_MAP));
+    set(KEY_QC_SUPPORTED_ISO_MODES, isoValues);
+    setISOValue(ISO_AUTO);
+
+    // Set exposure time
+    String8 manualExpModes(VALUE_OFF);
+    bool expTimeSupported = false;
+    bool manualISOSupported = false;
+    //capability values are in nano sec, convert to milli sec for upper layers
+    char expTimeStr[20];
+    double min_exp_time = (double) m_pCapability->exposure_time_range[0] / 1000000.0;
+    double max_exp_time = (double) m_pCapability->exposure_time_range[1] / 1000000.0;
+    snprintf(expTimeStr, sizeof(expTimeStr), "%f", min_exp_time);
+    set(KEY_QC_MIN_EXPOSURE_TIME, expTimeStr);
+    snprintf(expTimeStr, sizeof(expTimeStr), "%f", max_exp_time);
+    set(KEY_QC_MAX_EXPOSURE_TIME, expTimeStr);
+    if ((min_exp_time > 0) && (max_exp_time > min_exp_time)) {
+        manualExpModes.append(",");
+        manualExpModes.append(KEY_QC_EXP_TIME_PRIORITY);
+        expTimeSupported = true;
+    }
+    LOGH(", Exposure time min %f ms, max %f ms",
+            min_exp_time, max_exp_time);
+
+    // Set iso
+    set(KEY_QC_MIN_ISO, m_pCapability->sensitivity_range.min_sensitivity);
+    set(KEY_QC_MAX_ISO, m_pCapability->sensitivity_range.max_sensitivity);
+    LOGH(", ISO min %d, max %d",
+            m_pCapability->sensitivity_range.min_sensitivity,
+            m_pCapability->sensitivity_range.max_sensitivity);
+    if ((m_pCapability->sensitivity_range.min_sensitivity > 0) &&
+            (m_pCapability->sensitivity_range.max_sensitivity >
+                    m_pCapability->sensitivity_range.min_sensitivity)) {
+        manualExpModes.append(",");
+        manualExpModes.append(KEY_QC_ISO_PRIORITY);
+        manualISOSupported = true;
+    }
+    if (expTimeSupported && manualISOSupported) {
+        manualExpModes.append(",");
+        manualExpModes.append(KEY_QC_USER_SETTING);
+    }
+    //finally set supported manual exposure modes
+    set(KEY_QC_SUPPORTED_MANUAL_EXPOSURE_MODES, manualExpModes.string());
+
+    // Set HFR
+    String8 hfrValues = createHfrValuesString(
+            m_pCapability->hfr_tbl,
+            m_pCapability->hfr_tbl_cnt,
+            HFR_MODES_MAP,
+            PARAM_MAP_SIZE(HFR_MODES_MAP));
+    set(KEY_QC_SUPPORTED_VIDEO_HIGH_FRAME_RATE_MODES, hfrValues.string());
+    set(KEY_QC_VIDEO_HIGH_SPEED_RECORDING, "off");
+    set(KEY_QC_VIDEO_HIGH_FRAME_RATE, "off");
+    String8 hfrSizeValues = createHfrSizesString(
+            m_pCapability->hfr_tbl,
+            m_pCapability->hfr_tbl_cnt);
+    set(KEY_QC_SUPPORTED_HFR_SIZES, hfrSizeValues.string());
+    LOGD("HFR values = %s HFR Sizes = %s", hfrValues.string(), hfrSizeValues.string());
+    setHighFrameRate(CAM_HFR_MODE_OFF);
+
+    // Set Focus algorithms
+    String8 focusAlgoValues = createValuesString(
+            m_pCapability->supported_focus_algos,
+            m_pCapability->supported_focus_algos_cnt,
+            FOCUS_ALGO_MAP,
+            PARAM_MAP_SIZE(FOCUS_ALGO_MAP));
+    set(KEY_QC_SUPPORTED_FOCUS_ALGOS, focusAlgoValues);
+    setSelectableZoneAf(FOCUS_ALGO_AUTO);
+
+    // Set Zoom Ratios
+    if (m_pCapability->zoom_supported > 0) {
+        String8 zoomRatioValues = createZoomRatioValuesString(
+                m_pCapability->zoom_ratio_tbl,
+                m_pCapability->zoom_ratio_tbl_cnt);
+        set(KEY_ZOOM_RATIOS, zoomRatioValues);
+        set(KEY_MAX_ZOOM, (int)(m_pCapability->zoom_ratio_tbl_cnt - 1));
+        setZoom(0);
+    }
+
+    // Set Bracketing/HDR
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.capture.burst.exposures", prop, "");
+    if (strlen(prop) > 0) {
+        set(KEY_QC_CAPTURE_BURST_EXPOSURE, prop);
+    }
+    String8 bracketingValues = createValuesStringFromMap(
+            BRACKETING_MODES_MAP,
+            PARAM_MAP_SIZE(BRACKETING_MODES_MAP));
+    set(KEY_QC_SUPPORTED_AE_BRACKET_MODES, bracketingValues);
+    setAEBracket(AE_BRACKET_OFF);
+
+    //Set AF Bracketing.
+    for (size_t i = 0; i < m_pCapability->supported_focus_modes_cnt; i++) {
+        if ((CAM_FOCUS_MODE_AUTO == m_pCapability->supported_focus_modes[i]) &&
+                ((m_pCapability->qcom_supported_feature_mask &
+                        CAM_QCOM_FEATURE_UBIFOCUS) > 0)) {
+            String8 afBracketingValues = createValuesStringFromMap(
+                    AF_BRACKETING_MODES_MAP,
+                    PARAM_MAP_SIZE(AF_BRACKETING_MODES_MAP));
+            set(KEY_QC_SUPPORTED_AF_BRACKET_MODES, afBracketingValues);
+            setAFBracket(AF_BRACKET_OFF);
+            break;
+         }
+    }
+
+    //Set Refocus.
+    //Re-use ubifocus flag for now.
+    for (size_t i = 0; i < m_pCapability->supported_focus_modes_cnt; i++) {
+        if ((CAM_FOCUS_MODE_AUTO == m_pCapability->supported_focus_modes[i]) &&
+                (m_pCapability->qcom_supported_feature_mask &
+                    CAM_QCOM_FEATURE_REFOCUS) > 0) {
+            String8 reFocusValues = createValuesStringFromMap(
+                    RE_FOCUS_MODES_MAP,
+                    PARAM_MAP_SIZE(RE_FOCUS_MODES_MAP));
+            set(KEY_QC_SUPPORTED_RE_FOCUS_MODES, reFocusValues);
+            setReFocus(RE_FOCUS_OFF);
+        }
+    }
+
+    //Set Chroma Flash.
+    if ((m_pCapability->supported_flash_modes_cnt > 0) &&
+            (m_pCapability->qcom_supported_feature_mask &
+            CAM_QCOM_FEATURE_CHROMA_FLASH) > 0) {
+        String8 chromaFlashValues = createValuesStringFromMap(
+                CHROMA_FLASH_MODES_MAP,
+                PARAM_MAP_SIZE(CHROMA_FLASH_MODES_MAP));
+        set(KEY_QC_SUPPORTED_CHROMA_FLASH_MODES, chromaFlashValues);
+        setChromaFlash(CHROMA_FLASH_OFF);
+    }
+
+    //Set Opti Zoom.
+    if (m_pCapability->zoom_supported &&
+            (m_pCapability->qcom_supported_feature_mask &
+            CAM_QCOM_FEATURE_OPTIZOOM) > 0){
+        String8 optiZoomValues = createValuesStringFromMap(
+                OPTI_ZOOM_MODES_MAP,
+                PARAM_MAP_SIZE(OPTI_ZOOM_MODES_MAP));
+        set(KEY_QC_SUPPORTED_OPTI_ZOOM_MODES, optiZoomValues);
+        setOptiZoom(OPTI_ZOOM_OFF);
+    }
+
+    //Set HDR Type
+    uint32_t supported_hdr_modes = m_pCapability->qcom_supported_feature_mask &
+            (CAM_QCOM_FEATURE_SENSOR_HDR | CAM_QCOM_FEATURE_HDR);
+    if (supported_hdr_modes) {
+        if (CAM_QCOM_FEATURE_SENSOR_HDR == supported_hdr_modes) {
+            String8 hdrModeValues;
+            hdrModeValues.append(HDR_MODE_SENSOR);
+            set(KEY_QC_SUPPORTED_KEY_QC_HDR_MODES, hdrModeValues);
+            setHDRMode(HDR_MODE_SENSOR);
+        } else if (CAM_QCOM_FEATURE_HDR == supported_hdr_modes) {
+            String8 hdrModeValues;
+            hdrModeValues.append(HDR_MODE_MULTI_FRAME);
+            set(KEY_QC_SUPPORTED_KEY_QC_HDR_MODES, hdrModeValues);
+            setHDRMode(HDR_MODE_MULTI_FRAME);
+        } else {
+            String8 hdrModeValues = createValuesStringFromMap(
+                    HDR_MODES_MAP,
+                    PARAM_MAP_SIZE(HDR_MODES_MAP));
+            set(KEY_QC_SUPPORTED_KEY_QC_HDR_MODES, hdrModeValues);
+            setHDRMode(HDR_MODE_MULTI_FRAME);
+        }
+    }
+
+    //Set HDR need 1x
+    String8 hdrNeed1xValues;
+    if (!m_bHDRModeSensor) {
+        hdrNeed1xValues = createValuesStringFromMap(TRUE_FALSE_MODES_MAP,
+                PARAM_MAP_SIZE(TRUE_FALSE_MODES_MAP));
+    } else {
+        hdrNeed1xValues.append(VALUE_FALSE);
+    }
+    setHDRNeed1x(VALUE_FALSE);
+    set(KEY_QC_SUPPORTED_HDR_NEED_1X, hdrNeed1xValues);
+
+    //Set True Portrait
+    if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_TRUEPORTRAIT) > 0) {
+        String8 truePortraitValues = createValuesStringFromMap(
+                TRUE_PORTRAIT_MODES_MAP,
+                PARAM_MAP_SIZE(TRUE_PORTRAIT_MODES_MAP));
+        set(KEY_QC_SUPPORTED_TRUE_PORTRAIT_MODES, truePortraitValues);
+    }
+
+    // Set Denoise
+    if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_DENOISE2D) > 0){
+    String8 denoiseValues = createValuesStringFromMap(
+        DENOISE_ON_OFF_MODES_MAP, PARAM_MAP_SIZE(DENOISE_ON_OFF_MODES_MAP));
+    set(KEY_QC_SUPPORTED_DENOISE, denoiseValues.string());
+#ifdef DEFAULT_DENOISE_MODE_ON
+    setWaveletDenoise(DENOISE_ON);
+#else
+    setWaveletDenoise(DENOISE_OFF);
+#endif
+    }
+
+    // Set feature enable/disable
+    String8 enableDisableValues = createValuesStringFromMap(
+            ENABLE_DISABLE_MODES_MAP, PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP));
+
+    // Set Lens Shading
+    set(KEY_QC_SUPPORTED_LENSSHADE_MODES, enableDisableValues);
+    setLensShadeValue(VALUE_ENABLE);
+    // Set MCE
+    set(KEY_QC_SUPPORTED_MEM_COLOR_ENHANCE_MODES, enableDisableValues);
+    setMCEValue(VALUE_ENABLE);
+
+    // Set DIS
+    set(KEY_QC_SUPPORTED_DIS_MODES, enableDisableValues);
+    setDISValue(VALUE_DISABLE);
+
+    // Set Histogram
+    set(KEY_QC_SUPPORTED_HISTOGRAM_MODES,
+        m_pCapability->histogram_supported ? enableDisableValues : "");
+    set(KEY_QC_HISTOGRAM, VALUE_DISABLE);
+
+    //Set Red Eye Reduction
+    set(KEY_QC_SUPPORTED_REDEYE_REDUCTION, enableDisableValues);
+    setRedeyeReduction(VALUE_DISABLE);
+
+    //Set SkinTone Enhancement
+    set(KEY_QC_SUPPORTED_SKIN_TONE_ENHANCEMENT_MODES, enableDisableValues);
+
+    // Enable LTM by default and disable it in HDR & SeeMore usecases
+    setToneMapMode(true, false);
+
+    // Set feature on/off
+    String8 onOffValues = createValuesStringFromMap(
+            ON_OFF_MODES_MAP, PARAM_MAP_SIZE(ON_OFF_MODES_MAP));
+
+    //Set See more (LLVD)
+    if (m_pCapability->qcom_supported_feature_mask &
+            CAM_QCOM_FEATURE_LLVD) {
+        set(KEY_QC_SUPPORTED_SEE_MORE_MODES, onOffValues);
+        setSeeMore(VALUE_OFF);
+    }
+
+    //Set Still more
+    if (m_pCapability->qcom_supported_feature_mask &
+            CAM_QCOM_FEATURE_STILLMORE) {
+        String8 stillMoreValues = createValuesStringFromMap(
+                STILL_MORE_MODES_MAP,
+                PARAM_MAP_SIZE(STILL_MORE_MODES_MAP));
+        set(KEY_QC_SUPPORTED_STILL_MORE_MODES, stillMoreValues);
+        setStillMore(STILL_MORE_OFF);
+    }
+
+    //Set Noise Reduction mode
+    if (m_pCapability->qcom_supported_feature_mask &
+            CAM_QTI_FEATURE_SW_TNR) {
+        String8 noiseReductionModesValues = createValuesStringFromMap(
+                NOISE_REDUCTION_MODES_MAP, PARAM_MAP_SIZE(NOISE_REDUCTION_MODES_MAP));
+        set(KEY_QC_NOISE_REDUCTION_MODE_VALUES, noiseReductionModesValues);
+        setNoiseReductionMode(VALUE_OFF);
+    }
+
+    //Set Scene Detection
+    set(KEY_QC_SUPPORTED_SCENE_DETECT, onOffValues);
+    setSceneDetect(VALUE_OFF);
+    m_bHDREnabled = false;
+    m_bHDR1xFrameEnabled = false;
+
+    m_bHDRThumbnailProcessNeeded = false;
+    m_bHDR1xExtraBufferNeeded = true;
+    for (uint32_t i=0; i<m_pCapability->hdr_bracketing_setting.num_frames; i++) {
+        if (0 == m_pCapability->hdr_bracketing_setting.exp_val.values[i]) {
+            m_bHDR1xExtraBufferNeeded = false;
+            break;
+        }
+    }
+
+    // Set HDR output scaling
+    char value[PROPERTY_VALUE_MAX];
+
+    property_get("persist.camera.hdr.outcrop", value, VALUE_DISABLE);
+    if (strncmp(VALUE_ENABLE, value, sizeof(VALUE_ENABLE))) {
+      m_bHDROutputCropEnabled = false;
+    } else {
+      m_bHDROutputCropEnabled = true;
+    }
+
+    //Set Face Detection
+    set(KEY_QC_SUPPORTED_FACE_DETECTION, onOffValues);
+    set(KEY_QC_FACE_DETECTION, VALUE_OFF);
+
+    //Set Face Recognition
+    //set(KEY_QC_SUPPORTED_FACE_RECOGNITION, onOffValues);
+    //set(KEY_QC_FACE_RECOGNITION, VALUE_OFF);
+
+    //Set ZSL
+    set(KEY_QC_SUPPORTED_ZSL_MODES, onOffValues);
+#ifdef DEFAULT_ZSL_MODE_ON
+    set(KEY_QC_ZSL, VALUE_ON);
+    m_bZslMode = true;
+#else
+    set(KEY_QC_ZSL, VALUE_OFF);
+    m_bZslMode = false;
+#endif
+
+    // Check if zsl mode property is enabled.
+    // If yes, force the camera to be in zsl mode
+    // and force zsl mode to be enabled in dual camera mode.
+    memset(value, 0x0, PROPERTY_VALUE_MAX);
+    property_get("persist.camera.zsl.mode", value, "0");
+    int32_t zsl_mode = atoi(value);
+    if((zsl_mode == 1) ||
+            (m_bZslMode == true) ||
+            (m_relCamSyncInfo.sync_control == CAM_SYNC_RELATED_SENSORS_ON)) {
+        LOGH("%d: Forcing Camera to ZSL mode enabled");
+        set(KEY_QC_ZSL, VALUE_ON);
+        m_bForceZslMode = true;
+        m_bZslMode = true;
+        int32_t value = m_bForceZslMode;
+        ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_ZSL_MODE, value);
+    }
+    m_bZslMode_new = m_bZslMode;
+
+    set(KEY_QC_SCENE_SELECTION, VALUE_DISABLE);
+
+    // Rdi mode
+    set(KEY_QC_SUPPORTED_RDI_MODES, enableDisableValues);
+    setRdiMode(VALUE_DISABLE);
+
+    // Secure mode
+    set(KEY_QC_SUPPORTED_SECURE_MODES, enableDisableValues);
+    setSecureMode(VALUE_DISABLE);
+
+    //Set video HDR
+    if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_VIDEO_HDR) > 0) {
+        set(KEY_QC_SUPPORTED_VIDEO_HDR_MODES, onOffValues);
+        set(KEY_QC_VIDEO_HDR, VALUE_OFF);
+    }
+
+    //Set HW Sensor Snapshot HDR
+    if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_SENSOR_HDR)> 0) {
+        set(KEY_QC_SUPPORTED_SENSOR_HDR_MODES, onOffValues);
+        set(KEY_QC_SENSOR_HDR, VALUE_OFF);
+        m_bSensorHDREnabled = false;
+    }
+
+    // Set VT TimeStamp
+    set(KEY_QC_VT_ENABLE, VALUE_DISABLE);
+    //Set Touch AF/AEC
+    String8 touchValues = createValuesStringFromMap(
+            TOUCH_AF_AEC_MODES_MAP, PARAM_MAP_SIZE(TOUCH_AF_AEC_MODES_MAP));
+
+    set(KEY_QC_SUPPORTED_TOUCH_AF_AEC, touchValues);
+    set(KEY_QC_TOUCH_AF_AEC, TOUCH_AF_AEC_OFF);
+
+    //set flip mode
+    if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_FLIP) > 0) {
+        String8 flipModes = createValuesStringFromMap(
+                FLIP_MODES_MAP, PARAM_MAP_SIZE(FLIP_MODES_MAP));
+        set(KEY_QC_SUPPORTED_FLIP_MODES, flipModes);
+        set(KEY_QC_PREVIEW_FLIP, FLIP_MODE_OFF);
+        set(KEY_QC_VIDEO_FLIP, FLIP_MODE_OFF);
+        set(KEY_QC_SNAPSHOT_PICTURE_FLIP, FLIP_MODE_OFF);
+    }
+
+    // Set default Auto Exposure lock value
+    setAecLock(VALUE_FALSE);
+
+    // Set default AWB_LOCK lock value
+    setAwbLock(VALUE_FALSE);
+
+    // Set default Camera mode
+    set(KEY_QC_CAMERA_MODE, 0);
+
+    // Add support for internal preview restart
+    set(KEY_INTERNAL_PERVIEW_RESTART, VALUE_TRUE);
+    // Set default burst number
+    set(KEY_QC_SNAPSHOT_BURST_NUM, 0);
+    set(KEY_QC_NUM_RETRO_BURST_PER_SHUTTER, 0);
+
+    //Get RAM size and disable features which are memory rich
+    struct sysinfo info;
+    sysinfo(&info);
+
+    LOGH("totalram = %ld, freeram = %ld ", info.totalram,
+        info.freeram);
+    if (info.totalram > TOTAL_RAM_SIZE_512MB) {
+        set(KEY_QC_ZSL_HDR_SUPPORTED, VALUE_TRUE);
+    } else {
+        m_bIsLowMemoryDevice = true;
+        set(KEY_QC_ZSL_HDR_SUPPORTED, VALUE_FALSE);
+    }
+
+    setOfflineRAW();
+    memset(mStreamPpMask, 0, sizeof(cam_feature_mask_t)*CAM_STREAM_TYPE_MAX);
+    //Set video buffers as uncached by default
+    set(KEY_QC_CACHE_VIDEO_BUFFERS, VALUE_DISABLE);
+
+    // Set default longshot mode
+    set(KEY_QC_LONG_SHOT, "off");
+    //Enable longshot by default
+    set(KEY_QC_LONGSHOT_SUPPORTED, VALUE_TRUE);
+
+    int32_t rc = commitParameters();
+    if (rc == NO_ERROR) {
+        rc = setNumOfSnapshot();
+    }
+
+    //Set Video Rotation
+    String8 videoRotationValues = createValuesStringFromMap(VIDEO_ROTATION_MODES_MAP,
+            PARAM_MAP_SIZE(VIDEO_ROTATION_MODES_MAP));
+
+    set(KEY_QC_SUPPORTED_VIDEO_ROTATION_VALUES, videoRotationValues.string());
+    set(KEY_QC_VIDEO_ROTATION, VIDEO_ROTATION_0);
+
+    //Check for EZTune
+    setEztune();
+    //Default set for video batch size
+    set(KEY_QC_VIDEO_BATCH_SIZE, 0);
+
+    //Setup dual-camera
+    setDcrf();
+
+    // For Aux Camera of dual camera Mode,
+    // by default set no display mode
+    if (m_relCamSyncInfo.mode == CAM_MODE_SECONDARY) {
+        set(KEY_QC_NO_DISPLAY_MODE, 1);
+        m_bNoDisplayMode = true;
+    }
+
+    cam_dimension_t pic_dim;
+    pic_dim.width = 0;
+    pic_dim.height = 0;
+
+    for(uint32_t i = 0;
+            i < (m_pCapability->picture_sizes_tbl_cnt - 1);
+            i++) {
+        if ((pic_dim.width * pic_dim.height) <
+                (int32_t)(m_pCapability->picture_sizes_tbl[i].width *
+                m_pCapability->picture_sizes_tbl[i].height)) {
+            pic_dim.width =
+                    m_pCapability->picture_sizes_tbl[i].width;
+            pic_dim.height =
+                    m_pCapability->picture_sizes_tbl[i].height;
+        }
+    }
+    LOGD("max pic size = %d %d", pic_dim.width,
+            pic_dim.height);
+    setMaxPicSize(pic_dim);
+
+    setManualCaptureMode(CAM_MANUAL_CAPTURE_TYPE_OFF);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : allocate
+ *
+ * DESCRIPTION: Allocate buffer memory for parameter obj (if necessary)
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::allocate()
+{
+    // Idempotent: if the SETPARM heap is already there, nothing to do.
+    if (m_pParamHeap != NULL) {
+        return NO_ERROR;
+    }
+
+    // Create the heap object that will back the parameter buffer.
+    m_pParamHeap = new QCameraHeapMemory(QCAMERA_ION_USE_CACHE);
+    if (m_pParamHeap == NULL) {
+        return NO_MEMORY;
+    }
+
+    // Back it with a single buffer big enough for one parm_buffer_t.
+    int32_t rc = m_pParamHeap->allocate(1, sizeof(parm_buffer_t), NON_SECURE);
+    if (rc != OK) {
+        // Allocation failed: tear the heap object back down so a later
+        // call can retry from a clean state.
+        LOGE("Error!! Param buffers have not been allocated");
+        delete m_pParamHeap;
+        m_pParamHeap = NULL;
+        rc = NO_MEMORY;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : init
+ *
+ * DESCRIPTION: initialize parameter obj
+ *
+ * PARAMETERS :
+ *   @capabilities  : ptr to camera capabilities
+ *   @mmops         : ptr to memory ops table for mapping/unmapping
+ *   @adjustFPS     : object reference for additional (possibly thermal)
+ *                    framerate adjustment
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::init(cam_capability_t *capabilities,
+        mm_camera_vtbl_t *mmOps, QCameraAdjustFPS *adjustFPS)
+{
+    int32_t rc = NO_ERROR;
+
+    // Cache externally owned handles; they are reset to NULL again on every
+    // failure path below (TRANS_INIT_ERROR1).
+    m_pCapability = capabilities;
+    m_pCamOpsTbl = mmOps;
+    m_AdjustFPS = adjustFPS;
+
+    // allocate() must have been called before init() to create the heap.
+    if (m_pParamHeap == NULL) {
+        LOGE("Parameter buffers have not been allocated");
+        rc = UNKNOWN_ERROR;
+        goto TRANS_INIT_ERROR1;
+    }
+
+    //Map memory for parameters buffer
+    cam_buf_map_type_list bufMapList;
+    rc = QCameraBufferMaps::makeSingletonBufMapList(
+            CAM_MAPPING_BUF_TYPE_PARM_BUF, 0 /*stream id*/,
+            0 /*buffer index*/, -1 /*plane index*/, 0 /*cookie*/,
+            m_pParamHeap->getFd(0), sizeof(parm_buffer_t), bufMapList,
+                    m_pParamHeap->getPtr(0));
+
+    if (rc == NO_ERROR) {
+        // Hand the buffer fd down to the camera backend so both sides
+        // share the same parameter memory.
+        rc = m_pCamOpsTbl->ops->map_bufs(m_pCamOpsTbl->camera_handle,
+                &bufMapList);
+    }
+
+    if(rc < 0) {
+        LOGE("failed to map SETPARM buffer");
+        rc = FAILED_TRANSACTION;
+        goto TRANS_INIT_ERROR2;
+    }
+    m_pParamBuf = (parm_buffer_t*) DATA_PTR(m_pParamHeap,0);
+
+    // Check if it is dual camera mode
+    if(m_relCamSyncInfo.sync_control == CAM_SYNC_RELATED_SENSORS_ON) {
+        //Allocate related cam sync buffer
+        //this is needed for the payload that goes along with bundling cmd for related
+        //camera use cases
+        m_pRelCamSyncHeap = new QCameraHeapMemory(QCAMERA_ION_USE_CACHE);
+        rc = m_pRelCamSyncHeap->allocate(1,
+                sizeof(cam_sync_related_sensors_event_info_t), NON_SECURE);
+        if(rc != OK) {
+            rc = NO_MEMORY;
+            LOGE("Failed to allocate Related cam sync Heap memory");
+            goto TRANS_INIT_ERROR3;
+        }
+
+        //Map memory for related cam sync buffer
+        rc = m_pCamOpsTbl->ops->map_buf(m_pCamOpsTbl->camera_handle,
+                CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF,
+                m_pRelCamSyncHeap->getFd(0),
+                sizeof(cam_sync_related_sensors_event_info_t),
+                (cam_sync_related_sensors_event_info_t*)DATA_PTR(m_pRelCamSyncHeap,0));
+        if(rc < 0) {
+            LOGE("failed to map Related cam sync buffer");
+            rc = FAILED_TRANSACTION;
+            goto TRANS_INIT_ERROR4;
+        }
+        m_pRelCamSyncBuf =
+                (cam_sync_related_sensors_event_info_t*) DATA_PTR(m_pRelCamSyncHeap,0);
+    }
+    initDefaultParameters();
+    mCommon.init(capabilities);
+    m_bInited = true;
+
+    goto TRANS_INIT_DONE;
+
+    // Error labels below intentionally fall through into each other so a
+    // failure at any stage unwinds every resource acquired before it.
+TRANS_INIT_ERROR4:
+    m_pRelCamSyncHeap->deallocate();
+
+TRANS_INIT_ERROR3:
+    delete m_pRelCamSyncHeap;
+    m_pRelCamSyncHeap = NULL;
+
+TRANS_INIT_ERROR2:
+    m_pParamHeap->deallocate();
+    delete m_pParamHeap;
+    m_pParamHeap = NULL;
+
+TRANS_INIT_ERROR1:
+    m_pCapability = NULL;
+    m_pCamOpsTbl = NULL;
+    m_AdjustFPS = NULL;
+
+TRANS_INIT_DONE:
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : deinit
+ *
+ * DESCRIPTION: deinitialize
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::deinit()
+{
+    // Nothing to undo if init() never completed.
+    if (!m_bInited) {
+        return;
+    }
+
+    //clear all entries in the map
+    String8 emptyStr;
+    QCameraParameters::unflatten(emptyStr);
+
+    // Unmap the buffers from the backend before freeing them locally.
+    if ((NULL != m_pCamOpsTbl) && (m_pCamOpsTbl->ops != NULL)) {
+        m_pCamOpsTbl->ops->unmap_buf(
+                             m_pCamOpsTbl->camera_handle,
+                             CAM_MAPPING_BUF_TYPE_PARM_BUF);
+
+        if (m_relCamSyncInfo.sync_control == CAM_SYNC_RELATED_SENSORS_ON) {
+            m_pCamOpsTbl->ops->unmap_buf(
+                    m_pCamOpsTbl->camera_handle,
+                    CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF);
+        }
+    }
+
+    m_pCapability = NULL;
+    // Release the SETPARM heap; the raw buffer pointer dies with it.
+    if (NULL != m_pParamHeap) {
+        m_pParamHeap->deallocate();
+        delete m_pParamHeap;
+        m_pParamHeap = NULL;
+        m_pParamBuf = NULL;
+    }
+    // Release the dual-camera sync heap, if it was ever allocated.
+    if (NULL != m_pRelCamSyncHeap) {
+        m_pRelCamSyncHeap->deallocate();
+        delete m_pRelCamSyncHeap;
+        m_pRelCamSyncHeap = NULL;
+        m_pRelCamSyncBuf = NULL;
+    }
+
+    m_tempMap.clear();
+    m_pCamOpsTbl = NULL;
+    // Single reset is sufficient (was redundantly assigned twice).
+    m_AdjustFPS = NULL;
+
+    m_bInited = false;
+}
+
+/*===========================================================================
+ * FUNCTION   : parse_pair
+ *
+ * DESCRIPTION: helper function to parse string like "640x480" or "10000,20000"
+ *
+ * PARAMETERS :
+ *   @str     : input string to be parsed
+ *   @first   : [output] first value of the pair
+ *   @second  : [output] second value of the pair
+ *   @delim   : [input] delimiter to separate the pair
+ *   @endptr  : [output] ptr to the end of the pair string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::parse_pair(const char *str,
+                                      int *first,
+                                      int *second,
+                                      char delim,
+                                      char **endptr = NULL)
+{
+    char *cursor;
+
+    // Parse the leading integer; cursor lands on the char after it.
+    int firstVal = (int)strtol(str, &cursor, 10);
+
+    // The delimiter must immediately follow the first number.
+    if (*cursor != delim) {
+        LOGE("Cannot find delimeter (%c) in str=%s", delim, str);
+        return BAD_VALUE;
+    }
+
+    // Parse the trailing integer right after the delimiter.
+    int secondVal = (int)strtol(cursor + 1, &cursor, 10);
+
+    *first = firstVal;
+    *second = secondVal;
+
+    // Report where parsing stopped, when the caller asked for it.
+    // (Only written on success, matching the original contract.)
+    if (endptr != NULL) {
+        *endptr = cursor;
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : parseSizesList
+ *
+ * DESCRIPTION: helper function to parse string containing sizes
+ *
+ * PARAMETERS :
+ *   @sizesStr: [input] input string to be parsed
+ *   @sizes   : [output] reference to store parsed sizes
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::parseSizesList(const char *sizesStr, Vector<Size> &sizes)
+{
+    if (sizesStr == 0) {
+        return;
+    }
+
+    char *sizeStartPtr = (char *)sizesStr;
+
+    // Walk "WxH,WxH,..." pushing one Size per entry until end of string.
+    while (true) {
+        int width, height;
+        int success = parse_pair(sizeStartPtr, &width, &height, 'x',
+                                 &sizeStartPtr);
+        // BUGFIX: parse_pair returns NO_ERROR/BAD_VALUE, never -1, so the
+        // old "success == -1" check could not catch parse failures. Compare
+        // against NO_ERROR so a malformed pair is actually rejected.
+        if (success != NO_ERROR ||
+                (*sizeStartPtr != ',' && *sizeStartPtr != '\0')) {
+            LOGE("Picture sizes string \"%s\" contains invalid character.", sizesStr);
+            return;
+        }
+        sizes.push(Size(width, height));
+
+        if (*sizeStartPtr == '\0') {
+            return;
+        }
+        sizeStartPtr++;   // skip the ',' separator
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : adjustPreviewFpsRange
+ *
+ * DESCRIPTION: adjust preview FPS ranges
+ *              according to external events
+ *
+ * PARAMETERS :
+ *   @fpsRange : ptr to the FPS range to be applied
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::adjustPreviewFpsRange(cam_fps_range_t *fpsRange)
+{
+    // Guard clauses: need a valid range and a mapped parameter buffer.
+    if (NULL == fpsRange) {
+        return BAD_VALUE;
+    }
+    if (NULL == m_pParamBuf) {
+        return NO_INIT;
+    }
+
+    // Stage the new FPS range in a fresh batch and push it to the backend.
+    int32_t rc = initBatchUpdate(m_pParamBuf);
+    if (NO_ERROR != rc) {
+        LOGE("Failed to initialize group update table");
+        return rc;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_FPS_RANGE, *fpsRange)) {
+        LOGE("Parameters batch failed");
+        return BAD_VALUE;
+    }
+
+    rc = commitSetBatch();
+    if (NO_ERROR != rc) {
+        LOGE("Failed to commit batch parameters");
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setPreviewFpsRange
+ *
+ * DESCRIPTION: set preview FPS ranges
+ *
+ * PARAMETERS :
+ *   @min_fps     : min preview FPS value (fps * 1000)
+ *   @max_fps     : max preview FPS value (fps * 1000)
+ *   @vid_min_fps : min video FPS value (fps * 1000)
+ *   @vid_max_fps : max video FPS value (fps * 1000)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPreviewFpsRange(int min_fps,
+        int max_fps, int vid_min_fps,int vid_max_fps)
+{
+    char str[32];
+    char value[PROPERTY_VALUE_MAX];
+    int fixedFpsValue;
+    /*This property get value should be the fps that user needs*/
+    property_get("persist.debug.set.fixedfps", value, "0");
+    fixedFpsValue = atoi(value);
+
+    LOGD("E minFps = %d, maxFps = %d , vid minFps = %d, vid maxFps = %d",
+                 min_fps, max_fps, vid_min_fps, vid_max_fps);
+
+    // Debug override: pin preview (and, outside HFR, video) to a fixed fps.
+    // Property value is in fps; internal values are fps * 1000.
+    if(fixedFpsValue != 0) {
+        min_fps = max_fps = fixedFpsValue*1000;
+        if (!isHfrMode()) {
+             vid_min_fps = vid_max_fps = fixedFpsValue*1000;
+        }
+    }
+    snprintf(str, sizeof(str), "%d,%d", min_fps, max_fps);
+    LOGH("Setting preview fps range %s", str);
+    updateParamEntry(KEY_PREVIEW_FPS_RANGE, str);
+    // Convert fps*1000 ints to the float fps the backend expects.
+    cam_fps_range_t fps_range;
+    memset(&fps_range, 0x00, sizeof(cam_fps_range_t));
+    fps_range.min_fps = (float)min_fps / 1000.0f;
+    fps_range.max_fps = (float)max_fps / 1000.0f;
+    fps_range.video_min_fps = (float)vid_min_fps / 1000.0f;
+    fps_range.video_max_fps = (float)vid_max_fps / 1000.0f;
+
+    LOGH("Updated: minFps = %d, maxFps = %d ,"
+            " vid minFps = %d, vid maxFps = %d",
+             min_fps, max_fps, vid_min_fps, vid_max_fps);
+
+    // Let the thermal module shrink the range when not recording; in HFR
+    // mode the video rates come from the stored HFR range instead.
+    if ( NULL != m_AdjustFPS ) {
+        if (m_ThermalMode == QCAMERA_THERMAL_ADJUST_FPS &&
+                !m_bRecordingHint) {
+            float minVideoFps = min_fps, maxVideoFps = max_fps;
+            if (isHfrMode()) {
+                minVideoFps = m_hfrFpsRange.video_min_fps;
+                maxVideoFps = m_hfrFpsRange.video_max_fps;
+            }
+            m_AdjustFPS->recalcFPSRange(min_fps, max_fps, minVideoFps, maxVideoFps, fps_range);
+            LOGH("Thermal adjusted Preview fps range %3.2f,%3.2f, %3.2f, %3.2f",
+                   fps_range.min_fps, fps_range.max_fps,
+                  fps_range.video_min_fps, fps_range.video_max_fps);
+        }
+    }
+
+    // Stage the (possibly adjusted) range into the current parameter batch;
+    // the caller commits the batch.
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
+        return BAD_VALUE;
+    }
+
+    return NO_ERROR;
+}
+
+
+
+/*===========================================================================
+ * FUNCTION   : setAutoExposure
+ *
+ * DESCRIPTION: set auto exposure
+ *
+ * PARAMETERS :
+ *   @autoExp : auto exposure value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAutoExposure(const char *autoExp)
+{
+    // Map the string onto an AEC algorithm enum; NULL or unknown -> error.
+    int32_t mapped = NAME_NOT_FOUND;
+    if (NULL != autoExp) {
+        mapped = lookupAttr(AUTO_EXPOSURE_MAP,
+                PARAM_MAP_SIZE(AUTO_EXPOSURE_MAP), autoExp);
+    }
+    if (NAME_NOT_FOUND == mapped) {
+        LOGE("Invalid auto exposure value: %s", (autoExp == NULL) ? "NULL" : autoExp);
+        return BAD_VALUE;
+    }
+
+    LOGH("Setting auto exposure %s", autoExp);
+    updateParamEntry(KEY_QC_AUTO_EXPOSURE, autoExp);
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_AEC_ALGO_TYPE, mapped)) {
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setEffect
+ *
+ * DESCRIPTION: map an effect string to its enum value, cache it and queue it
+ *              in the pending parameter batch
+ *
+ * PARAMETERS :
+ *   @effect  : effect value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setEffect(const char *effect)
+{
+    if (effect == NULL) {
+        LOGE("Invalid effect value: %s", "NULL");
+        return BAD_VALUE;
+    }
+    int32_t mapped = lookupAttr(EFFECT_MODES_MAP, PARAM_MAP_SIZE(EFFECT_MODES_MAP), effect);
+    if (mapped == NAME_NOT_FOUND) {
+        LOGE("Invalid effect value: %s", effect);
+        return BAD_VALUE;
+    }
+    LOGH("Setting effect %s", effect);
+    updateParamEntry(KEY_EFFECT, effect);
+    // Backend takes the effect as an 8-bit value; remember it locally too.
+    uint8_t prmEffect = static_cast<uint8_t>(mapped);
+    mParmEffect = prmEffect;
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_EFFECT, prmEffect)) {
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setBrightness
+ *
+ * DESCRIPTION: mirror the brightness value into the local parameter map and
+ *              queue it in the pending parameter batch
+ *
+ * PARAMETERS :
+ *   @brightness  : brightness control value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setBrightness(int brightness)
+{
+    char strVal[16];
+    snprintf(strVal, sizeof(strVal), "%d", brightness);
+    updateParamEntry(KEY_QC_BRIGHTNESS, strVal);
+    LOGH("Setting brightness %s", strVal);
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_BRIGHTNESS, brightness)) {
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFocusMode
+ *
+ * DESCRIPTION: map a focus mode string to its enum value, cache it, queue it
+ *              in the parameter batch, and publish canned focus distances
+ *              for the fixed "infinity" mode
+ *
+ * PARAMETERS :
+ *   @focusMode  : focus mode value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFocusMode(const char *focusMode)
+{
+    if (focusMode == NULL) {
+        LOGE("Invalid focus mode value: %s", "NULL");
+        return BAD_VALUE;
+    }
+    int32_t mapped = lookupAttr(FOCUS_MODES_MAP, PARAM_MAP_SIZE(FOCUS_MODES_MAP), focusMode);
+    if (mapped == NAME_NOT_FOUND) {
+        LOGE("Invalid focus mode value: %s", focusMode);
+        return BAD_VALUE;
+    }
+    LOGH("Setting focus mode %s", focusMode);
+    mFocusMode = (cam_focus_mode_type)mapped;
+    updateParamEntry(KEY_FOCUS_MODE, focusMode);
+
+    int32_t rc = NO_ERROR;
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+            CAM_INTF_PARM_FOCUS_MODE, (uint8_t)mapped)) {
+        rc = BAD_VALUE;
+    }
+    // Fixed focus at infinity always reports canned focus distances.
+    if (strcmp(focusMode, "infinity") == 0) {
+        set(QCameraParameters::KEY_FOCUS_DISTANCES, "Infinity,Infinity,Infinity");
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFocusPosition
+ *
+ * DESCRIPTION: set manual focus position; validates the mode and range
+ *              against sensor capability before queuing the parameter
+ *
+ * PARAMETERS :
+ *   @typeStr : focus position type, index or dac_code
+ *   @posStr : focus positon.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t  QCameraParameters::setFocusPosition(const char *typeStr, const char *posStr)
+{
+    // Guard before atoi()/atof(): both have undefined behavior on NULL.
+    if ((typeStr == NULL) || (posStr == NULL)) {
+        LOGE("invalid params, type: %s, pos: %s",
+              (typeStr == NULL) ? "NULL" : typeStr,
+              (posStr == NULL) ? "NULL" : posStr);
+        return BAD_VALUE;
+    }
+    LOGH(", type:%s, pos: %s", typeStr, posStr);
+    int32_t type = atoi(typeStr);
+    float pos = (float) atof(posStr);
+
+    if ((type >= CAM_MANUAL_FOCUS_MODE_INDEX) &&
+            (type < CAM_MANUAL_FOCUS_MODE_MAX)) {
+        // get max and min focus position from m_pCapability
+        float minFocusPos = m_pCapability->min_focus_pos[type];
+        float maxFocusPos = m_pCapability->max_focus_pos[type];
+        LOGH(", focusPos min: %f, max: %f", minFocusPos, maxFocusPos);
+
+        if (pos >= minFocusPos && pos <= maxFocusPos) {
+            updateParamEntry(KEY_QC_MANUAL_FOCUS_POS_TYPE, typeStr);
+            updateParamEntry(KEY_QC_MANUAL_FOCUS_POSITION, posStr);
+
+            cam_manual_focus_parm_t manual_focus;
+            manual_focus.flag = (cam_manual_focus_mode_type)type;
+            // Only diopter mode carries a float; the other modes are integer units.
+            if (manual_focus.flag == CAM_MANUAL_FOCUS_MODE_DIOPTER) {
+                manual_focus.af_manual_diopter = pos;
+            } else if (manual_focus.flag == CAM_MANUAL_FOCUS_MODE_RATIO) {
+                manual_focus.af_manual_lens_position_ratio = (int32_t) pos;
+            } else if (manual_focus.flag == CAM_MANUAL_FOCUS_MODE_INDEX) {
+                manual_focus.af_manual_lens_position_index = (int32_t) pos;
+            } else {
+                manual_focus.af_manual_lens_position_dac = (int32_t) pos;
+            }
+
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_MANUAL_FOCUS_POS,
+                    manual_focus)) {
+                return BAD_VALUE;
+            }
+            return NO_ERROR;
+        }
+    }
+
+    LOGE("invalid params, type:%d, pos: %f", type, pos);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : updateAEInfo
+ *
+ * DESCRIPTION: update exposure information from metadata callback
+ *
+ * PARAMETERS :
+ *   @ae_params : auto exposure params
+ *
+ * RETURN     : void
+ *==========================================================================*/
+void  QCameraParameters::updateAEInfo(cam_3a_params_t &ae_params)
+{
+    const char *prevExpTime = get(KEY_QC_CURRENT_EXPOSURE_TIME);
+    // 15 bytes could truncate "%f" output for long exposures (ms value with
+    // six decimals); 25 bytes fits any double printed this way.
+    char newExpTime[25];
+    snprintf(newExpTime, sizeof(newExpTime), "%f", ae_params.exp_time*1000.0);
+
+    // Only rewrite the parameter when the formatted value actually changed.
+    if (prevExpTime == NULL || strcmp(prevExpTime, newExpTime)) {
+        LOGD("update exposure time: old: %s, new: %s", prevExpTime, newExpTime);
+        set(KEY_QC_CURRENT_EXPOSURE_TIME, newExpTime);
+    }
+
+    int32_t prevISO = getInt(KEY_QC_CURRENT_ISO);
+    int32_t newISO = ae_params.iso_value;
+    if (prevISO != newISO) {
+        LOGD("update iso: old:%d, new:%d", prevISO, newISO);
+        set(KEY_QC_CURRENT_ISO, newISO);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : updateCurrentFocusPosition
+ *
+ * DESCRIPTION: publish the current focus position (scale and diopter) from
+ *              a metadata callback, writing only values that changed
+ *
+ * PARAMETERS :
+ *   @cur_pos_info : current focus position
+ *
+ * RETURN     : void
+ *==========================================================================*/
+void  QCameraParameters::updateCurrentFocusPosition(cam_focus_pos_info_t &cur_pos_info)
+{
+    int newScalePos = (int) cur_pos_info.scale;
+    int prevScalePos = getInt(KEY_QC_FOCUS_POSITION_SCALE);
+    if (newScalePos != prevScalePos) {
+        LOGD("update focus scale: old:%d, new:%d", prevScalePos, newScalePos);
+        set(KEY_QC_FOCUS_POSITION_SCALE, newScalePos);
+    }
+
+    float newDiopterPos = cur_pos_info.diopter;
+    float prevDiopterPos = getFloat(KEY_QC_FOCUS_POSITION_DIOPTER);
+    if (newDiopterPos != prevDiopterPos) {
+        LOGD("update focus diopter: old:%f, new:%f", prevDiopterPos, newDiopterPos);
+        setFloat(KEY_QC_FOCUS_POSITION_DIOPTER, newDiopterPos);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setSharpness
+ *
+ * DESCRIPTION: mirror the sharpness value into the local parameter map,
+ *              cache it, and queue it in the pending parameter batch
+ *
+ * PARAMETERS :
+ *   @sharpness  : sharpness control value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSharpness(int sharpness)
+{
+    char strVal[16];
+    snprintf(strVal, sizeof(strVal), "%d", sharpness);
+    updateParamEntry(KEY_QC_SHARPNESS, strVal);
+    LOGH("Setting sharpness %s", strVal);
+    // Cache locally; the batch entry references the cached member.
+    m_nSharpness = sharpness;
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_SHARPNESS, m_nSharpness)) {
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSkinToneEnhancement
+ *
+ * DESCRIPTION: set skin tone enhancement value
+ *
+ * PARAMETERS :
+ *   @sceFactor  : skin tone enhancement factor value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSkinToneEnhancement(int sceFactor)
+{
+    char strVal[16];
+    snprintf(strVal, sizeof(strVal), "%d", sceFactor);
+    updateParamEntry(KEY_QC_SCE_FACTOR, strVal);
+    LOGH("Setting skintone enhancement %s", strVal);
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_SCE_FACTOR, sceFactor)) {
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSaturation
+ *
+ * DESCRIPTION: mirror the saturation value into the local parameter map and
+ *              queue it in the pending parameter batch
+ *
+ * PARAMETERS :
+ *   @saturation : saturation control value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSaturation(int saturation)
+{
+    char strVal[16];
+    snprintf(strVal, sizeof(strVal), "%d", saturation);
+    updateParamEntry(KEY_QC_SATURATION, strVal);
+    LOGH("Setting saturation %s", strVal);
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_SATURATION, saturation)) {
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setContrast
+ *
+ * DESCRIPTION: mirror the contrast value into the local parameter map and
+ *              queue it in the pending parameter batch
+ *
+ * PARAMETERS :
+ *   @contrast : contrast control value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setContrast(int contrast)
+{
+    char strVal[16];
+    snprintf(strVal, sizeof(strVal), "%d", contrast);
+    updateParamEntry(KEY_QC_CONTRAST, strVal);
+    LOGH("Setting contrast %s", strVal);
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_CONTRAST, contrast)) {
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSceneDetect
+ *
+ * DESCRIPTION: enable/disable automatic scene detection (ASD) in the backend
+ *
+ * PARAMETERS :
+ *   @sceneDetect  : scene detect value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSceneDetect(const char *sceneDetect)
+{
+    if (sceneDetect == NULL) {
+        LOGE("Invalid Scene Detect value: %s", "NULL");
+        return BAD_VALUE;
+    }
+    int32_t mapped = lookupAttr(ON_OFF_MODES_MAP, PARAM_MAP_SIZE(ON_OFF_MODES_MAP),
+            sceneDetect);
+    if (mapped == NAME_NOT_FOUND) {
+        LOGE("Invalid Scene Detect value: %s", sceneDetect);
+        return BAD_VALUE;
+    }
+    LOGH("Setting Scene Detect %s", sceneDetect);
+    updateParamEntry(KEY_QC_SCENE_DETECT, sceneDetect);
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_ASD_ENABLE, mapped)) {
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSensorSnapshotHDR
+ *
+ * DESCRIPTION: set sensor snapshot HDR mode; an adb property can force the
+ *              zig-zag HDR variant whenever HDR is enabled
+ *
+ * PARAMETERS :
+ *   @snapshotHDR  : snapshot HDR value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSensorSnapshotHDR(const char *snapshotHDR)
+{
+    if (snapshotHDR == NULL) {
+        LOGE("Invalid Snapshot HDR value: %s", "NULL");
+        return BAD_VALUE;
+    }
+    int32_t value = (cam_sensor_hdr_type_t) lookupAttr(ON_OFF_MODES_MAP,
+            PARAM_MAP_SIZE(ON_OFF_MODES_MAP), snapshotHDR);
+    if (value == NAME_NOT_FOUND) {
+        LOGE("Invalid Snapshot HDR value: %s", snapshotHDR);
+        return BAD_VALUE;
+    }
+    LOGH("Setting Sensor Snapshot HDR %s", snapshotHDR);
+    updateParamEntry(KEY_QC_SENSOR_HDR, snapshotHDR);
+
+    // persist.camera.zzhdr.enable forces zig-zag HDR whenever HDR is on.
+    char zz_prop[PROPERTY_VALUE_MAX];
+    memset(zz_prop, 0, sizeof(zz_prop));
+    property_get("persist.camera.zzhdr.enable", zz_prop, "0");
+    uint8_t zzhdr_enable = (uint8_t)atoi(zz_prop);
+    if (zzhdr_enable && (value != CAM_SENSOR_HDR_OFF)) {
+        value = CAM_SENSOR_HDR_ZIGZAG;
+        LOGH("%s: Overriding to ZZ HDR Mode", __func__);
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_SENSOR_HDR, (cam_sensor_hdr_type_t)value)) {
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : setVideoHDR
+ *
+ * DESCRIPTION: set video HDR value
+ *
+ * PARAMETERS :
+ *   @videoHDR  : video HDR value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setVideoHDR(const char *videoHDR)
+{
+    if (videoHDR != NULL) {
+        int32_t value = lookupAttr(ON_OFF_MODES_MAP, PARAM_MAP_SIZE(ON_OFF_MODES_MAP), videoHDR);
+        if (value != NAME_NOT_FOUND) {
+
+            // persist.camera.zzhdr.video redirects the video-HDR request to
+            // the sensor zig-zag HDR parameter instead of the regular
+            // video-HDR parameter.
+            char zz_prop[PROPERTY_VALUE_MAX];
+            memset(zz_prop, 0, sizeof(zz_prop));
+            property_get("persist.camera.zzhdr.video", zz_prop, "0");
+            uint8_t use_zzhdr_video = (uint8_t)atoi(zz_prop);
+
+            if (use_zzhdr_video) {
+                LOGH("%s: Using ZZ HDR for video mode", __func__);
+                // Translate the on/off value into the sensor HDR enum.
+                if (value)
+                    value = CAM_SENSOR_HDR_ZIGZAG;
+                else
+                    value = CAM_SENSOR_HDR_OFF;
+                LOGH("%s: Overriding to sensor HDR Mode to:%d", __func__, value);
+                if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_SENSOR_HDR, (cam_sensor_hdr_type_t) value)) {
+                    LOGE("%s: Override to sensor HDR mode for video HDR failed", __func__);
+                    return BAD_VALUE;
+                }
+                updateParamEntry(KEY_QC_VIDEO_HDR, videoHDR);
+            } else {
+                LOGH("%s: Setting Video HDR %s", __func__, videoHDR);
+                updateParamEntry(KEY_QC_VIDEO_HDR, videoHDR);
+                if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_VIDEO_HDR, value)) {
+                    return BAD_VALUE;
+                }
+            }
+
+            return NO_ERROR;
+        }
+    }
+    LOGE("Invalid Video HDR value: %s",
+          (videoHDR == NULL) ? "NULL" : videoHDR);
+    return BAD_VALUE;
+}
+
+
+
+/*===========================================================================
+ * FUNCTION   : setVtEnable
+ *
+ * DESCRIPTION: enable/disable video-telephony (AV timer) mode
+ *
+ * PARAMETERS :
+ *   @vtEnable  : vtEnable value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setVtEnable(const char *vtEnable)
+{
+    if (vtEnable != NULL) {
+        int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+                PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), vtEnable);
+        if (value != NAME_NOT_FOUND) {
+            LOGH("Setting Vt Enable %s", vtEnable);
+            updateParamEntry(KEY_QC_VT_ENABLE, vtEnable);
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_VT, value)) {
+                // Keep the flag consistent with the backend: the parameter
+                // was not queued, so do not report the AV timer as enabled.
+                m_bAVTimerEnabled = false;
+                return BAD_VALUE;
+            }
+            // Only mark the AV timer enabled once the parameter is queued.
+            m_bAVTimerEnabled = true;
+            return NO_ERROR;
+        }
+    }
+    LOGE("Invalid Vt Enable value: %s",
+          (vtEnable == NULL) ? "NULL" : vtEnable);
+    m_bAVTimerEnabled = false;
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFaceRecognition
+ *
+ * DESCRIPTION: set face recognition value
+ *
+ * PARAMETERS :
+ *   @faceRecog  : face recognition value string
+ *   @maxFaces   : number of max faces to be detected/recognized
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFaceRecognition(const char *faceRecog,
+        uint32_t maxFaces)
+{
+    if (faceRecog != NULL) {
+        int32_t value = lookupAttr(ON_OFF_MODES_MAP, PARAM_MAP_SIZE(ON_OFF_MODES_MAP), faceRecog);
+        if (value != NAME_NOT_FOUND) {
+            LOGH("Setting face recognition %s", faceRecog);
+            updateParamEntry(KEY_QC_FACE_RECOGNITION, faceRecog);
+
+            // Recognition is one bit in the face-process mask; flip only
+            // that bit and leave the other face-processing bits untouched.
+            uint32_t faceProcMask = m_nFaceProcMask;
+            if (value > 0) {
+                faceProcMask |= CAM_FACE_PROCESS_MASK_RECOGNITION;
+            } else {
+                faceProcMask &= (uint32_t)(~CAM_FACE_PROCESS_MASK_RECOGNITION);
+            }
+
+            // Skip the backend round-trip when the mask did not change.
+            if(m_nFaceProcMask == faceProcMask) {
+                LOGH("face process mask not changed, no ops here");
+                return NO_ERROR;
+            }
+            m_nFaceProcMask = faceProcMask;
+            LOGH("FaceProcMask -> %d", m_nFaceProcMask);
+
+            // set parm for face process
+            cam_fd_set_parm_t fd_set_parm;
+            memset(&fd_set_parm, 0, sizeof(cam_fd_set_parm_t));
+            fd_set_parm.fd_mode = m_nFaceProcMask;
+            fd_set_parm.num_fd = maxFaces;
+
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_FD, fd_set_parm)) {
+                return BAD_VALUE;
+            }
+            return NO_ERROR;
+        }
+    }
+    LOGE("Invalid face recognition value: %s", (faceRecog == NULL) ? "NULL" : faceRecog);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setZoom
+ *
+ * DESCRIPTION: mirror the zoom level into the local parameter map, cache it,
+ *              and queue it in the pending parameter batch
+ *
+ * PARAMETERS :
+ *   @zoom_level : zoom level
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setZoom(int zoom_level)
+{
+    char strVal[16];
+    snprintf(strVal, sizeof(strVal), "%d", zoom_level);
+    updateParamEntry(KEY_ZOOM, strVal);
+    LOGH("zoom level: %d", zoom_level);
+    mZoomLevel = zoom_level;
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_ZOOM, zoom_level)) {
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setISOValue
+ *
+ * DESCRIPTION: set the ISO preset; "manual" delegates to continuous ISO and
+ *              only updates the local parameter map
+ *
+ * PARAMETERS :
+ *   @isoValue : ISO value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t  QCameraParameters::setISOValue(const char *isoValue)
+{
+    if (isoValue == NULL) {
+        LOGE("Invalid ISO value: %s", "NULL");
+        return BAD_VALUE;
+    }
+    // Manual ISO is driven through the continuous-ISO path; nothing is
+    // queued for the backend here.
+    if (!strcmp(isoValue, ISO_MANUAL)) {
+        LOGD("iso manual mode - use continuous iso");
+        updateParamEntry(KEY_QC_ISO_MODE, isoValue);
+        return NO_ERROR;
+    }
+    int32_t mapped = lookupAttr(ISO_MODES_MAP, PARAM_MAP_SIZE(ISO_MODES_MAP), isoValue);
+    if (mapped == NAME_NOT_FOUND) {
+        LOGE("Invalid ISO value: %s", isoValue);
+        return BAD_VALUE;
+    }
+    LOGH("Setting ISO value %s", isoValue);
+    updateParamEntry(KEY_QC_ISO_MODE, isoValue);
+
+    cam_intf_parm_manual_3a_t iso_settings;
+    memset(&iso_settings, 0, sizeof(cam_intf_parm_manual_3a_t));
+    iso_settings.value = mapped;
+    // In manual capture mode the preset applies to preview only.
+    iso_settings.previewOnly =
+            (getManualCaptureMode() != CAM_MANUAL_CAPTURE_TYPE_OFF) ? TRUE : FALSE;
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_ISO, iso_settings)) {
+        return BAD_VALUE;
+    }
+    m_isoValue = mapped;
+    return NO_ERROR;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : setContinuousISO
+ *
+ * DESCRIPTION: update continuous ISO from incoming parameters; applies only
+ *              while the ISO mode is "manual"
+ *
+ * PARAMETERS :
+ *   @params : ISO value parameter
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t  QCameraParameters::setContinuousISO(const QCameraParameters& params)
+{
+    const char *iso = params.get(KEY_QC_ISO_MODE);
+    LOGD("current iso mode: %s", iso);
+
+    if (iso != NULL) {
+        // Continuous ISO only matters in manual ISO mode; a non-zero
+        // strcmp() means a fixed ISO preset is active, so skip the backend.
+        if (strcmp(iso, ISO_MANUAL)) {
+            LOGD("dont set iso to back-end.");
+            return NO_ERROR;
+        }
+    }
+
+    const char *str = params.get(KEY_QC_CONTINUOUS_ISO);
+    const char *prev_str = get(KEY_QC_CONTINUOUS_ISO);
+    // Forward to the string overload only when the value actually changed.
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setContinuousISO(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setExposureTime
+ *
+ * DESCRIPTION: set exposure time
+ *
+ * PARAMETERS :
+ *   @expTimeStr : string of exposure time in ms
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t  QCameraParameters::setExposureTime(const char *expTimeStr)
+{
+    if (expTimeStr != NULL) {
+        double expTimeMs = atof(expTimeStr);
+        //input is in milli seconds. Convert to nano sec for backend
+        int64_t expTimeNs = (int64_t)(expTimeMs*1000000L);
+
+        // expTime == 0 means not to use manual exposure time.
+        if ((0 <= expTimeNs) &&
+                ((expTimeNs == 0) ||
+                ((expTimeNs >= m_pCapability->exposure_time_range[0]) &&
+                (expTimeNs <= m_pCapability->exposure_time_range[1])))) {
+            LOGH(", exposure time: %f ms", expTimeMs);
+            updateParamEntry(KEY_QC_EXPOSURE_TIME, expTimeStr);
+
+            cam_intf_parm_manual_3a_t exp_settings;
+            memset(&exp_settings, 0, sizeof(cam_intf_parm_manual_3a_t));
+            if (getManualCaptureMode() != CAM_MANUAL_CAPTURE_TYPE_OFF) {
+                // In manual capture mode the preview exposure is capped at
+                // LEVEL1; longer exposures apply to the capture path only.
+                exp_settings.previewOnly = TRUE;
+                if (expTimeMs < QCAMERA_MAX_EXP_TIME_LEVEL1) {
+                    exp_settings.value = expTimeNs;
+                } else {
+                    exp_settings.value =
+                            (int64_t)(QCAMERA_MAX_EXP_TIME_LEVEL1*1000000L);
+                }
+            } else {
+                exp_settings.previewOnly = FALSE;
+                exp_settings.value = expTimeNs;
+            }
+
+            //Based on exposure values we can decide the capture type here
+            if (getManualCaptureMode() != CAM_MANUAL_CAPTURE_TYPE_OFF) {
+                if (expTimeMs < QCAMERA_MAX_EXP_TIME_LEVEL1) {
+                    setManualCaptureMode(CAM_MANUAL_CAPTURE_TYPE_1);
+                } else if (expTimeMs < QCAMERA_MAX_EXP_TIME_LEVEL2) {
+                    setManualCaptureMode(CAM_MANUAL_CAPTURE_TYPE_2);
+                } else if (expTimeMs < QCAMERA_MAX_EXP_TIME_LEVEL4) {
+                    setManualCaptureMode(CAM_MANUAL_CAPTURE_TYPE_3);
+                } else {
+                    setManualCaptureMode(CAM_MANUAL_CAPTURE_TYPE_OFF);
+                }
+            }
+
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_EXPOSURE_TIME,
+                    exp_settings)) {
+                return BAD_VALUE;
+            }
+            // Cache the (nanosecond) exposure time for later queries.
+            m_expTime = expTimeNs;
+
+            return NO_ERROR;
+        }
+    }
+
+    LOGE("Invalid exposure time, value: %s",
+          (expTimeStr == NULL) ? "NULL" : expTimeStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setLongshotEnable
+ *
+ * DESCRIPTION: set a flag indicating longshot mode
+ *
+ * PARAMETERS :
+ *   @enable  : true - Longshot enabled
+ *              false - Longshot disabled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setLongshotEnable(bool enable)
+{
+    int32_t rc = NO_ERROR;
+    int8_t value = enable ? 1 : 0;
+
+    // This setter commits immediately rather than piggy-backing on an
+    // existing batch, so it opens its own batch update.
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        LOGE("Failed to initialize group update table");
+        return BAD_TYPE;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_LONGSHOT_ENABLE, value)) {
+        LOGE("Failed to update table");
+        return BAD_VALUE;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        LOGE("Failed to commit parameter changes");
+        return rc;
+    }
+
+    // Cache the flag only after the backend accepted the change.
+    m_bLongshotEnabled = enable;
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFlash
+ *
+ * DESCRIPTION: map an LED flash mode string to its enum value and cache it;
+ *              the cached value is applied to the backend elsewhere
+ *
+ * PARAMETERS :
+ *   @flashStr : LED flash mode value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFlash(const char *flashStr)
+{
+    if (flashStr == NULL) {
+        LOGE("Invalid flash value: %s", "NULL");
+        return BAD_VALUE;
+    }
+    int32_t mapped = lookupAttr(FLASH_MODES_MAP, PARAM_MAP_SIZE(FLASH_MODES_MAP), flashStr);
+    if (mapped == NAME_NOT_FOUND) {
+        LOGE("Invalid flash value: %s", flashStr);
+        return BAD_VALUE;
+    }
+    LOGH("Setting Flash value %s", flashStr);
+    updateParamEntry(KEY_FLASH_MODE, flashStr);
+    mFlashValue = mapped;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : updateFlashMode
+ *
+ * DESCRIPTION: update flash mode
+ *
+ * PARAMETERS :
+ *   @flash_mode : LED flash mode value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateFlashMode(cam_flash_mode_t flash_mode)
+{
+    int32_t rc = NO_ERROR;
+    if (flash_mode >= CAM_FLASH_MODE_MAX) {
+        LOGH("Error!! Invalid flash mode (%d)", flash_mode);
+        return BAD_VALUE;
+    }
+    LOGH("Setting Flash mode from EZTune %d", flash_mode);
+
+    // Map the enum back to its string form so setFlash() can record it in
+    // the parameter map and cache mFlashValue.
+    const char *flash_mode_str = lookupNameByValue(FLASH_MODES_MAP,
+            PARAM_MAP_SIZE(FLASH_MODES_MAP), flash_mode);
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        LOGE("Failed to initialize group update table");
+        return BAD_TYPE;
+    }
+    rc = setFlash(flash_mode_str);
+    if (rc != NO_ERROR) {
+        LOGE("Failed to update Flash mode");
+        return rc;
+    }
+
+    LOGH("Setting Flash mode %d", mFlashValue);
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_LED_MODE, mFlashValue)) {
+        LOGE("Failed to set led mode");
+        return BAD_VALUE;
+    }
+
+    // Push the batch to the backend immediately (EZTune path).
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        LOGE("Failed to commit parameters");
+        return rc;
+    }
+
+    return NO_ERROR;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : configureFlash
+ *
+ * DESCRIPTION: configure Flash Bracketing.
+ *
+ * PARAMETERS :
+ *    @frame_config : output configuration structure to fill in.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::configureFlash(cam_capture_frame_config_t &frame_config)
+{
+    LOGH("E");
+    int32_t rc = NO_ERROR;
+    uint32_t i = 0;
+
+    if (isChromaFlashEnabled()) {
+
+        // Chroma flash needs tone map and CDS disabled before bracketing.
+        rc = setToneMapMode(false, false);
+        if (rc != NO_ERROR) {
+            LOGE("Failed to configure tone map");
+            return rc;
+        }
+
+        rc = setCDSMode(CAM_CDS_MODE_OFF, false);
+        if (rc != NO_ERROR) {
+            LOGE("Failed to configure csd mode");
+            return rc;
+        }
+
+        LOGH("Enable Chroma Flash capture");
+        cam_flash_mode_t flash_mode = CAM_FLASH_MODE_OFF;
+        // One single-frame batch per bracketing entry, clamped to capacity.
+        frame_config.num_batch =
+                m_pCapability->chroma_flash_settings_need.burst_count;
+        if (frame_config.num_batch > CAM_MAX_FLASH_BRACKETING) {
+            frame_config.num_batch = CAM_MAX_FLASH_BRACKETING;
+        }
+        for (i = 0; i < frame_config.num_batch; i++) {
+            flash_mode = (m_pCapability->chroma_flash_settings_need.flash_bracketing[i]) ?
+                    CAM_FLASH_MODE_ON:CAM_FLASH_MODE_OFF;
+            frame_config.configs[i].num_frames = 1;
+            frame_config.configs[i].type = CAM_CAPTURE_FLASH;
+            frame_config.configs[i].flash_mode = flash_mode;
+        }
+    } else if (mFlashValue != CAM_FLASH_MODE_OFF) {
+        // Plain flash capture: a single batch covering all snapshots.
+        frame_config.num_batch = 1;
+        for (i = 0; i < frame_config.num_batch; i++) {
+            frame_config.configs[i].num_frames = getNumOfSnapshots();
+            frame_config.configs[i].type = CAM_CAPTURE_FLASH;
+            frame_config.configs[i].flash_mode =(cam_flash_mode_t)mFlashValue;
+        }
+    }
+
+    LOGD("Flash frame batch cnt = %d",frame_config.num_batch);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : configureHDRBracketing
+ *
+ * DESCRIPTION: configure HDR Bracketing.
+ *
+ * PARAMETERS :
+ *    @frame_config : output configuration structure to fill in.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::configureHDRBracketing(cam_capture_frame_config_t &frame_config)
+{
+    LOGH("E");
+    int32_t rc = NO_ERROR;
+    uint32_t i = 0;
+
+    uint32_t hdrFrameCount = m_pCapability->hdr_bracketing_setting.num_frames;
+    LOGH("HDR values %d, %d frame count: %u",
+          (int8_t) m_pCapability->hdr_bracketing_setting.exp_val.values[0],
+          (int8_t) m_pCapability->hdr_bracketing_setting.exp_val.values[1],
+          hdrFrameCount);
+
+    // NOTE(review): unlike configureFlash(), num_batch is not clamped to the
+    // configs[] capacity here — confirm hdr_bracketing_setting.num_frames is
+    // bounded upstream before indexing configs[i] below.
+    frame_config.num_batch = hdrFrameCount;
+
+    cam_bracket_mode mode =
+            m_pCapability->hdr_bracketing_setting.exp_val.mode;
+    // Tone map is disabled while exposure bracketing is active.
+    if (mode == CAM_EXP_BRACKETING_ON) {
+        rc = setToneMapMode(false, true);
+        if (rc != NO_ERROR) {
+            LOGW("Failed to disable tone map during HDR");
+        }
+    }
+    // One single-frame batch per bracketed exposure value.
+    for (i = 0; i < frame_config.num_batch; i++) {
+        frame_config.configs[i].num_frames = 1;
+        frame_config.configs[i].type = CAM_CAPTURE_BRACKETING;
+        frame_config.configs[i].hdr_mode.mode = mode;
+        frame_config.configs[i].hdr_mode.values =
+                m_pCapability->hdr_bracketing_setting.exp_val.values[i];
+        LOGD("exp values %d",
+                (int)frame_config.configs[i].hdr_mode.values);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : configureAEBracketing
+ *
+ * DESCRIPTION: configure AE Bracketing.
+ *
+ * PARAMETERS :
+ *    @frame_config : output configuration structure to fill in.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::configureAEBracketing(cam_capture_frame_config_t &frame_config)
+{
+    LOGH("E");
+    int32_t rc = NO_ERROR;
+    uint32_t i = 0;
+    char exp_value[MAX_EXP_BRACKETING_LENGTH];
+
+    // Tone map interferes with AE bracketing; it is re-enabled in
+    // resetFrameCapture().
+    rc = setToneMapMode(false, true);
+    if (rc != NO_ERROR) {
+        LOGH("Failed to disable tone map during AEBracketing");
+    }
+
+    // Parse the comma-separated exposure values supplied by the client.
+    // Bound the token count by MAX_EXP_BRACKETING_LENGTH so that neither
+    // exp_value[] nor m_AEBracketingClient.values[] below is indexed out
+    // of range on an over-long client string (previously unbounded).
+    uint32_t burstCount = 0;
+    const char *str_val = m_AEBracketingClient.values;
+    if ((str_val != NULL) && (strlen(str_val) > 0)) {
+        char prop[PROPERTY_VALUE_MAX];
+        memset(prop, 0, sizeof(prop));
+        strlcpy(prop, str_val, PROPERTY_VALUE_MAX);
+        char *saveptr = NULL;
+        char *token = strtok_r(prop, ",", &saveptr);
+        while ((token != NULL) && (burstCount < MAX_EXP_BRACKETING_LENGTH)) {
+            exp_value[burstCount++] = (char)atoi(token);
+            token = strtok_r(NULL, ",", &saveptr);
+        }
+    }
+
+    frame_config.num_batch = burstCount;
+    cam_bracket_mode mode = m_AEBracketingClient.mode;
+
+    // One single-frame batch per exposure value.
+    for (i = 0; i < frame_config.num_batch; i++) {
+        frame_config.configs[i].num_frames = 1;
+        frame_config.configs[i].type = CAM_CAPTURE_BRACKETING;
+        frame_config.configs[i].hdr_mode.mode = mode;
+        frame_config.configs[i].hdr_mode.values =
+                m_AEBracketingClient.values[i];
+        LOGD("exp values %d", (int)m_AEBracketingClient.values[i]);
+    }
+
+    LOGH("num_frame = %d X", burstCount);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : configureLowLight
+ *
+ * DESCRIPTION: configure low light frame capture use case.
+ *
+ * PARAMETERS :
+ *    @frame_config : output configuration structure to fill in.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::configureLowLight(cam_capture_frame_config_t &frame_config)
+{
+    int32_t rc = NO_ERROR;
+
+    // Single batch that captures all requested snapshots with low-light
+    // processing forced on.
+    frame_config.num_batch = 1;
+    frame_config.configs[0].num_frames = getNumOfSnapshots();
+    frame_config.configs[0].type = CAM_CAPTURE_LOW_LIGHT;
+    frame_config.configs[0].low_light_mode = CAM_LOW_LIGHT_ON;
+    LOGH("Snapshot Count: %d", frame_config.configs[0].num_frames);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : configureManualCapture
+ *
+ * DESCRIPTION: configure manual capture.
+ *
+ * PARAMETERS :
+ *    @frame_config : output configuration structure to fill in.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::configureManualCapture(cam_capture_frame_config_t &frame_config)
+{
+    int32_t rc = NO_ERROR;
+    uint32_t i = 0;
+
+    LOGD("E");
+    if (getManualCaptureMode()) {
+        // One batch covering all requested snapshots with manual 3A applied.
+        frame_config.num_batch = 1;
+        for (i = 0; i < frame_config.num_batch; i++) {
+            frame_config.configs[i].num_frames = getNumOfSnapshots();
+            frame_config.configs[i].type = CAM_CAPTURE_MANUAL_3A;
+            // Non-zero exposure time means the user pinned it; otherwise
+            // leave exposure to AEC.
+            if (m_expTime != 0) {
+                frame_config.configs[i].manual_3A_mode.exp_mode = CAM_SETTINGS_TYPE_ON;
+                frame_config.configs[i].manual_3A_mode.exp_time = m_expTime;
+            } else {
+                frame_config.configs[i].manual_3A_mode.exp_mode = CAM_SETTINGS_TYPE_AUTO;
+                frame_config.configs[i].manual_3A_mode.exp_time = 0;
+            }
+
+            // Same pattern for ISO: 0 means auto.
+            if (m_isoValue != 0) {
+                frame_config.configs[i].manual_3A_mode.iso_mode = CAM_SETTINGS_TYPE_ON;
+                frame_config.configs[i].manual_3A_mode.iso_value = m_isoValue;
+            } else {
+                frame_config.configs[i].manual_3A_mode.iso_mode = CAM_SETTINGS_TYPE_AUTO;
+                frame_config.configs[i].manual_3A_mode.iso_value = 0;
+            }
+        }
+    }
+    LOGD("X: batch cnt = %d", frame_config.num_batch);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : configFrameCapture
+ *
+ * DESCRIPTION: configuration for ZSL special captures (FLASH/HDR etc)
+ *
+ * PARAMETERS :
+ *   @commitSettings : flag to commit the settings to the backend immediately
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::configFrameCapture(bool commitSettings)
+{
+    int32_t rc = NO_ERROR;
+    int32_t value;
+
+    memset(&m_captureFrameConfig, 0, sizeof(cam_capture_frame_config_t));
+
+    if (commitSettings) {
+        if(initBatchUpdate(m_pParamBuf) < 0 ) {
+            LOGE("Failed to initialize group update table");
+            return BAD_TYPE;
+        }
+    }
+
+    // Decide the effective flash mode: bracketing/refocus/low-light/manual
+    // modes are incompatible with flash, chroma flash forces it on, and
+    // otherwise the user's current flash setting applies.
+    if (isHDREnabled() || m_bAeBracketingEnabled || m_bAFBracketingOn ||
+          m_bOptiZoomOn || m_bReFocusOn || m_LowLightLevel
+          || getManualCaptureMode()) {
+        value = CAM_FLASH_MODE_OFF;
+    } else if (isChromaFlashEnabled()) {
+        value = CAM_FLASH_MODE_ON;
+    } else {
+        value = mFlashValue;
+    }
+
+    // Build the frame batch config for exactly one special-capture use case.
+    // NOTE(review): return codes of configureFlash/configureHDRBracketing/
+    // configureAEBracketing/configureLowLight are ignored here (rc is only
+    // captured for manual capture) — TODO confirm this is intentional.
+    if (value != CAM_FLASH_MODE_OFF) {
+        configureFlash(m_captureFrameConfig);
+    } else if(isHDREnabled()) {
+        configureHDRBracketing (m_captureFrameConfig);
+    } else if(isAEBracketEnabled()) {
+        configureAEBracketing (m_captureFrameConfig);
+    } else if (m_LowLightLevel) {
+        configureLowLight (m_captureFrameConfig);
+
+        //Added reset capture type as a last batch for back-end to restore settings.
+        int32_t batch_count = m_captureFrameConfig.num_batch;
+        m_captureFrameConfig.configs[batch_count].type = CAM_CAPTURE_RESET;
+        m_captureFrameConfig.configs[batch_count].num_frames = 0;
+        m_captureFrameConfig.num_batch++;
+    } else if (getManualCaptureMode() >= CAM_MANUAL_CAPTURE_TYPE_2){
+        rc = configureManualCapture (m_captureFrameConfig);
+        //Added reset capture type as a last batch for back-end to restore settings.
+        int32_t batch_count = m_captureFrameConfig.num_batch;
+        m_captureFrameConfig.configs[batch_count].type = CAM_CAPTURE_RESET;
+        m_captureFrameConfig.configs[batch_count].num_frames = 0;
+        m_captureFrameConfig.num_batch++;
+    }
+
+    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_CAPTURE_FRAME_CONFIG,
+            (cam_capture_frame_config_t)m_captureFrameConfig);
+    if (rc != NO_ERROR) {
+        rc = BAD_VALUE;
+        LOGE("Failed to set capture settings");
+        return rc;
+    }
+
+    if (commitSettings) {
+        rc = commitSetBatch();
+        if (rc != NO_ERROR) {
+            LOGE("Failed to commit parameters");
+            return rc;
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : resetFrameCapture
+ *
+ * DESCRIPTION: reset special captures settings(FLASH/HDR etc)
+ *
+ * PARAMETERS :
+ *   @commitSettings : flag to commit the settings to the backend immediately
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::resetFrameCapture(bool commitSettings)
+{
+    int32_t rc = NO_ERROR;
+    // A zeroed frame config sent below clears any special-capture batches.
+    memset(&m_captureFrameConfig, 0, sizeof(cam_capture_frame_config_t));
+
+    if (commitSettings) {
+        if(initBatchUpdate(m_pParamBuf) < 0 ) {
+            LOGE("Failed to initialize group update table");
+            return BAD_TYPE;
+        }
+    }
+
+    // Undo the mode-specific tweaks made when the capture was configured:
+    // re-enable tone map and stop bracketing, or restore the cached CDS mode.
+    if (isHDREnabled() || isAEBracketEnabled()) {
+        rc = setToneMapMode(true, true);
+        if (rc != NO_ERROR) {
+            LOGH("Failed to enable tone map during HDR/AEBracketing");
+        }
+        rc = stopAEBracket();
+    } else if ((isChromaFlashEnabled()) || (mFlashValue != CAM_FLASH_MODE_OFF)
+            || (getLowLightLevel() != CAM_LOW_LIGHT_OFF)) {
+        rc = setToneMapMode(true, false);
+        if (rc != NO_ERROR) {
+            LOGH("Failed to enable tone map during chroma flash");
+        }
+
+        // Restore the CDS mode cached in mCds_mode without forcing a restart.
+        rc = setCDSMode(mCds_mode, false);
+        if (rc != NO_ERROR) {
+            LOGE("Failed to configure csd mode");
+            return rc;
+        }
+    }
+
+    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_CAPTURE_FRAME_CONFIG,
+            (cam_capture_frame_config_t)m_captureFrameConfig);
+    if (rc != NO_ERROR) {
+        rc = BAD_VALUE;
+        LOGE("Failed to set capture settings");
+        return rc;
+    }
+
+    if (commitSettings) {
+        rc = commitSetBatch();
+        if (rc != NO_ERROR) {
+            LOGE("Failed to commit parameters");
+            return rc;
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAecLock
+ *
+ * DESCRIPTION: set AEC lock value
+ *
+ * PARAMETERS :
+ *   @aecLockStr : AEC lock value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAecLock(const char *aecLockStr)
+{
+    // Translate the true/false string and queue the AEC lock flag into the
+    // parameter batch; reject NULL or unmapped strings.
+    int32_t lockVal = NAME_NOT_FOUND;
+    if (aecLockStr != NULL) {
+        lockVal = lookupAttr(TRUE_FALSE_MODES_MAP,
+                PARAM_MAP_SIZE(TRUE_FALSE_MODES_MAP), aecLockStr);
+    }
+    if (lockVal == NAME_NOT_FOUND) {
+        LOGE("Invalid AECLock value: %s",
+            (aecLockStr == NULL) ? "NULL" : aecLockStr);
+        return BAD_VALUE;
+    }
+
+    LOGH("Setting AECLock value %s", aecLockStr);
+    updateParamEntry(KEY_AUTO_EXPOSURE_LOCK, aecLockStr);
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+            CAM_INTF_PARM_AEC_LOCK, (uint32_t)lockVal)) {
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAwbLock
+ *
+ * DESCRIPTION: set AWB lock value
+ *
+ * PARAMETERS :
+ *   @awbLockStr : AWB lock value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAwbLock(const char *awbLockStr)
+{
+    // Translate the true/false string and queue the AWB lock flag into the
+    // parameter batch; reject NULL or unmapped strings.
+    int32_t lockVal = NAME_NOT_FOUND;
+    if (awbLockStr != NULL) {
+        lockVal = lookupAttr(TRUE_FALSE_MODES_MAP,
+                PARAM_MAP_SIZE(TRUE_FALSE_MODES_MAP), awbLockStr);
+    }
+    if (lockVal == NAME_NOT_FOUND) {
+        LOGE("Invalid AWBLock value: %s", (awbLockStr == NULL) ? "NULL" : awbLockStr);
+        return BAD_VALUE;
+    }
+
+    LOGH("Setting AWBLock value %s", awbLockStr);
+    updateParamEntry(KEY_AUTO_WHITEBALANCE_LOCK, awbLockStr);
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+            CAM_INTF_PARM_AWB_LOCK, (uint32_t)lockVal)) {
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setMCEValue
+ *
+ * DESCRIPTION: set memory color enhancement value
+ *
+ * PARAMETERS :
+ *   @mceStr : MCE value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setMCEValue(const char *mceStr)
+{
+    if (mceStr != NULL) {
+        int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+                PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), mceStr);
+        if (value != NAME_NOT_FOUND) {
+            // Fix: log previously said "Setting AWBLock value" — a
+            // copy/paste from setAwbLock.
+            LOGH("Setting MCE value %s", mceStr);
+            updateParamEntry(KEY_QC_MEMORY_COLOR_ENHANCEMENT, mceStr);
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_MCE, value)) {
+                return BAD_VALUE;
+            }
+            return NO_ERROR;
+        }
+    }
+    LOGE("Invalid MCE value: %s", (mceStr == NULL) ? "NULL" : mceStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setTintlessValue
+ *
+ * DESCRIPTION: enable/disable tintless from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setTintlessValue(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_TINTLESS_ENABLE);
+    const char *prev_str = get(KEY_QC_TINTLESS_ENABLE);
+
+    // Property fallback used when the app supplied no value (default: on).
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.tintless", prop, VALUE_ENABLE);
+
+    if (str != NULL) {
+        // App supplied a value: apply it only when it changed.
+        bool changed = (prev_str == NULL) || (strcmp(str, prev_str) != 0);
+        if (changed) {
+            return setTintlessValue(str);
+        }
+    } else if ((prev_str == NULL) || (strcmp(prev_str, prop) != 0)) {
+        // No app value: track the property (return code ignored, as before).
+        setTintlessValue(prop);
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setTintless
+ *
+ * DESCRIPTION: set tintless mode
+ *
+ * PARAMETERS :
+ *   @enable : 1 = enable, 0 = disable
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::setTintless(bool enable)
+{
+    // Map the boolean onto the string values the parameter API expects.
+    setTintlessValue(enable ? VALUE_ENABLE : VALUE_DISABLE);
+}
+
+/*===========================================================================
+ * FUNCTION   : setTintlessValue
+ *
+ * DESCRIPTION: set tintless value
+ *
+ * PARAMETERS :
+ *   @tintStr : Tintless value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setTintlessValue(const char *tintStr)
+{
+    // Validate against the enable/disable map before touching the batch.
+    int32_t value = NAME_NOT_FOUND;
+    if (tintStr != NULL) {
+        value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+                PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), tintStr);
+    }
+    if (value == NAME_NOT_FOUND) {
+        LOGE("Invalid Tintless value: %s", (tintStr == NULL) ? "NULL" : tintStr);
+        return BAD_VALUE;
+    }
+
+    LOGH("Setting Tintless value %s", tintStr);
+    updateParamEntry(KEY_QC_TINTLESS_ENABLE, tintStr);
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_TINTLESS, value)) {
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setCDSMode
+ *
+ * DESCRIPTION: Set CDS mode
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setCDSMode(const QCameraParameters& params)
+{
+    // CDS mode has separate keys for video and snapshot use cases; the
+    // recording hint selects which applies. App-supplied values take
+    // precedence over the persist-property defaults, and only app-driven
+    // changes force a preview restart.
+    const char *str = params.get(KEY_QC_CDS_MODE);
+    const char *prev_str = get(KEY_QC_CDS_MODE);
+    const char *video_str = params.get(KEY_QC_VIDEO_CDS_MODE);
+    const char *video_prev_str = get(KEY_QC_VIDEO_CDS_MODE);
+    int32_t rc = NO_ERROR;
+
+    if (m_bRecordingHint_new == true) {
+        if (video_str) {
+            // App-provided video CDS value: apply only on change.
+            if ((video_prev_str == NULL) || (strcmp(video_str, video_prev_str) != 0)) {
+                int32_t cds_mode = lookupAttr(CDS_MODES_MAP, PARAM_MAP_SIZE(CDS_MODES_MAP),
+                        video_str);
+                if (cds_mode != NAME_NOT_FOUND) {
+                    updateParamEntry(KEY_QC_VIDEO_CDS_MODE, video_str);
+                    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_CDS_MODE, cds_mode)) {
+                        LOGE("Failed CDS MODE to update table");
+                        rc = BAD_VALUE;
+                    } else {
+                        LOGD("Set CDS in video mode = %d", cds_mode);
+                        mCds_mode = cds_mode;
+                        m_bNeedRestart = true;
+                    }
+                } else {
+                    LOGE("Invalid argument for video CDS MODE %d",  cds_mode);
+                    rc = BAD_VALUE;
+                }
+            }
+        } else {
+            // No app value: seed video CDS from setprop (default: on).
+            char video_prop[PROPERTY_VALUE_MAX];
+            memset(video_prop, 0, sizeof(video_prop));
+            property_get("persist.camera.video.CDS", video_prop, CDS_MODE_ON);
+            int32_t cds_mode = lookupAttr(CDS_MODES_MAP, PARAM_MAP_SIZE(CDS_MODES_MAP),
+                    video_prop);
+            if (cds_mode != NAME_NOT_FOUND) {
+                updateParamEntry(KEY_QC_VIDEO_CDS_MODE, video_prop);
+                if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_CDS_MODE, cds_mode)) {
+                    LOGE("Failed CDS MODE to update table");
+                    rc = BAD_VALUE;
+                } else {
+                    LOGD("Set CDS in video mode from setprop = %d", cds_mode);
+                    mCds_mode = cds_mode;
+                }
+            } else {
+                LOGE("Invalid prop for video CDS MODE %d",  cds_mode);
+                rc = BAD_VALUE;
+            }
+        }
+    } else {
+        if (str) {
+            // App-provided snapshot CDS value: apply only on change.
+            if ((prev_str == NULL) || (strcmp(str, prev_str) != 0)) {
+                int32_t cds_mode = lookupAttr(CDS_MODES_MAP, PARAM_MAP_SIZE(CDS_MODES_MAP),
+                        str);
+                if (cds_mode != NAME_NOT_FOUND) {
+                    updateParamEntry(KEY_QC_CDS_MODE, str);
+                    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_CDS_MODE, cds_mode)) {
+                        LOGE("Failed CDS MODE to update table");
+                        rc = BAD_VALUE;
+                    } else {
+                        LOGD("Set CDS in capture mode = %d", cds_mode);
+                        mCds_mode = cds_mode;
+                        m_bNeedRestart = true;
+                    }
+                } else {
+                    LOGE("Invalid argument for snapshot CDS MODE %d",  cds_mode);
+                    rc = BAD_VALUE;
+                }
+            }
+        } else {
+            // No app value: seed snapshot CDS from setprop (default: on).
+            char prop[PROPERTY_VALUE_MAX];
+            memset(prop, 0, sizeof(prop));
+            property_get("persist.camera.CDS", prop, CDS_MODE_ON);
+            int32_t cds_mode = lookupAttr(CDS_MODES_MAP, PARAM_MAP_SIZE(CDS_MODES_MAP),
+                    prop);
+            if (cds_mode != NAME_NOT_FOUND) {
+                updateParamEntry(KEY_QC_CDS_MODE, prop);
+                if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_CDS_MODE, cds_mode)) {
+                    LOGE("Failed CDS MODE to update table");
+                    rc = BAD_VALUE;
+                } else {
+                    LOGD("Set CDS in snapshot mode from setprop = %d", cds_mode);
+                    mCds_mode = cds_mode;
+                }
+            } else {
+                LOGE("Invalid prop for snapshot CDS MODE %d",  cds_mode);
+                rc = BAD_VALUE;
+            }
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setInitialExposureIndex
+ *
+ * DESCRIPTION: Set initial exposure index value
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setInitialExposureIndex(const QCameraParameters& params)
+{
+    int32_t rc = NO_ERROR;
+    int value = -1;
+    const char *str = params.get(KEY_QC_INITIAL_EXPOSURE_INDEX);
+    const char *prev_str = get(KEY_QC_INITIAL_EXPOSURE_INDEX);
+    if (str) {
+        // App-supplied value takes precedence; apply only on change.
+        if ((prev_str == NULL) || (strcmp(str, prev_str) != 0)) {
+            // NOTE(review): atoi silently yields 0 on a non-numeric string,
+            // which passes the >= 0 check — TODO confirm acceptable.
+            value = atoi(str);
+            LOGD("Set initial exposure index value from param = %d", value);
+            if (value >= 0) {
+                updateParamEntry(KEY_QC_INITIAL_EXPOSURE_INDEX, str);
+            }
+        }
+    } else {
+        // No app value: fall back to the setprop override, if present.
+        char prop[PROPERTY_VALUE_MAX];
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.camera.initial.exp.val", prop, "");
+        if ((strlen(prop) > 0) &&
+                ( (prev_str == NULL) || (strcmp(prop, prev_str) != 0))) {
+            value = atoi(prop);
+            LOGD("Set initial exposure index value from setprop = %d", value);
+            if (value >= 0) {
+                updateParamEntry(KEY_QC_INITIAL_EXPOSURE_INDEX, prop);
+            }
+        }
+    }
+
+    // value stays -1 when unchanged/absent, so nothing is sent in that case.
+    if (value >= 0) {
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+                CAM_INTF_PARM_INITIAL_EXPOSURE_INDEX, (uint32_t)value)) {
+            LOGE("Failed to update initial exposure index value");
+            rc = BAD_VALUE;
+        }
+    } else {
+        LOGD("Invalid value for initial exposure index value %d", value);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setInstantCapture
+ *
+ * DESCRIPTION: Set Instant Capture related params
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setInstantCapture(const QCameraParameters& params)
+{
+    int32_t rc = NO_ERROR;
+    int value = -1;
+    // Check for instant capture, this will enable instant AEC as well.
+    // This param will trigger the instant AEC param to backend
+    // And also will be useful for instant capture.
+    const char *str = params.get(KEY_QC_INSTANT_CAPTURE);
+    const char *prev_str = get(KEY_QC_INSTANT_CAPTURE);
+    if (str) {
+        // App-supplied value takes precedence; apply only on change.
+        if ((prev_str == NULL) || (strcmp(str, prev_str) != 0)) {
+            value = lookupAttr(INSTANT_CAPTURE_MODES_MAP,
+                    PARAM_MAP_SIZE(INSTANT_CAPTURE_MODES_MAP), str);
+            LOGD("Set instant Capture from param = %d", value);
+            if(value != NAME_NOT_FOUND) {
+                updateParamEntry(KEY_QC_INSTANT_CAPTURE, str);
+            } else {
+                LOGE("Invalid value for instant capture %s", str);
+                return BAD_VALUE;
+            }
+        }
+    } else {
+        // No app value: fall back to setprop (default: disabled).
+        char prop[PROPERTY_VALUE_MAX];
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.camera.instant.capture", prop, KEY_QC_INSTANT_CAPTURE_DISABLE);
+        if ((prev_str == NULL) || (strcmp(prop, prev_str) != 0)) {
+            value = lookupAttr(INSTANT_CAPTURE_MODES_MAP,
+                    PARAM_MAP_SIZE(INSTANT_CAPTURE_MODES_MAP), prop);
+            LOGD("Set instant capture from setprop = %d", value);
+            if (value != NAME_NOT_FOUND) {
+                updateParamEntry(KEY_QC_INSTANT_CAPTURE, prop);
+            } else {
+                LOGE("Invalid value for instant capture %s", prop);
+                return BAD_VALUE;
+            }
+        }
+    }
+
+    // Set instant AEC param to the backend for either instant capture or instant AEC
+    // 0 - disable (normal AEC)
+    // 1 - Aggressive AEC (algo used in backend)
+    // 2 - Fast AEC (algo used in backend)
+    // value stays -1 when the setting is unchanged, so AEC is not re-sent.
+    if (value != NAME_NOT_FOUND && value != -1) {
+        m_bInstantCapture = (value > 0)? true : false;
+        setInstantAEC((uint8_t)value, false);
+    }
+
+
+    // get frame aec bound value from setprop.
+    // This value indicates the number of frames, camera interface
+    // will wait for getting the instant capture frame.
+    // Default value set to 7.
+    // This value also indicates the number of frames, that HAL
+    // will not display and will not send preview frames to app
+    // This will be applicable only if instant capture is set.
+    if (m_bInstantCapture) {
+        char prop[PROPERTY_VALUE_MAX];
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.camera.ae.capture.bound", prop, "7");
+        int32_t frame_bound = atoi(prop);
+        if (frame_bound >= 0) {
+            mAecFrameBound = (uint8_t)frame_bound;
+        } else {
+            LOGE("Invalid prop for aec frame bound %d", frame_bound);
+            rc = BAD_VALUE;
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setInstantAEC
+ *
+ * DESCRIPTION: Set Instant AEC related params
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setInstantAEC(const QCameraParameters& params)
+{
+    int32_t rc = NO_ERROR;
+    int value = -1;
+
+    // Check for instant AEC only when instant capture is not enabled.
+    // Instant capture already takes care of the instant AEC as well.
+    if (!m_bInstantCapture) {
+        // Check for instant AEC. Instant AEC will only enable fast AEC.
+        // It will not enable instant capture.
+        // This param will trigger the instant AEC param to backend
+        // Instant AEC param is session based param,
+        // the param change will be applicable for next camera open/close session.
+        const char *str = params.get(KEY_QC_INSTANT_AEC);
+        const char *prev_str = get(KEY_QC_INSTANT_AEC);
+        if (str) {
+            // App-supplied value takes precedence; apply only on change.
+            if ((prev_str == NULL) || (strcmp(str, prev_str) != 0)) {
+                value = lookupAttr(INSTANT_AEC_MODES_MAP,
+                        PARAM_MAP_SIZE(INSTANT_AEC_MODES_MAP), str);
+                LOGD("Set instant AEC from param = %d", value);
+                if(value != NAME_NOT_FOUND) {
+                    updateParamEntry(KEY_QC_INSTANT_AEC, str);
+                } else {
+                    LOGE("Invalid value for instant AEC %s", str);
+                    return BAD_VALUE;
+                }
+            }
+        } else {
+            // No app value: fall back to setprop (default: disabled).
+            char prop[PROPERTY_VALUE_MAX];
+            memset(prop, 0, sizeof(prop));
+            property_get("persist.camera.instant.aec", prop, KEY_QC_INSTANT_AEC_DISABLE);
+            if ((prev_str == NULL) || (strcmp(prop, prev_str) != 0)) {
+                value = lookupAttr(INSTANT_AEC_MODES_MAP,
+                        PARAM_MAP_SIZE(INSTANT_AEC_MODES_MAP), prop);
+                LOGD("Set instant AEC from setprop = %d", value);
+                if(value != NAME_NOT_FOUND) {
+                    updateParamEntry(KEY_QC_INSTANT_AEC, prop);
+                } else {
+                    LOGE("Invalid value for instant AEC %s", prop);
+                    return BAD_VALUE;
+                }
+            }
+        }
+
+        // Set instant AEC param to the backend for either instant capture or instant AEC
+        // 0 - disable (normal AEC)
+        // 1 - Aggressive AEC (algo used in backend)
+        // 2 - Fast AEC (algo used in backend)
+        // value stays -1 when the setting is unchanged, so AEC is not re-sent.
+        if (value != NAME_NOT_FOUND && value != -1) {
+            setInstantAEC((uint8_t)value, false);
+        }
+
+    }
+
+    // get frame aec preview skip count from setprop.
+    // This value indicates the number of frames, that HAL
+    // will not display and will not send preview frames to app
+    // Default value set to 7.
+    // This will be applicable only if instant aec is set.
+    if (m_bInstantAEC) {
+        char prop[PROPERTY_VALUE_MAX];
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.camera.ae.instant.bound", prop, "7");
+        int32_t aec_frame_skip_cnt = atoi(prop);
+        if (aec_frame_skip_cnt >= 0) {
+            mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
+        } else {
+            LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
+            rc = BAD_VALUE;
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setDISValue
+ *
+ * DESCRIPTION: set DIS value
+ *
+ * PARAMETERS :
+ *   @disStr : DIS value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setDISValue(const char *disStr)
+{
+    // Reject NULL or unmapped strings up front.
+    int32_t value = NAME_NOT_FOUND;
+    if (disStr != NULL) {
+        value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+                PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), disStr);
+    }
+    if (value == NAME_NOT_FOUND) {
+        LOGE("Invalid DIS value: %s", (disStr == NULL) ? "NULL" : disStr);
+        m_bDISEnabled = false;
+        return BAD_VALUE;
+    }
+
+    //For some IS types (like EIS 2.0), when DIS value is changed, we need to restart
+    //preview because of topology change in backend. But, for now, restart preview
+    //for all IS types.
+    m_bNeedRestart = true;
+    LOGH("Setting DIS value %s", disStr);
+    updateParamEntry(KEY_QC_DIS, disStr);
+    m_bDISEnabled = (strcmp(disStr, "enable") == 0);
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_DIS_ENABLE, value)) {
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : updateOisValue
+ *
+ * DESCRIPTION: update OIS value
+ *
+ * PARAMETERS :
+ *   @oisValue : OIS value TRUE/FALSE
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateOisValue(bool oisValue)
+{
+    uint8_t enable = 0;
+    int32_t rc = NO_ERROR;
+
+    // Global OIS kill switch from setprop.
+    char ois_prop[PROPERTY_VALUE_MAX];
+    memset(ois_prop, 0, sizeof(ois_prop));
+    property_get("persist.camera.ois.disable", ois_prop, "0");
+    uint8_t ois_disable = (uint8_t)atoi(ois_prop);
+
+    //Enable OIS if it is camera mode or Camcoder 4K mode
+    if (!m_bRecordingHint || (is4k2kVideoResolution() && m_bRecordingHint)) {
+        enable = 1;
+        LOGH("Valid OIS mode!! ");
+    }
+    // Disable OIS if setprop is set or the caller requested it off.
+    if (ois_disable || !oisValue) {
+        //Disable OIS
+        enable = 0;
+        LOGH("Disable OIS mode!! ois_disable(%d) oisValue(%d)",
+                 ois_disable, oisValue);
+
+    }
+    m_bOISEnabled = enable;
+    if (m_bOISEnabled) {
+        updateParamEntry(KEY_QC_OIS, VALUE_ENABLE);
+    } else {
+        updateParamEntry(KEY_QC_OIS, VALUE_DISABLE);
+    }
+
+    if (initBatchUpdate(m_pParamBuf) < 0 ) {
+        LOGE("Failed to initialize group update table");
+        return BAD_TYPE;
+    }
+
+    LOGH("Sending OIS mode (%d)", enable);
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_META_LENS_OPT_STAB_MODE, enable)) {
+        LOGE("Failed to update table");
+        return BAD_VALUE;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        // Fix: message was garbled ("Failed to parameter changes"); use the
+        // same wording as the other commitSetBatch failure paths.
+        LOGE("Failed to commit parameters");
+        return rc;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setHighFrameRate
+ *
+ * DESCRIPTION: set high frame rate mode into the pending parameter batch;
+ *              the caller is responsible for committing the batch
+ *
+ * PARAMETERS :
+ *   @hfrMode : HFR mode
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setHighFrameRate(const int32_t hfrMode)
+{
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_HFR, hfrMode)) {
+        return BAD_VALUE;
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setLensShadeValue
+ *
+ * DESCRIPTION: set lens shade (rolloff correction) value
+ *
+ * PARAMETERS :
+ *   @lensShadeStr : lens shade value string ("enable"/"disable")
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setLensShadeValue(const char *lensShadeStr)
+{
+    if (lensShadeStr != NULL) {
+        int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+                PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), lensShadeStr);
+        if (value != NAME_NOT_FOUND) {
+            LOGH("Setting LensShade value %s", lensShadeStr);
+            updateParamEntry(KEY_QC_LENSSHADE, lensShadeStr);
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_ROLLOFF, value)) {
+                return BAD_VALUE;
+            }
+            return NO_ERROR;
+        }
+    }
+    LOGE("Invalid LensShade value: %s",
+          (lensShadeStr == NULL) ? "NULL" : lensShadeStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setExposureCompensation
+ *
+ * DESCRIPTION: set exposure compensation value
+ *
+ * PARAMETERS :
+ *   @expComp : exposure compensation value (in steps, not EV)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setExposureCompensation(int expComp)
+{
+    // Store the numeric value as a string in the parameter map.
+    char val[16];
+    snprintf(val, sizeof(val), "%d", expComp);
+    updateParamEntry(KEY_EXPOSURE_COMPENSATION, val);
+
+    // Don't need to pass step as part of setParameter because
+    // camera daemon is already aware of it.
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_EXPOSURE_COMPENSATION, expComp)) {
+        return BAD_VALUE;
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setWhiteBalance
+ *
+ * DESCRIPTION: set white balance mode
+ *
+ * PARAMETERS :
+ *   @wbStr   : white balance mode value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setWhiteBalance(const char *wbStr)
+{
+    if (wbStr != NULL) {
+        int32_t value = lookupAttr(WHITE_BALANCE_MODES_MAP,
+                PARAM_MAP_SIZE(WHITE_BALANCE_MODES_MAP), wbStr);
+        if (value != NAME_NOT_FOUND) {
+            LOGH("Setting WhiteBalance value %s", wbStr);
+            updateParamEntry(KEY_WHITE_BALANCE, wbStr);
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_WHITE_BALANCE, value)) {
+                return BAD_VALUE;
+            }
+            return NO_ERROR;
+        }
+    }
+    LOGE("Invalid WhiteBalance value: %s", (wbStr == NULL) ? "NULL" : wbStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setWBManualCCT
+ *
+ * DESCRIPTION: set manual white balance color temperature (CCT)
+ *
+ * PARAMETERS :
+ *   @cctStr : string of wb cct, range (2000, 8000) in K.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t  QCameraParameters::setWBManualCCT(const char *cctStr)
+{
+    if (cctStr != NULL) {
+        int32_t cctVal = atoi(cctStr);
+        // Valid range comes from sensor capability, typically 2000K..8000K.
+        int32_t minCct = m_pCapability->min_wb_cct; /* 2000K */
+        int32_t maxCct = m_pCapability->max_wb_cct; /* 8000K */
+
+        if (cctVal >= minCct && cctVal <= maxCct) {
+            LOGH(", cct value: %d", cctVal);
+            updateParamEntry(KEY_QC_WB_MANUAL_CCT, cctStr);
+            cam_manual_wb_parm_t manual_wb;
+            manual_wb.type = CAM_MANUAL_WB_MODE_CCT;
+            manual_wb.cct = cctVal;
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_WB_MANUAL, manual_wb)) {
+                return BAD_VALUE;
+            }
+            return NO_ERROR;
+        }
+    }
+
+    LOGE("Invalid cct, value: %s",
+            (cctStr == NULL) ? "NULL" : cctStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : updateAWBParams
+ *
+ * DESCRIPTION: reflect backend AWB results (CCT and RGB gains) back into the
+ *              parameter keys so apps can read the current values
+ *
+ * PARAMETERS :
+ *   @awb_params : WB parameters reported by the backend
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateAWBParams(cam_awb_params_t &awb_params)
+{
+    //check and update CCT only when it actually changed
+    int32_t prev_cct = getInt(KEY_QC_WB_MANUAL_CCT);
+    if (prev_cct != awb_params.cct_value) {
+        LOGD("update current cct value. old:%d, now:%d",
+                prev_cct, awb_params.cct_value);
+        set(KEY_QC_WB_MANUAL_CCT, awb_params.cct_value);
+    }
+
+    //check and update WB gains; compare by formatted string to avoid
+    //float comparison issues
+    const char *prev_gains = get(KEY_QC_MANUAL_WB_GAINS);
+    char gainStr[30];
+    snprintf(gainStr, sizeof(gainStr), "%f,%f,%f", awb_params.rgb_gains.r_gain,
+        awb_params.rgb_gains.g_gain, awb_params.rgb_gains.b_gain);
+
+    if (prev_gains == NULL || strcmp(prev_gains, gainStr)) {
+        set(KEY_QC_MANUAL_WB_GAINS, gainStr);
+        LOGD("update currernt RGB gains: old %s new %s", prev_gains, gainStr);
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : parseGains
+ *
+ * DESCRIPTION: parse a comma-separated "r,g,b" WB gains string
+ *
+ * PARAMETERS :
+ *   @gainStr : WB result string, must contain three comma-separated values
+ *   @r_gain  : WB red gain (output)
+ *   @g_gain  : WB green gain (output)
+ *   @b_gain  : WB blue gain (output)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::parseGains(const char *gainStr, double &r_gain,
+                                          double &g_gain, double &b_gain)
+{
+    int32_t rc = NO_ERROR;
+    char *saveptr = NULL;
+    // strtok_r mutates its input, so work on a private copy of gainStr.
+    size_t gains_size = strlen(gainStr) + 1;
+    char* gains = (char*) calloc(1, gains_size);
+    if (NULL == gains) {
+        LOGE("No memory for gains");
+        return NO_MEMORY;
+    }
+    strlcpy(gains, gainStr, gains_size);
+    char *token = strtok_r(gains, ",", &saveptr);
+
+    // NOTE(review): atof() returns double but is cast to float here,
+    // narrowing precision before widening back to double — confirm intended.
+    if (NULL != token) {
+        r_gain = (float) atof(token);
+        token = strtok_r(NULL, ",", &saveptr);
+    }
+
+    if (NULL != token) {
+        g_gain = (float) atof(token);
+        token = strtok_r(NULL, ",", &saveptr);
+    }
+
+    if (NULL != token) {
+        b_gain = (float) atof(token);
+    } else {
+        // Fewer than three tokens: reject; earlier outputs may be set.
+        LOGE("Malformed string for gains");
+        rc = BAD_VALUE;
+    }
+
+    free(gains);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setManualWBGains
+ *
+ * DESCRIPTION: set manual wb gains for r,g,b
+ *
+ * PARAMETERS :
+ *   @gainStr : string of wb gains "r,g,b", each in range (1.0, 4.0).
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setManualWBGains(const char *gainStr)
+{
+    int32_t rc = NO_ERROR;
+    if (gainStr != NULL) {
+        double r_gain,g_gain,b_gain;
+        rc = parseGains(gainStr, r_gain, g_gain, b_gain);
+        if (rc != NO_ERROR) {
+            return rc;
+        }
+
+        // Valid gain range is advertised by the sensor capability.
+        double minGain = m_pCapability->min_wb_gain;
+        double maxGain = m_pCapability->max_wb_gain;
+
+        if (r_gain >= minGain && r_gain <= maxGain &&
+            g_gain >= minGain && g_gain <= maxGain &&
+            b_gain >= minGain && b_gain <= maxGain) {
+            LOGH(", setting rgb gains: r = %lf g = %lf b = %lf",
+                     r_gain, g_gain, b_gain);
+            updateParamEntry(KEY_QC_MANUAL_WB_GAINS, gainStr);
+            cam_manual_wb_parm_t manual_wb;
+            manual_wb.type = CAM_MANUAL_WB_MODE_GAIN;
+            manual_wb.gains.r_gain = r_gain;
+            manual_wb.gains.g_gain = g_gain;
+            manual_wb.gains.b_gain = b_gain;
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_WB_MANUAL, manual_wb)) {
+                return BAD_VALUE;
+            }
+            return NO_ERROR;
+        }
+    }
+
+    LOGH("Invalid manual wb gains: %s",
+          (gainStr == NULL) ? "NULL" : gainStr);
+    return BAD_VALUE;
+}
+
+// Returns the auto flicker-detection (AFD) mode, overridable via the
+// persist.camera.set.afd property; defaults to "3".
+int QCameraParameters::getAutoFlickerMode()
+{
+    /* Enable Advanced Auto Antibanding where we can set
+       any of the following option
+       ie. CAM_ANTIBANDING_MODE_AUTO
+           CAM_ANTIBANDING_MODE_AUTO_50HZ
+           CAM_ANTIBANDING_MODE_AUTO_60HZ
+      Currently setting it to default    */
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.set.afd", prop, "3");
+    return atoi(prop);
+}
+
+/*===========================================================================
+ * FUNCTION   : setAntibanding
+ *
+ * DESCRIPTION: set antibanding value
+ *
+ * PARAMETERS :
+ *   @antiBandingStr : antibanding value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAntibanding(const char *antiBandingStr)
+{
+    if (antiBandingStr != NULL) {
+        int32_t value = lookupAttr(ANTIBANDING_MODES_MAP, PARAM_MAP_SIZE(ANTIBANDING_MODES_MAP),
+                antiBandingStr);
+        if (value != NAME_NOT_FOUND) {
+            LOGH("Setting AntiBanding value %s", antiBandingStr);
+            updateParamEntry(KEY_ANTIBANDING, antiBandingStr);
+            // "auto" is refined into the property-selected AFD mode.
+            if(value == CAM_ANTIBANDING_MODE_AUTO) {
+               value = getAutoFlickerMode();
+            }
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+                    CAM_INTF_PARM_ANTIBANDING, (uint32_t)value)) {
+                return BAD_VALUE;
+            }
+            return NO_ERROR;
+        }
+    }
+    LOGE("Invalid AntiBanding value: %s",
+          (antiBandingStr == NULL) ? "NULL" : antiBandingStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFocusAreas
+ *
+ * DESCRIPTION: parse, validate and apply focus areas as AF ROIs
+ *
+ * PARAMETERS :
+ *   @focusAreasStr : focus areas value string, coords in (-1000, 1000)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFocusAreas(const char *focusAreasStr)
+{
+    if (m_pCapability->max_num_focus_areas == 0 ||
+        focusAreasStr == NULL) {
+        LOGD("Parameter string is null");
+        return NO_ERROR;
+    }
+
+    cam_area_t *areas = (cam_area_t *)malloc(sizeof(cam_area_t) * m_pCapability->max_num_focus_areas);
+    if (NULL == areas) {
+        LOGE("No memory for areas");
+        return NO_MEMORY;
+    }
+    memset(areas, 0, sizeof(cam_area_t) * m_pCapability->max_num_focus_areas);
+    int num_areas_found = 0;
+    if (parseCameraAreaString(focusAreasStr,
+                              m_pCapability->max_num_focus_areas,
+                              areas,
+                              num_areas_found) != NO_ERROR) {
+        LOGE("Failed to parse the string: %s", focusAreasStr);
+        free(areas);
+        return BAD_VALUE;
+    }
+
+    if (validateCameraAreas(areas, num_areas_found) == false) {
+        LOGE("invalid areas specified : %s", focusAreasStr);
+        free(areas);
+        return BAD_VALUE;
+    }
+
+    updateParamEntry(KEY_FOCUS_AREAS, focusAreasStr);
+
+    //for special area string (0, 0, 0, 0, 0), set the num_areas_found to 0,
+    //so no action is taken by the lower layer
+    if (num_areas_found == 1 &&
+        areas[0].rect.left == 0 &&
+        areas[0].rect.top == 0 &&
+        areas[0].rect.width == 0 &&
+        areas[0].rect.height == 0 &&
+        areas[0].weight == 0) {
+        num_areas_found = 0;
+    }
+
+    int previewWidth, previewHeight;
+    getPreviewSize(&previewWidth, &previewHeight);
+    cam_roi_info_t af_roi_value;
+    memset(&af_roi_value, 0, sizeof(cam_roi_info_t));
+    af_roi_value.num_roi = (uint8_t)num_areas_found;
+    for (int i = 0; i < num_areas_found; i++) {
+        LOGH("FocusArea[%d] = (%d, %d, %d, %d)",
+               i, (areas[i].rect.top), (areas[i].rect.left),
+              (areas[i].rect.width), (areas[i].rect.height));
+
+        // Transform the coords from (-1000, 1000)
+        // to (0, previewWidth or previewHeight).
+        af_roi_value.roi[i].left =
+                (int32_t)(((double)areas[i].rect.left + 1000.0) *
+                    ((double)previewWidth / 2000.0));
+        af_roi_value.roi[i].top =
+                (int32_t)(((double)areas[i].rect.top + 1000.0) *
+                    ((double)previewHeight / 2000.0));
+        af_roi_value.roi[i].width =
+                (int32_t)((double)areas[i].rect.width *
+                    (double)previewWidth / 2000.0);
+        af_roi_value.roi[i].height =
+                (int32_t)((double)areas[i].rect.height *
+                    (double)previewHeight / 2000.0);
+        af_roi_value.weight[i] = areas[i].weight;
+    }
+    free(areas);
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_AF_ROI, af_roi_value)) {
+        return BAD_VALUE;
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setMeteringAreas
+ *
+ * DESCRIPTION: parse, validate and apply metering areas as AEC ROIs
+ *
+ * PARAMETERS :
+ *   @meteringAreasStr : metering areas value string, coords in (-1000, 1000)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setMeteringAreas(const char *meteringAreasStr)
+{
+    if (m_pCapability->max_num_metering_areas == 0 ||
+        meteringAreasStr == NULL) {
+        LOGD("Parameter string is null");
+        return NO_ERROR;
+    }
+
+    cam_area_t *areas = (cam_area_t *)malloc(sizeof(cam_area_t) * m_pCapability->max_num_metering_areas);
+    if (NULL == areas) {
+        LOGE("No memory for areas");
+        return NO_MEMORY;
+    }
+    memset(areas, 0, sizeof(cam_area_t) * m_pCapability->max_num_metering_areas);
+    int num_areas_found = 0;
+    if (parseCameraAreaString(meteringAreasStr,
+                              m_pCapability->max_num_metering_areas,
+                              areas,
+                              num_areas_found) < 0) {
+        LOGE("Failed to parse the string: %s", meteringAreasStr);
+        free(areas);
+        return BAD_VALUE;
+    }
+
+    if (validateCameraAreas(areas, num_areas_found) == false) {
+        LOGE("invalid areas specified : %s", meteringAreasStr);
+        free(areas);
+        return BAD_VALUE;
+    }
+
+    updateParamEntry(KEY_METERING_AREAS, meteringAreasStr);
+
+    //for special area string (0, 0, 0, 0, 0), set the num_areas_found to 0,
+    //so no action is taken by the lower layer
+    if (num_areas_found == 1 &&
+        areas[0].rect.left == 0 &&
+        areas[0].rect.top == 0 &&
+        areas[0].rect.width == 0 &&
+        areas[0].rect.height == 0 &&
+        areas[0].weight == 0) {
+        num_areas_found = 0;
+    }
+    cam_set_aec_roi_t aec_roi_value;
+    int previewWidth, previewHeight;
+    getPreviewSize(&previewWidth, &previewHeight);
+
+    memset(&aec_roi_value, 0, sizeof(cam_set_aec_roi_t));
+    if (num_areas_found > 0) {
+        aec_roi_value.aec_roi_enable = CAM_AEC_ROI_ON;
+        aec_roi_value.aec_roi_type = CAM_AEC_ROI_BY_COORDINATE;
+
+        for (int i = 0; i < num_areas_found; i++) {
+            LOGH("MeteringArea[%d] = (%d, %d, %d, %d)",
+                   i, (areas[i].rect.top), (areas[i].rect.left),
+                  (areas[i].rect.width), (areas[i].rect.height));
+
+            // Transform the coords from (-1000, 1000) to
+            // (0, previewWidth or previewHeight), using each area's center.
+            aec_roi_value.cam_aec_roi_position.coordinate[i].x =
+                    (uint32_t)((((double)areas[i].rect.left +
+                        (double)areas[i].rect.width / 2.0) + 1000.0) *
+                            (double)previewWidth / 2000.0);
+            aec_roi_value.cam_aec_roi_position.coordinate[i].y =
+                    (uint32_t)((((double)areas[i].rect.top +
+                        (double)areas[i].rect.height / 2.0) + 1000.0) *
+                            (double)previewHeight / 2000.0);
+        }
+    } else {
+        aec_roi_value.aec_roi_enable = CAM_AEC_ROI_OFF;
+    }
+    free(areas);
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_AEC_ROI, aec_roi_value)) {
+        return BAD_VALUE;
+    }
+
+    return NO_ERROR;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : isSupportedSensorHdrSize
+ *
+ * DESCRIPTION: Checks if the requested snapshot size is compatible with currently
+ *              configured HDR mode, currently primary target for validation is
+ *              zzhdr however this function can be extended in the future to vet
+ *              all sensor based HDR configs
+ *
+ * PARAMETERS :
+ *   @params  : CameraParameters object
+ *
+ * RETURN     : boolean type
+ *              True  -- indicates supported config
+ *              False -- indicated unsupported config should fallback to other
+ *              available HDR modes
+ *==========================================================================*/
+bool QCameraParameters::isSupportedSensorHdrSize(const QCameraParameters& params)
+{
+    // zzhdr validation is gated behind a system property; when the property
+    // is unset (default "0") every size is accepted.
+    char value[PROPERTY_VALUE_MAX];
+    memset(value, 0, sizeof(value));
+    property_get("persist.camera.zzhdr.enable", value, "0");
+    uint8_t zzhdr_enable = (uint8_t)atoi(value);
+
+    if (zzhdr_enable) {
+
+        int req_w, req_h;
+        params.getPictureSize(&req_w, &req_h);
+
+        // Check if requested w x h is in zzhdr supported list
+        for (size_t i = 0; i< m_pCapability->zzhdr_sizes_tbl_cnt; ++i) {
+
+            if (req_w == m_pCapability->zzhdr_sizes_tbl[i].width &&
+                    req_h == m_pCapability->zzhdr_sizes_tbl[i].height) {
+                LOGD("%s: Found match for %d x %d", __func__, req_w, req_h);
+                return true;
+            }
+        }
+        LOGH("%s: %d x %d is not supported for zzhdr mode", __func__, req_w, req_h);
+        return false;
+    }
+
+    return true;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSceneMode
+ *
+ * DESCRIPTION: set scene mode
+ *
+ * PARAMETERS :
+ *   @sceneModeStr : scene mode value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSceneMode(const char *sceneModeStr)
+{
+    if (sceneModeStr != NULL) {
+        int32_t value = lookupAttr(SCENE_MODES_MAP, PARAM_MAP_SIZE(SCENE_MODES_MAP), sceneModeStr);
+        if (value != NAME_NOT_FOUND) {
+            LOGD("Setting SceneMode %s", sceneModeStr);
+            updateParamEntry(KEY_SCENE_MODE, sceneModeStr);
+            if (m_bSensorHDREnabled) {
+              // In case of HW HDR mode, we do not update the same as Best shot mode.
+              LOGH("H/W HDR mode enabled. Do not set Best Shot Mode");
+              return NO_ERROR;
+            }
+            if (m_bSceneSelection) {
+                setSelectedScene((cam_scene_mode_type) value);
+            }
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_BESTSHOT_MODE,
+                    (uint32_t)value)) {
+                return BAD_VALUE;
+            }
+            return NO_ERROR;
+        }
+    }
+    // NOTE(review): "Secene" typo lives in the log string below.
+    LOGE("Invalid Secene Mode: %s",
+           (sceneModeStr == NULL) ? "NULL" : sceneModeStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSelectableZoneAf
+ *
+ * DESCRIPTION: set selectable zone AF algorithm
+ *
+ * PARAMETERS :
+ *   @selZoneAFStr : selectable zone AF algorithm value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSelectableZoneAf(const char *selZoneAFStr)
+{
+    if (selZoneAFStr != NULL) {
+        int32_t value = lookupAttr(FOCUS_ALGO_MAP, PARAM_MAP_SIZE(FOCUS_ALGO_MAP), selZoneAFStr);
+        if (value != NAME_NOT_FOUND) {
+            LOGD("Setting Selectable Zone AF value %s", selZoneAFStr);
+            updateParamEntry(KEY_QC_SELECTABLE_ZONE_AF, selZoneAFStr);
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_FOCUS_ALGO_TYPE, value)) {
+                return BAD_VALUE;
+            }
+            return NO_ERROR;
+        }
+    }
+    LOGE("Invalid selectable zone af value: %s",
+           (selZoneAFStr == NULL) ? "NULL" : selZoneAFStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : isAEBracketEnabled
+ *
+ * DESCRIPTION: checks if AE bracketing is enabled (any value other than
+ *              AE_BRACKET_OFF in the AE-bracket-HDR key)
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : TRUE/FALSE
+ *==========================================================================*/
+bool QCameraParameters::isAEBracketEnabled()
+{
+    const char *str = get(KEY_QC_AE_BRACKET_HDR);
+    if (str != NULL) {
+        if (strcmp(str, AE_BRACKET_OFF) != 0) {
+            return true;
+        }
+    }
+    return false;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAEBracket
+ *
+ * DESCRIPTION: set AE bracket value; the resulting configuration is cached
+ *              in m_AEBracketingClient rather than committed immediately
+ *
+ * PARAMETERS :
+ *   @aecBracketStr : AE bracket value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAEBracket(const char *aecBracketStr)
+{
+    if (aecBracketStr == NULL) {
+        LOGD("setAEBracket with NULL value");
+        return NO_ERROR;
+    }
+
+    cam_exp_bracketing_t expBracket;
+    memset(&expBracket, 0, sizeof(expBracket));
+
+    int value = lookupAttr(BRACKETING_MODES_MAP, PARAM_MAP_SIZE(BRACKETING_MODES_MAP),
+            aecBracketStr);
+    switch (value) {
+    case CAM_EXP_BRACKETING_ON:
+        {
+            LOGD("EXP_BRACKETING_ON");
+            // Exposure values come from a separate burst-exposure key.
+            const char *str_val = get(KEY_QC_CAPTURE_BURST_EXPOSURE);
+            if ((str_val != NULL) && (strlen(str_val)>0)) {
+                expBracket.mode = CAM_EXP_BRACKETING_ON;
+                m_bAeBracketingEnabled = true;
+                strlcpy(expBracket.values, str_val, MAX_EXP_BRACKETING_LENGTH);
+                LOGD("setting Exposure Bracketing value of %s",
+                       expBracket.values);
+            }
+            else {
+                /* Apps not set capture-burst-exposures, error case fall into bracketing off mode */
+                LOGD("capture-burst-exposures not set, back to HDR OFF mode");
+                m_bAeBracketingEnabled = false;
+                expBracket.mode = CAM_EXP_BRACKETING_OFF;
+            }
+        }
+        break;
+    default:
+        {
+            // Any other value (including NAME_NOT_FOUND) turns bracketing off.
+            m_bAeBracketingEnabled = false;
+            LOGH(", EXP_BRACKETING_OFF");
+            expBracket.mode = CAM_EXP_BRACKETING_OFF;
+        }
+        break;
+    }
+
+    // Cache client AE bracketing configuration
+    memcpy(&m_AEBracketingClient, &expBracket, sizeof(cam_exp_bracketing_t));
+
+    /* save the value*/
+    updateParamEntry(KEY_QC_AE_BRACKET_HDR, aecBracketStr);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : set3ALock
+ *
+ * DESCRIPTION: enable/disable 3A lock (AWB, AEC, and focus mode) and commit
+ *              the batch immediately.
+ *
+ * PARAMETERS :
+ *   @lock3A  : lock or unlock
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::set3ALock(bool lock3A)
+{
+    int32_t rc = NO_ERROR;
+    LOGH("Setting Lock %d", lock3A);
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        LOGE("Failed to initialize group update table");
+        return BAD_TYPE;
+    }
+    uint32_t focus_mode = CAM_FOCUS_MODE_AUTO;
+    if (lock3A) {
+        if (isUbiFocusEnabled() || isUbiRefocus()) {
+            //For Ubi focus move focus to infinity.
+            focus_mode = CAM_FOCUS_MODE_INFINITY;
+        } else if (isOptiZoomEnabled() || isStillMoreEnabled()) {
+            //For optizoom and stillmore, set focus as fixed.
+            focus_mode = CAM_FOCUS_MODE_FIXED;
+        }
+    } else {
+        // retrieve previous focus value.
+        const char *focus = get(KEY_FOCUS_MODE);
+        int val = lookupAttr(FOCUS_MODES_MAP, PARAM_MAP_SIZE(FOCUS_MODES_MAP), focus);
+        if (val != NAME_NOT_FOUND) {
+            focus_mode = (uint32_t) val;
+            LOGD("focus mode %s", focus);
+        }
+    }
+    //Lock AWB
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_AWB_LOCK, (uint32_t)lock3A)) {
+        return BAD_VALUE;
+    }
+    //Lock AEC
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_AEC_LOCK, (uint32_t)lock3A)) {
+        return BAD_VALUE;
+    }
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_FOCUS_MODE, focus_mode)) {
+        return BAD_VALUE;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        LOGE("Failed to commit batch");
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAndCommitZoom
+ *
+ * DESCRIPTION: set zoom level and commit it to the backend immediately.
+ *
+ * PARAMETERS :
+ *     @zoom_level : zoom level to set.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAndCommitZoom(int zoom_level)
+{
+    LOGH("E");
+    int32_t rc = NO_ERROR;
+    if (initBatchUpdate(m_pParamBuf) < 0 ) {
+        LOGE("Failed to initialize group update table");
+        return BAD_TYPE;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_ZOOM, zoom_level)) {
+        LOGE("Failed to update table");
+        return BAD_VALUE;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        // NOTE(review): copy-paste log message — this sets zoom, not flash.
+        LOGE("Failed to set Flash value");
+    }
+
+    // Cached even on commit failure; confirm this is intended.
+    mZoomLevel = zoom_level;
+    LOGH("X");
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : isOptiZoomEnabled
+ *
+ * DESCRIPTION: checks whether optizoom is enabled, i.e. the feature is on
+ *              and the current zoom level meets the capability threshold
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : true - enabled, false - disabled
+ *
+ *==========================================================================*/
+bool QCameraParameters::isOptiZoomEnabled()
+{
+    if (m_bOptiZoomOn && (0 <= mParmZoomLevel)) {
+        uint32_t zoom_level = (uint32_t) mParmZoomLevel;
+        cam_opti_zoom_t *opti_zoom_settings_need =
+                &(m_pCapability->opti_zoom_settings_need);
+        uint32_t zoom_threshold = (uint32_t) opti_zoom_settings_need->zoom_threshold;
+        LOGH("current zoom level =%u & zoom_threshold =%u",
+                 zoom_level, zoom_threshold);
+
+        if (zoom_level >= zoom_threshold) {
+            return true;
+        }
+    }
+
+    return false;
+}
+
+/*===========================================================================
+ * FUNCTION   : setNoiseReductionMode
+ *
+ * DESCRIPTION: set noise reduction mode; only updates local state and the
+ *              parameter key, nothing is sent to the backend here
+ *
+ * PARAMETERS :
+ *   @noiseReductionModeStr : noise reduction mode
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setNoiseReductionMode(const char *noiseReductionModeStr)
+{
+    LOGH("noiseReductionModeStr = %s", noiseReductionModeStr);
+    if (noiseReductionModeStr != NULL) {
+        int value = lookupAttr(NOISE_REDUCTION_MODES_MAP, PARAM_MAP_SIZE(NOISE_REDUCTION_MODES_MAP),
+                noiseReductionModeStr);
+        if (value != NAME_NOT_FOUND) {
+            // High-quality mode is detected by string prefix match.
+            m_bHighQualityNoiseReductionMode =
+                    !strncmp(VALUE_HIGH_QUALITY, noiseReductionModeStr, strlen(VALUE_HIGH_QUALITY));
+            updateParamEntry(KEY_QC_NOISE_REDUCTION_MODE, noiseReductionModeStr);
+            return NO_ERROR;
+        }
+    }
+    LOGE("Invalid noise reduction mode value: %s",
+            (noiseReductionModeStr == NULL) ? "NULL" : noiseReductionModeStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : commitAFBracket
+ *
+ * DESCRIPTION: commit AF Bracket configuration to the backend immediately.
+ *
+ * PARAMETERS :
+ *   @afBracket : AF bracketing configuration
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::commitAFBracket(cam_af_bracketing_t afBracket)
+{
+
+    int32_t rc = NO_ERROR;
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        LOGE("Failed to initialize group update table");
+        return BAD_TYPE;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_FOCUS_BRACKETING, afBracket)) {
+        LOGE("Failed to update table");
+        return BAD_VALUE;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        LOGE("Failed to commit batch");
+        return rc;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAFBracket
+ *
+ * DESCRIPTION: enable/disable AF bracketing from its mode string
+ *
+ * PARAMETERS :
+ *   @afBracketStr : AF bracket value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAFBracket(const char *afBracketStr)
+{
+    LOGH("afBracketStr =%s",afBracketStr);
+
+    int mapped = NAME_NOT_FOUND;
+    if (afBracketStr != NULL) {
+        mapped = lookupAttr(AF_BRACKETING_MODES_MAP,
+                PARAM_MAP_SIZE(AF_BRACKETING_MODES_MAP), afBracketStr);
+    }
+    if (mapped == NAME_NOT_FOUND) {
+        LOGE("Invalid af bracket value: %s",
+            (afBracketStr == NULL) ? "NULL" : afBracketStr);
+        return BAD_VALUE;
+    }
+
+    // Any non-zero mapped value turns AF bracketing on.
+    m_bAFBracketingOn = (mapped != 0);
+    updateParamEntry(KEY_QC_AF_BRACKET, afBracketStr);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setReFocus
+ *
+ * DESCRIPTION: set refocus value
+ *
+ * PARAMETERS :
+ *   @reFocusStr : refocus value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *
+ * NOTE(review): unlike the sibling setters, an unknown or NULL value is
+ *              silently ignored and NO_ERROR is returned -- presumably
+ *              intentional (refocus is optional); confirm before changing.
+ *==========================================================================*/
+int32_t QCameraParameters::setReFocus(const char *reFocusStr)
+{
+    LOGH("reFocusStr =%s",reFocusStr);
+
+    if (reFocusStr != NULL) {
+        int value = lookupAttr(RE_FOCUS_MODES_MAP, PARAM_MAP_SIZE(RE_FOCUS_MODES_MAP),
+                reFocusStr);
+        if (value != NAME_NOT_FOUND) {
+            // Any non-zero mapped value enables refocus.
+            m_bReFocusOn = (value != 0);
+            updateParamEntry(KEY_QC_RE_FOCUS, reFocusStr);
+            return NO_ERROR;
+        }
+    }
+
+    // Invalid/NULL input falls through without error (see NOTE above).
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setChromaFlash
+ *
+ * DESCRIPTION: enable/disable chroma flash from its mode string
+ *
+ * PARAMETERS :
+ *   @chromaFlashStr : chroma flash value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setChromaFlash(const char *chromaFlashStr)
+{
+    LOGH("chromaFlashStr =%s",chromaFlashStr);
+    int mapped = NAME_NOT_FOUND;
+    if (chromaFlashStr != NULL) {
+        mapped = lookupAttr(CHROMA_FLASH_MODES_MAP,
+                PARAM_MAP_SIZE(CHROMA_FLASH_MODES_MAP), chromaFlashStr);
+    }
+    if (mapped == NAME_NOT_FOUND) {
+        LOGE("Invalid chroma flash value: %s",
+            (chromaFlashStr == NULL) ? "NULL" : chromaFlashStr);
+        return BAD_VALUE;
+    }
+
+    // Any non-zero mapped value turns chroma flash on.
+    m_bChromaFlashOn = (mapped != 0);
+    updateParamEntry(KEY_QC_CHROMA_FLASH, chromaFlashStr);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setOptiZoom
+ *
+ * DESCRIPTION: enable/disable opti zoom from its mode string
+ *
+ * PARAMETERS :
+ *   @optiZoomStr : opti zoom value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setOptiZoom(const char *optiZoomStr)
+{
+    LOGH("optiZoomStr =%s",optiZoomStr);
+    int mapped = NAME_NOT_FOUND;
+    if (optiZoomStr != NULL) {
+        mapped = lookupAttr(OPTI_ZOOM_MODES_MAP,
+                PARAM_MAP_SIZE(OPTI_ZOOM_MODES_MAP), optiZoomStr);
+    }
+    if (mapped == NAME_NOT_FOUND) {
+        LOGE("Invalid opti zoom value: %s",
+            (optiZoomStr == NULL) ? "NULL" : optiZoomStr);
+        return BAD_VALUE;
+    }
+
+    // Any non-zero mapped value turns opti zoom on.
+    m_bOptiZoomOn = (mapped != 0);
+    updateParamEntry(KEY_QC_OPTI_ZOOM, optiZoomStr);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setTruePortrait
+ *
+ * DESCRIPTION: set true portrait value
+ *
+ * PARAMETERS :
+ *   @truePortraitStr : true portrait value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setTruePortrait(const char *truePortraitStr)
+{
+    LOGH("truePortraitStr =%s", truePortraitStr);
+    if (truePortraitStr != NULL) {
+        int value = lookupAttr(TRUE_PORTRAIT_MODES_MAP,
+                PARAM_MAP_SIZE(TRUE_PORTRAIT_MODES_MAP),
+                truePortraitStr);
+        if (value != NAME_NOT_FOUND) {
+            m_bTruePortraitOn = (value != 0);
+            updateParamEntry(KEY_QC_TRUE_PORTRAIT, truePortraitStr);
+            // Re-apply face detection since true portrait depends on it.
+            setFaceDetection(m_bFaceDetectionOn, false);
+            return NO_ERROR;
+        }
+    }
+    // Log invalid input at error level, consistent with sibling setters.
+    LOGE("Invalid true portrait value: %s",
+            (truePortraitStr == NULL) ? "NULL" : truePortraitStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setHDRMode
+ *
+ * DESCRIPTION: set hdr mode value (sensor HDR vs multi-frame HDR)
+ *
+ * PARAMETERS :
+ *   @hdrModeStr : hdr mode value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setHDRMode(const char *hdrModeStr)
+{
+    LOGH("hdrModeStr =%s", hdrModeStr);
+    if (hdrModeStr != NULL) {
+        int value = lookupAttr(HDR_MODES_MAP, PARAM_MAP_SIZE(HDR_MODES_MAP), hdrModeStr);
+        if (value != NAME_NOT_FOUND) {
+            const char *str = get(KEY_SCENE_MODE);
+
+            // True when the sensor itself performs HDR composition.
+            m_bHDRModeSensor = !strncmp(hdrModeStr, HDR_MODE_SENSOR, strlen(HDR_MODE_SENSOR));
+
+            updateParamEntry(KEY_QC_HDR_MODE, hdrModeStr);
+
+            // If hdr is already selected, need to deselect it in local cache
+            // So the new hdr mode will be applied
+            if (str && !strncmp(str, SCENE_MODE_HDR, strlen(SCENE_MODE_HDR))) {
+                updateParamEntry(KEY_SCENE_MODE, SCENE_MODE_AUTO);
+                m_bNeedRestart = true;
+            }
+
+            return NO_ERROR;
+        }
+    }
+    // Log invalid input at error level, consistent with sibling setters.
+    LOGE("Invalid hdr mode value: %s",
+            (hdrModeStr == NULL) ? "NULL" : hdrModeStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSeeMore
+ *
+ * DESCRIPTION: set see more value; enabling it also forces StillMore on for
+ *              live snapshot and toggles the tone map accordingly
+ *
+ * PARAMETERS :
+ *   @seeMoreStr : see more value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSeeMore(const char *seeMoreStr)
+{
+    int32_t rc = NO_ERROR;
+
+    LOGH("seeMoreStr =%s", seeMoreStr);
+    if (seeMoreStr != NULL) {
+        int value = lookupAttr(ON_OFF_MODES_MAP,
+                PARAM_MAP_SIZE(ON_OFF_MODES_MAP),
+                seeMoreStr);
+        if (value != NAME_NOT_FOUND) {
+            m_bSeeMoreOn = (value != 0);
+
+            // If SeeMore is enabled, enable StillMore for live snapshot
+            // and disable tone map
+            if (m_bSeeMoreOn) {
+                m_bStillMoreOn = TRUE;
+                // Tone map is skipped when LTM handles SeeMore in hardware.
+                if (!m_bLtmForSeeMoreEnabled) {
+                    rc = setToneMapMode(false, false);
+                }
+                // Tone-map failure is logged but deliberately non-fatal.
+                if (rc != NO_ERROR) {
+                    LOGH("Failed to disable tone map during SeeMore");
+                }
+            } else {
+                m_bStillMoreOn = FALSE;
+                if (!m_bLtmForSeeMoreEnabled) {
+                    rc = setToneMapMode(true, false);
+                }
+                if (rc != NO_ERROR) {
+                    LOGH("Failed to enable tone map during SeeMore");
+                }
+            }
+            updateParamEntry(KEY_QC_SEE_MORE, seeMoreStr);
+            return NO_ERROR;
+        }
+    }
+    LOGE("Invalid see more value: %s",
+            (seeMoreStr == NULL) ? "NULL" : seeMoreStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setStillMore
+ *
+ * DESCRIPTION: enable/disable still more from its mode string
+ *
+ * PARAMETERS :
+ *   @stillMoreStr : still more value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setStillMore(const char *stillMoreStr)
+{
+    LOGH("stillMoreStr =%s", stillMoreStr);
+    int mapped = NAME_NOT_FOUND;
+    if (stillMoreStr != NULL) {
+        mapped = lookupAttr(STILL_MORE_MODES_MAP,
+                PARAM_MAP_SIZE(STILL_MORE_MODES_MAP), stillMoreStr);
+    }
+    if (mapped == NAME_NOT_FOUND) {
+        LOGE("Invalid still more value: %s",
+            (stillMoreStr == NULL) ? "NULL" : stillMoreStr);
+        return BAD_VALUE;
+    }
+
+    // Any non-zero mapped value turns still more on.
+    m_bStillMoreOn = (mapped != 0);
+    updateParamEntry(KEY_QC_STILL_MORE, stillMoreStr);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setHDRNeed1x
+ *
+ * DESCRIPTION: set whether a normal-exposure (1x) frame is needed with HDR
+ *
+ * PARAMETERS :
+ *   @hdrNeed1xStr : hdr need 1x value string ("true"/"false")
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setHDRNeed1x(const char *hdrNeed1xStr)
+{
+    LOGH("hdrNeed1xStr =%s", hdrNeed1xStr);
+    if (hdrNeed1xStr != NULL) {
+        int value = lookupAttr(TRUE_FALSE_MODES_MAP, PARAM_MAP_SIZE(TRUE_FALSE_MODES_MAP),
+                hdrNeed1xStr);
+        if (value != NAME_NOT_FOUND) {
+            updateParamEntry(KEY_QC_HDR_NEED_1X, hdrNeed1xStr);
+            m_bHDR1xFrameEnabled = !strncmp(hdrNeed1xStr, VALUE_TRUE, strlen(VALUE_TRUE));
+            // Changing this setting requires a stream restart.
+            m_bNeedRestart = true;
+
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_HDR_NEED_1X,
+                    m_bHDR1xFrameEnabled)) {
+                return BAD_VALUE;
+            }
+            return NO_ERROR;
+        }
+    }
+
+    // Log invalid input at error level, consistent with sibling setters.
+    LOGE("Invalid hdr need 1x value: %s",
+            (hdrNeed1xStr == NULL) ? "NULL" : hdrNeed1xStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAEBracketing
+ *
+ * DESCRIPTION: push the client's AE bracketing configuration to the backend
+ *              in its own parameter batch
+ *
+ * PARAMETERS : none (uses m_AEBracketingClient)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAEBracketing()
+{
+    // Start a fresh batch so only this setting is committed.
+    if (initBatchUpdate(m_pParamBuf) < 0) {
+        LOGE("Failed to initialize group update table");
+        return BAD_TYPE;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_HDR, m_AEBracketingClient)) {
+        LOGE("Failed to update AE bracketing");
+        return BAD_VALUE;
+    }
+
+    int32_t rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        LOGE("Failed to configure AE bracketing");
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setHDRAEBracket
+ *
+ * DESCRIPTION: enables AE bracketing for HDR
+ *
+ * PARAMETERS :
+ *   @hdrBracket : HDR bracketing configuration
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setHDRAEBracket(cam_exp_bracketing_t hdrBracket)
+{
+    int32_t rc = NO_ERROR;
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        LOGE("Failed to initialize group update table");
+        return BAD_TYPE;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_HDR, hdrBracket)) {
+        LOGE("Failed to update table");
+        // BAD_VALUE (not BAD_TYPE) for a failed table entry, matching
+        // commitAFBracket() and setAEBracketing().
+        return BAD_VALUE;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        LOGE("Failed to configure HDR bracketing");
+        return rc;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setCacheVideoBuffers
+ *
+ * DESCRIPTION: set cache video buffers value; the setprop
+ *              persist.camera.mem.usecache overrides the supplied string
+ *
+ * PARAMETERS :
+ *   @cacheVideoBufStr : cache video buffer value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setCacheVideoBuffers(const char *cacheVideoBufStr)
+{
+    if (cacheVideoBufStr != NULL) {
+        // Use int32_t, not int8_t: the property override comes from atoi()
+        // and NAME_NOT_FOUND must be comparable without narrowing/truncation.
+        int32_t cacheVideoBuf = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+                PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), cacheVideoBufStr);
+        char prop[PROPERTY_VALUE_MAX];
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.camera.mem.usecache", prop, "");
+        if (strlen(prop) > 0) {
+            // Property takes precedence over the parameter string.
+            cacheVideoBuf = atoi(prop);
+        }
+        if (cacheVideoBuf != NAME_NOT_FOUND) {
+            const char *cacheStr = (strlen(prop) > 0) ? prop : cacheVideoBufStr;
+            LOGD("Setting video buffer %s",
+                    (cacheVideoBuf == 0) ? "UnCached" : "Cached");
+            return updateParamEntry(KEY_QC_CACHE_VIDEO_BUFFERS, cacheStr);
+        }
+        LOGE("Cache video buffers not set correctly");
+    }
+    return BAD_VALUE;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : setCacheVideoBuffers
+ *
+ * DESCRIPTION: Set buffers as Cache/Uncache Memory
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setCacheVideoBuffers(const QCameraParameters& params)
+{
+    // (fixed stray double semicolon on the line below)
+    const char *str = params.get(KEY_QC_CACHE_VIDEO_BUFFERS);
+    const char *prev_str = get(KEY_QC_CACHE_VIDEO_BUFFERS);
+
+    // Only push the setting down when it actually changed.
+    if (str != NULL) {
+        if (prev_str == NULL ||
+                strcmp(str, prev_str) != 0) {
+            return setCacheVideoBuffers(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : stopAEBracket
+ *
+ * DESCRIPTION: stops AE bracketing by committing CAM_EXP_BRACKETING_OFF
+ *              (used to restore state after HDR is done)
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::stopAEBracket()
+{
+  cam_exp_bracketing_t bracketing;
+
+  // Zero the whole struct: previously only .mode was set, so the rest of
+  // the structure was passed down uninitialized.
+  memset(&bracketing, 0, sizeof(bracketing));
+  bracketing.mode = CAM_EXP_BRACKETING_OFF;
+
+  return setHDRAEBracket(bracketing);
+}
+
+/*===========================================================================
+ * FUNCTION   : updateFlash
+ *
+ * DESCRIPTION: restores client flash configuration or disables flash
+ *
+ * PARAMETERS :
+ *   @commitSettings : flag indicating whether settings need to be commited
+ *                     (when false, the entry is added to a batch the caller
+ *                     is expected to have initialized and will commit)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateFlash(bool commitSettings)
+{
+    int32_t rc = NO_ERROR;
+    int32_t value;
+
+    if (commitSettings) {
+      if(initBatchUpdate(m_pParamBuf) < 0 ) {
+          LOGE("Failed to initialize group update table");
+          return BAD_TYPE;
+      }
+    }
+
+    // Features incompatible with flash force it off; chroma flash forces
+    // it on; otherwise restore the client's requested flash mode.
+    if (isHDREnabled() || m_bAeBracketingEnabled || m_bAFBracketingOn ||
+          m_bOptiZoomOn || m_bReFocusOn || m_LowLightLevel) {
+        value = CAM_FLASH_MODE_OFF;
+    } else if (m_bChromaFlashOn) {
+        value = CAM_FLASH_MODE_ON;
+    } else {
+        value = mFlashValue;
+    }
+
+    // Only push the LED mode when it differs from what was last sent down
+    // (cached in mFlashDaemonValue).
+    if (value != mFlashDaemonValue) {
+        LOGD("Setting Flash value %d", value);
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_LED_MODE, value)) {
+            LOGE("Failed to set led mode");
+            return BAD_VALUE;
+        }
+        mFlashDaemonValue = value;
+    } else {
+        rc = NO_ERROR;
+    }
+
+    if (commitSettings) {
+        rc = commitSetBatch();
+        if (rc != NO_ERROR) {
+            LOGE("Failed to configure HDR bracketing");
+            return rc;
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setRedeyeReduction
+ *
+ * DESCRIPTION: enable/disable red eye reduction from its mode string
+ *
+ * PARAMETERS :
+ *   @redeyeStr : red eye reduction value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRedeyeReduction(const char *redeyeStr)
+{
+    int32_t mapped = NAME_NOT_FOUND;
+    if (redeyeStr != NULL) {
+        mapped = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+                PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), redeyeStr);
+    }
+    if (mapped == NAME_NOT_FOUND) {
+        LOGE("Invalid RedEye Reduce value: %s",
+           (redeyeStr == NULL) ? "NULL" : redeyeStr);
+        return BAD_VALUE;
+    }
+
+    LOGD("Setting RedEye Reduce value %s", redeyeStr);
+    updateParamEntry(KEY_QC_REDEYE_REDUCTION, redeyeStr);
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+            CAM_INTF_PARM_REDEYE_REDUCTION, mapped)) {
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getDenoiseProcessPlate
+ *
+ * DESCRIPTION: query the denoise process plate, overridable via the setprops
+ *              persist.denoise.process.plates / persist.tnr.process.plates
+ *
+ * PARAMETERS :
+ *   @type : CAM_INTF_PARM_WAVELET_DENOISE or CAM_INTF_PARM_TEMPORAL_DENOISE
+ *
+ * RETURN     : NR process plate value
+ *==========================================================================*/
+cam_denoise_process_type_t
+        QCameraParameters::getDenoiseProcessPlate(cam_intf_parm_type_t type)
+{
+    // Property values 0..3 map to these plates; anything else keeps the
+    // default CAM_WAVELET_DENOISE_CBCR_ONLY.
+    static const cam_denoise_process_type_t kPlates[] = {
+        CAM_WAVELET_DENOISE_YCBCR_PLANE,
+        CAM_WAVELET_DENOISE_CBCR_ONLY,
+        CAM_WAVELET_DENOISE_STREAMLINE_YCBCR,
+        CAM_WAVELET_DENOISE_STREAMLINED_CBCR,
+    };
+
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    if (CAM_INTF_PARM_WAVELET_DENOISE == type) {
+        property_get("persist.denoise.process.plates", prop, "");
+    } else if (CAM_INTF_PARM_TEMPORAL_DENOISE == type) {
+        property_get("persist.tnr.process.plates", prop, "");
+    } else {
+        LOGW("Type not supported");
+        prop[0] = '\0';
+    }
+
+    cam_denoise_process_type_t processPlate = CAM_WAVELET_DENOISE_CBCR_ONLY;
+    if (strlen(prop) > 0) {
+        int idx = atoi(prop);
+        if (idx >= 0 && idx < (int)(sizeof(kPlates) / sizeof(kPlates[0]))) {
+            processPlate = kPlates[idx];
+        }
+    }
+    return processPlate;
+}
+
+/*===========================================================================
+ * FUNCTION   : setWaveletDenoise
+ *
+ * DESCRIPTION: set wavelet denoise value
+ *
+ * PARAMETERS :
+ *   @wnrStr : wavelet denoise value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success (also when WNR is unsupported)
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setWaveletDenoise(const char *wnrStr)
+{
+    // Silently succeed when the hardware does not advertise 2D denoise.
+    if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_DENOISE2D) == 0){
+        LOGH("WNR is not supported");
+        return NO_ERROR;
+    }
+
+    if (wnrStr != NULL) {
+        int value = lookupAttr(DENOISE_ON_OFF_MODES_MAP,
+                PARAM_MAP_SIZE(DENOISE_ON_OFF_MODES_MAP), wnrStr);
+        if (value != NAME_NOT_FOUND) {
+            updateParamEntry(KEY_QC_DENOISE, wnrStr);
+
+            cam_denoise_param_t temp;
+            memset(&temp, 0, sizeof(temp));
+            temp.denoise_enable = (uint8_t)value;
+            m_bWNROn = (value != 0);
+            // Process plate only matters when denoise is enabled; may be
+            // overridden via persist.denoise.process.plates.
+            if (m_bWNROn) {
+                temp.process_plates = getDenoiseProcessPlate(CAM_INTF_PARM_WAVELET_DENOISE);
+            }
+            LOGD("Denoise enable=%d, plates=%d",
+                   temp.denoise_enable, temp.process_plates);
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_WAVELET_DENOISE, temp)) {
+                return BAD_VALUE;
+            }
+            return NO_ERROR;
+        }
+    }
+    LOGE("Invalid Denoise value: %s", (wnrStr == NULL) ? "NULL" : wnrStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setRdiMode
+ *
+ * DESCRIPTION: enable/disable RDI mode from its mode string
+ *
+ * PARAMETERS :
+ *   @str     : rdi mode value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRdiMode(const char *str)
+{
+    LOGD("RDI_DEBUG  rdi mode value: %s", str);
+
+    int32_t mapped = NAME_NOT_FOUND;
+    if (str != NULL) {
+        mapped = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+                PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), str);
+    }
+    if (mapped == NAME_NOT_FOUND) {
+        LOGE("Invalid rdi mode value: %s", (str == NULL) ? "NULL" : str);
+        return BAD_VALUE;
+    }
+
+    updateParamEntry(KEY_QC_RDI_MODE, str);
+    m_bRdiMode = (mapped != 0);
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_RDI_MODE, mapped)) {
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : setSecureMode
+ *
+ * DESCRIPTION: set secure mode value
+ *
+ * PARAMETERS :
+ *   @str     : secure mode value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSecureMode(const char *str)
+{
+    // Re-indented to the file's 4-space convention (was 2-space).
+    LOGD("Secure mode value: %s", str);
+
+    if (str != NULL) {
+        int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+                PARAM_MAP_SIZE(ENABLE_DISABLE_MODES_MAP), str);
+        if (value != NAME_NOT_FOUND) {
+            updateParamEntry(KEY_QC_SECURE_MODE, str);
+            // Any non-zero mapped value enables secure mode.
+            m_bSecureMode = (value != 0);
+            return NO_ERROR;
+        }
+    }
+    LOGE("Invalid Secure mode value: %s",
+            (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamRotation
+ *
+ * DESCRIPTION: get stream rotation by its type
+ *
+ * PARAMETERS :
+ *   @streamType        : stream type
+ *   @featureConfig     : [output] stream feature config; rotation and the
+ *                        CAM_QCOM_FEATURE_ROTATION mask bit may be updated
+ *   @dim               : [in/out] stream dimension; width/height are swapped
+ *                        for 90/270 degree video rotation
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getStreamRotation(cam_stream_type_t streamType,
+                                            cam_pp_feature_config_t &featureConfig,
+                                            cam_dimension_t &dim)
+{
+    int32_t ret = NO_ERROR;
+    const char *str = get(KEY_QC_VIDEO_ROTATION);
+    int rotationParam = lookupAttr(VIDEO_ROTATION_MODES_MAP,
+            PARAM_MAP_SIZE(VIDEO_ROTATION_MODES_MAP), str);
+    featureConfig.rotation = ROTATE_0;
+    int swapDim = 0;
+    // Rotation is applied only to the video stream; every other stream
+    // type keeps ROTATE_0.
+    switch (streamType) {
+        case CAM_STREAM_TYPE_VIDEO:
+            switch(rotationParam) {
+                case 90:
+                    featureConfig.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
+                    featureConfig.rotation = ROTATE_90;
+                    swapDim = 1;
+                    break;
+                case 180:
+                    featureConfig.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
+                    featureConfig.rotation = ROTATE_180;
+                    break;
+                case 270:
+                    featureConfig.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
+                    featureConfig.rotation = ROTATE_270;
+                    swapDim = 1;
+                    break;
+                default:
+                    featureConfig.rotation = ROTATE_0;
+            }
+            break;
+        case CAM_STREAM_TYPE_PREVIEW:
+        case CAM_STREAM_TYPE_POSTVIEW:
+        case CAM_STREAM_TYPE_SNAPSHOT:
+        case CAM_STREAM_TYPE_RAW:
+        case CAM_STREAM_TYPE_METADATA:
+        case CAM_STREAM_TYPE_OFFLINE_PROC:
+        case CAM_STREAM_TYPE_DEFAULT:
+        default:
+            break;
+    }
+
+    // 90/270 degree rotation swaps the stream's width and height.
+    if (swapDim > 0) {
+        int w = 0;
+        w = dim.width;
+        dim.width = dim.height;
+        dim.height = w;
+    }
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamFormat
+ *
+ * DESCRIPTION: get stream format by its type
+ *
+ * PARAMETERS :
+ *   @streamType : [input] stream type
+ *   @format     : [output] stream format
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getStreamFormat(cam_stream_type_t streamType,
+                                            cam_format_t &format)
+{
+    int32_t ret = NO_ERROR;
+    format = CAM_FORMAT_MAX;
+    switch (streamType) {
+    case CAM_STREAM_TYPE_PREVIEW:
+        if (!isUBWCEnabled()) {
+#if VENUS_PRESENT
+            cam_dimension_t preview;
+            cam_dimension_t video;
+            getStreamDimension(CAM_STREAM_TYPE_VIDEO , video);
+            getStreamDimension(CAM_STREAM_TYPE_PREVIEW, preview);
+            // Use Venus-aligned NV21 when recording with matching
+            // preview/video sizes so one buffer layout serves both.
+            if (getRecordingHintValue() == true &&
+                    video.width == preview.width &&
+                    video.height == preview.height &&
+                    mPreviewFormat == CAM_FORMAT_YUV_420_NV21) {
+                format = CAM_FORMAT_YUV_420_NV21_VENUS;
+            } else
+#endif
+            format = mPreviewFormat;
+        } else {
+            format = mPreviewFormat;
+        }
+        break;
+    case CAM_STREAM_TYPE_POSTVIEW:
+    case CAM_STREAM_TYPE_CALLBACK:
+        format = mAppPreviewFormat;
+        break;
+    case CAM_STREAM_TYPE_ANALYSIS:
+        cam_analysis_info_t analysisInfo;
+        cam_feature_mask_t featureMask;
+
+        featureMask = 0;
+        getStreamPpMask(CAM_STREAM_TYPE_ANALYSIS, featureMask);
+        ret = getAnalysisInfo(
+                ((getRecordingHintValue() == true) && fdModeInVideo()),
+                FALSE,
+                featureMask,
+                &analysisInfo);
+        if (ret != NO_ERROR) {
+            LOGE("getAnalysisInfo failed, ret = %d", ret);
+            return ret;
+        }
+
+        // Use the HW analysis format only when it is Y-only; otherwise
+        // fall back to the app preview format.
+        if (analysisInfo.hw_analysis_supported &&
+                analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY) {
+            format = analysisInfo.analysis_format;
+        } else {
+            if (analysisInfo.hw_analysis_supported) {
+                LOGW("Invalid analysis_format %d\n",
+                        analysisInfo.analysis_format);
+            }
+            format = mAppPreviewFormat;
+        }
+        break;
+    case CAM_STREAM_TYPE_SNAPSHOT:
+        if ( mPictureFormat == CAM_FORMAT_YUV_422_NV16 ) {
+            format = CAM_FORMAT_YUV_422_NV16;
+        } else {
+            // persist.camera.snap.format == 1 selects NV61, else NV21.
+            char prop[PROPERTY_VALUE_MAX];
+            int snapshotFormat;
+            memset(prop, 0, sizeof(prop));
+            property_get("persist.camera.snap.format", prop, "0");
+            snapshotFormat = atoi(prop);
+            if(snapshotFormat == 1) {
+                format = CAM_FORMAT_YUV_422_NV61;
+            } else {
+                format = CAM_FORMAT_YUV_420_NV21;
+            }
+        }
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        if (isUBWCEnabled()) {
+            // persist.camera.video.ubwc (default 1) selects UBWC; any
+            // other value falls back to Venus NV21.
+            char prop[PROPERTY_VALUE_MAX];
+            int pFormat;
+            memset(prop, 0, sizeof(prop));
+            property_get("persist.camera.video.ubwc", prop, "1");
+            pFormat = atoi(prop);
+            if (pFormat == 1) {
+                format = CAM_FORMAT_YUV_420_NV12_UBWC;
+            } else {
+                format = CAM_FORMAT_YUV_420_NV21_VENUS;
+            }
+        } else {
+#if VENUS_PRESENT
+            format = CAM_FORMAT_YUV_420_NV21_VENUS;
+#else
+            format = CAM_FORMAT_YUV_420_NV21;
+#endif
+        }
+        break;
+    case CAM_STREAM_TYPE_RAW:
+        if ((isRdiMode()) || (getofflineRAW())) {
+            format = m_pCapability->rdi_mode_stream_fmt;
+        } else if (mPictureFormat >= CAM_FORMAT_YUV_RAW_8BIT_YUYV) {
+            format = (cam_format_t)mPictureFormat;
+        } else {
+            char raw_format[PROPERTY_VALUE_MAX];
+            int rawFormat;
+            memset(raw_format, 0, sizeof(raw_format));
+            /*Default value is CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG*/
+            property_get("persist.camera.raw.format", raw_format, "17");
+            rawFormat = atoi(raw_format);
+            format = (cam_format_t)rawFormat;
+            LOGH("Raw stream format %d bundled with snapshot",
+                    format);
+        }
+        break;
+    case CAM_STREAM_TYPE_METADATA:
+    case CAM_STREAM_TYPE_OFFLINE_PROC:
+    case CAM_STREAM_TYPE_DEFAULT:
+    default:
+        break;
+    }
+
+    LOGD("Stream type = %d Stream Format = %d", streamType, format);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFlipMode
+ *
+ * DESCRIPTION: get flip mode for a given stream type
+ *
+ * PARAMETERS :
+ *   @type : stream type
+ *
+ * RETURN     : int type of flip mode
+ *              0 - no flip
+ *              1 - FLIP_H
+ *              2 - FLIP_V
+ *              3 - FLIP_H | FLIP_V
+ *==========================================================================*/
+int QCameraParameters::getFlipMode(cam_stream_type_t type)
+{
+    const char *str = NULL;
+    int flipMode = 0; // no flip
+
+    switch (type) {
+    case CAM_STREAM_TYPE_PREVIEW:
+        // RDI preview takes no flip setting.
+        if (!isRdiMode()) {
+            str = get(KEY_QC_PREVIEW_FLIP);
+        }
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        str = get(KEY_QC_VIDEO_FLIP);
+        break;
+    case CAM_STREAM_TYPE_SNAPSHOT:
+    case CAM_STREAM_TYPE_POSTVIEW:
+        str = get(KEY_QC_SNAPSHOT_PICTURE_FLIP);
+        break;
+    default:
+        LOGD("No flip mode for stream type %d", type);
+        break;
+    }
+
+    // Fixed misleading brace placement and added braces to the inner if.
+    if (str != NULL) {
+        // Map the flip-mode string to its numeric value.
+        int value = lookupAttr(FLIP_MODES_MAP, PARAM_MAP_SIZE(FLIP_MODES_MAP), str);
+        if (value != NAME_NOT_FOUND) {
+            flipMode = value;
+        }
+    }
+
+    LOGH("the filp mode of stream type %d is %d .", type, flipMode);
+    return flipMode;
+}
+
+/*===========================================================================
+ * FUNCTION   : isSnapshotFDNeeded
+ *
+ * DESCRIPTION: check whether Face Detection metadata is needed in snapshot
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : bool type of status
+ *              true  - FD metadata is needed
+ *              false - FD metadata is not needed
+ *==========================================================================*/
+bool QCameraParameters::isSnapshotFDNeeded()
+{
+    // Any non-zero parameter value enables snapshot FD metadata.
+    return getInt(KEY_QC_SNAPSHOT_FD_DATA);
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamDimension
+ *
+ * DESCRIPTION: get stream dimension by its type
+ *
+ * PARAMETERS :
+ *   @streamType : [input] stream type
+ *   @dim        : [output] stream dimension (zeroed on entry)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getStreamDimension(cam_stream_type_t streamType,
+                                               cam_dimension_t &dim)
+{
+    int32_t ret = NO_ERROR;
+    memset(&dim, 0, sizeof(cam_dimension_t));
+
+    switch (streamType) {
+    case CAM_STREAM_TYPE_PREVIEW:
+    case CAM_STREAM_TYPE_CALLBACK:
+        getPreviewSize(&dim.width, &dim.height);
+        break;
+    case CAM_STREAM_TYPE_POSTVIEW:
+        // Postview shares the preview dimensions.
+        getPreviewSize(&dim.width, &dim.height);
+        break;
+    case CAM_STREAM_TYPE_SNAPSHOT:
+        if (isPostProcScaling()) {
+            // Post-processing scaler runs afterwards; capture at max size.
+            getMaxPicSize(dim);
+        } else if (getRecordingHintValue()) {
+            // live snapshot
+            getLiveSnapshotSize(dim);
+        } else {
+            getPictureSize(&dim.width, &dim.height);
+        }
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        getVideoSize(&dim.width, &dim.height);
+        break;
+    case CAM_STREAM_TYPE_RAW:
+        //dim = m_pCapability->raw_dim;
+        getRawSize(dim);
+        break;
+    case CAM_STREAM_TYPE_METADATA:
+        // Metadata "dimension" is the buffer byte size, expressed as Nx1.
+        dim.width = (int32_t)sizeof(metadata_buffer_t);
+        dim.height = 1;
+        break;
+    case CAM_STREAM_TYPE_OFFLINE_PROC:
+        if (isPostProcScaling()) {
+            // Offline reprocess output matches the final requested size.
+            if (getRecordingHintValue()) {
+                // live snapshot
+                getLiveSnapshotSize(dim);
+            } else {
+                getPictureSize(&dim.width, &dim.height);
+            }
+        }
+        break;
+    case CAM_STREAM_TYPE_ANALYSIS:
+        cam_dimension_t prv_dim, max_dim;
+
+        /* Analysis stream need aspect ratio as preview stream */
+        getPreviewSize(&prv_dim.width, &prv_dim.height);
+
+        cam_analysis_info_t analysisInfo;
+        cam_feature_mask_t featureMask;
+
+        featureMask = 0;
+        getStreamPpMask(CAM_STREAM_TYPE_ANALYSIS, featureMask);
+        ret = getAnalysisInfo(
+                ((getRecordingHintValue() == true) && fdModeInVideo()),
+                FALSE,
+                featureMask,
+                &analysisInfo);
+        if (ret != NO_ERROR) {
+            LOGE("getAnalysisInfo failed, ret = %d", ret);
+            return ret;
+        }
+
+        max_dim.width = analysisInfo.analysis_max_res.width;
+        max_dim.height = analysisInfo.analysis_max_res.height;
+
+        if (prv_dim.width > max_dim.width || prv_dim.height > max_dim.height) {
+            // Preview exceeds the analysis limit: shrink to fit while keeping
+            // the preview aspect ratio.
+            double max_ratio, requested_ratio;
+
+            max_ratio = (double)max_dim.width / (double)max_dim.height;
+            requested_ratio = (double)prv_dim.width / (double)prv_dim.height;
+
+            if (max_ratio < requested_ratio) {
+                dim.width = max_dim.width;
+                dim.height = (int32_t)((double)dim.width / requested_ratio);
+            } else {
+                dim.height = max_dim.height;
+                dim.width = (int32_t)((double)max_dim.height * requested_ratio);
+            }
+            // Round both dimensions down to even values.
+            dim.width &= ~0x1;
+            dim.height &= ~0x1;
+        } else {
+            dim.width = prv_dim.width;
+            dim.height = prv_dim.height;
+        }
+      break;
+    case CAM_STREAM_TYPE_DEFAULT:
+    default:
+        LOGE("no dimension for unsupported stream type %d",
+               streamType);
+        ret = BAD_VALUE;
+        break;
+    }
+
+    LOGD("Stream type = %d Stream Dimension = %d X %d",
+             streamType, dim.width, dim.height);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : getParameters
+ *
+ * DESCRIPTION: Return a heap-allocated C string containing the flattened
+ *              parameter pairs. Caller owns the returned buffer (free()).
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : a string containing parameter pairs, or NULL on allocation
+ *              failure
+ *==========================================================================*/
+char* QCameraParameters::getParameters()
+{
+    char* strParams = NULL;
+    String8 str;
+
+    // Initialized up front: they are only assigned inside the first scaling
+    // branch, so reading them later would otherwise be undefined if the two
+    // scaling checks ever disagreed.
+    int cur_width = 0, cur_height = 0;
+    //Need take care Scale picture size
+    if(m_reprocScaleParam.isScaleEnabled() &&
+        m_reprocScaleParam.isUnderScaling()){
+        // Temporarily report the APK-requested (scaled) picture size.
+        int scale_width, scale_height;
+
+        m_reprocScaleParam.getPicSizeFromAPK(scale_width,scale_height);
+        getPictureSize(&cur_width, &cur_height);
+
+        String8 pic_size;
+        char buffer[32];
+        snprintf(buffer, sizeof(buffer), "%dx%d", scale_width, scale_height);
+        pic_size.append(buffer);
+        set(CameraParameters::KEY_PICTURE_SIZE, pic_size);
+    }
+
+    str = flatten();
+    strParams = (char *)malloc(sizeof(char)*(str.length()+1));
+    if(strParams != NULL){
+        // strlcpy NUL-terminates within length+1 bytes; no memset or manual
+        // terminator needed.
+        strlcpy(strParams, str.string(), str.length()+1);
+    }
+
+    if(m_reprocScaleParam.isScaleEnabled() &&
+        m_reprocScaleParam.isUnderScaling()){
+        //need set back picture size
+        String8 pic_size;
+        char buffer[32];
+        snprintf(buffer, sizeof(buffer), "%dx%d", cur_width, cur_height);
+        pic_size.append(buffer);
+        set(CameraParameters::KEY_PICTURE_SIZE, pic_size);
+    }
+    return strParams;
+}
+
+#ifdef TARGET_TS_MAKEUP
+/*===========================================================================
+ * FUNCTION   : getTsMakeupInfo
+ *
+ * DESCRIPTION: query whether TsMakeup is enabled, and its levels when it is
+ *
+ * PARAMETERS :
+ *   @whiteLevel : [output] white level (set only when makeup is on)
+ *   @cleanLevel : [output] clean level (set only when makeup is on)
+ *
+ * RETURN     : Whether makeup is enabled or not
+ *==========================================================================*/
+bool QCameraParameters::getTsMakeupInfo(int &whiteLevel, int &cleanLevel) const
+{
+    const char* makeupSetting = get(QCameraParameters::KEY_TS_MAKEUP);
+    if (makeupSetting == NULL) {
+        LOGH("pch_makeup_enable = null");
+        return false;
+    }
+    bool enabled = (strcmp(makeupSetting, "On") == 0);
+    if (enabled) {
+        // Levels are only meaningful while makeup is switched on.
+        whiteLevel = getInt(QCameraParameters::KEY_TS_MAKEUP_WHITEN);
+        cleanLevel = getInt(QCameraParameters::KEY_TS_MAKEUP_CLEAN);
+    }
+    return enabled;
+}
+#endif
+
+/*===========================================================================
+ * FUNCTION   : getPreviewHalPixelFormat
+ *
+ * DESCRIPTION: map the preview stream's camera format to a HAL pixel format
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : HAL pixel format
+ *==========================================================================*/
+int QCameraParameters::getPreviewHalPixelFormat()
+{
+    int32_t halPixelFormat;
+    cam_format_t fmt;
+    getStreamFormat(CAM_STREAM_TYPE_PREVIEW,fmt);
+
+    switch (fmt) {
+    case CAM_FORMAT_YUV_420_NV12:
+        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
+        break;
+    case CAM_FORMAT_YUV_420_NV21:
+        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
+        break;
+    case CAM_FORMAT_YUV_420_NV21_ADRENO:
+        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
+        break;
+    case CAM_FORMAT_YUV_420_YV12:
+        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
+        break;
+    case CAM_FORMAT_YUV_420_NV12_VENUS:
+        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS;
+        break;
+    case CAM_FORMAT_YUV_420_NV21_VENUS:
+        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_VENUS;
+        break;
+    case CAM_FORMAT_YUV_420_NV12_UBWC:
+        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS_UBWC;
+        break;
+    // 422 formats and anything unrecognized fall back to NV21.
+    case CAM_FORMAT_YUV_422_NV16:
+    case CAM_FORMAT_YUV_422_NV61:
+    default:
+        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
+        break;
+    }
+    LOGH("format %d\n", halPixelFormat);
+    return halPixelFormat;
+}
+
+/*===========================================================================
+ * FUNCTION   : getThumbnailSize
+ *
+ * DESCRIPTION: get JPEG thumbnail size from the parameter set
+ *
+ * PARAMETERS :
+ *   @width, height : [output] thumbnail width and height
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::getThumbnailSize(int *width, int *height) const
+{
+    *width = getInt(KEY_JPEG_THUMBNAIL_WIDTH);
+    *height = getInt(KEY_JPEG_THUMBNAIL_HEIGHT);
+}
+
+/*===========================================================================
+ * FUNCTION   : getZSLBurstInterval
+ *
+ * DESCRIPTION: query the configured ZSL burst interval
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : ZSL burst interval value (1 when unset or negative)
+ *==========================================================================*/
+uint8_t QCameraParameters::getZSLBurstInterval()
+{
+    int value = getInt(KEY_QC_ZSL_BURST_INTERVAL);
+    // Missing or negative setting falls back to an interval of 1.
+    return (uint8_t)((value < 0) ? 1 : value);
+}
+
+/*===========================================================================
+ * FUNCTION   : getZSLQueueDepth
+ *
+ * DESCRIPTION: query the configured ZSL queue depth
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : ZSL queue depth value (2 default; 1 on low-memory devices)
+ *==========================================================================*/
+uint8_t QCameraParameters::getZSLQueueDepth()
+{
+    int depth = getInt(KEY_QC_ZSL_QUEUE_DEPTH);
+    if (depth < 0) {
+        depth = 2; // default depth when the key is missing/invalid
+    }
+    // Low-memory targets keep the ZSL queue minimal.
+    return (uint8_t)(isLowMemoryDevice() ? 1 : depth);
+}
+
+/*===========================================================================
+ * FUNCTION   : getZSLBackLookCount
+ *
+ * DESCRIPTION: query the ZSL backlook count setting
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : ZSL backlook count value (2 default; 1 on low-memory devices)
+ *==========================================================================*/
+uint8_t QCameraParameters::getZSLBackLookCount()
+{
+    int lookBack = getInt(KEY_QC_ZSL_BURST_LOOKBACK);
+    if (lookBack < 0) {
+        lookBack = 2; // default when the key is missing/invalid
+    }
+    // Low-memory targets look back by a single frame only.
+    return (uint8_t)(isLowMemoryDevice() ? 1 : lookBack);
+}
+/*===========================================================================
+ * FUNCTION   : isVideoBuffersCached
+ *
+ * DESCRIPTION: Query whether video buffers are cached or uncached
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : true when caching is enabled, false otherwise
+ *==========================================================================*/
+bool QCameraParameters::isVideoBuffersCached()
+{
+    const char *mode = get(KEY_QC_CACHE_VIDEO_BUFFERS);
+    // Cached only when the parameter is present and not explicitly disabled.
+    return (mode != NULL) && (strcmp(mode, VALUE_DISABLE) != 0);
+}
+/*===========================================================================
+ * FUNCTION   : getMaxUnmatchedFramesInQueue
+ *
+ * DESCRIPTION: get allowed ZSL max unmatched frames number
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : max number of unmatched frames allowed in the queue
+ *==========================================================================*/
+uint8_t QCameraParameters::getMaxUnmatchedFramesInQueue()
+{
+    // Derived from the minimum post-processing buffer count capability.
+    return (uint8_t)(m_pCapability->min_num_pp_bufs);
+}
+
+/*===========================================================================
+ * FUNCTION   : setRecordingHintValue
+ *
+ * DESCRIPTION: set recording hint and stage it in the parameter batch
+ *
+ * PARAMETERS :
+ *   @value   : video hint value (>0 means recording)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraParameters::setRecordingHintValue(int32_t value)
+{
+    LOGH("VideoHint = %d", value);
+    bool newValue = (value > 0)? true : false;
+
+    // A change in the hint requires a stream restart; stage the new value
+    // in m_bRecordingHint_new until the restart commits it.
+    if ( m_bRecordingHint != newValue ) {
+        m_bNeedRestart = true;
+        m_bRecordingHint_new = newValue;
+    } else {
+        m_bRecordingHint_new = m_bRecordingHint;
+    }
+    // Queue the hint into the parameter batch for the backend.
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_RECORDING_HINT, value)) {
+        return BAD_VALUE;
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumOfSnapshots
+ *
+ * DESCRIPTION: get number of snapshots per shutter press
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of snapshots per shutter (minimum 1)
+ *==========================================================================*/
+uint8_t QCameraParameters::getNumOfSnapshots()
+{
+    int requested = getInt(KEY_QC_NUM_SNAPSHOT_PER_SHUTTER);
+    // Anything non-positive (including a missing key) means a single shot.
+    return (requested > 0) ? (uint8_t)requested : (uint8_t)1;
+}
+
+/*===========================================================================
+ * FUNCTION   : getBurstCountForAdvancedCapture
+ *
+ * DESCRIPTION: get burst count for advanced capture.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of snapshots required for advanced capture.
+ *==========================================================================*/
+uint8_t QCameraParameters::getBurstCountForAdvancedCapture()
+{
+    uint32_t burstCount = 0;
+    if (isUbiFocusEnabled()) {
+        //number of snapshots required for Ubi Focus.
+        burstCount = m_pCapability->ubifocus_af_bracketing_need.burst_count;
+    } else if (isUbiRefocus()) {
+        //number of snapshots required for Ubi Refocus.
+        burstCount = m_pCapability->refocus_af_bracketing_need.burst_count;
+    } else if (isOptiZoomEnabled()) {
+        //number of snapshots required for Opti Zoom.
+        burstCount = m_pCapability->opti_zoom_settings_need.burst_count;
+    } else if (isChromaFlashEnabled()) {
+        //number of snapshots required for Chroma Flash.
+        burstCount = m_pCapability->chroma_flash_settings_need.burst_count;
+    } else if (isStillMoreEnabled()) {
+        //number of snapshots required for Still More.
+        if (isSeeMoreEnabled()) {
+            burstCount = 1;
+        } else if ((m_stillmore_config.burst_count >=
+                m_pCapability->stillmore_settings_need.min_burst_count) &&
+                (m_stillmore_config.burst_count <=
+                m_pCapability->stillmore_settings_need.max_burst_count)) {
+            // Client-requested count is within the capability range.
+            burstCount = m_stillmore_config.burst_count;
+        } else {
+            burstCount = m_pCapability->stillmore_settings_need.burst_count;
+        }
+    } else if (isHDREnabled()) {
+        //number of snapshots required for HDR.
+        burstCount = m_pCapability->hdr_bracketing_setting.num_frames;
+    } else if (isAEBracketEnabled()) {
+        // AE bracketing: one snapshot per comma-separated exposure value.
+        burstCount = 0;
+        const char *str_val = m_AEBracketingClient.values;
+        if ((str_val != NULL) && (strlen(str_val) > 0)) {
+            char prop[PROPERTY_VALUE_MAX];
+            memset(prop, 0, sizeof(prop));
+            strlcpy(prop, str_val, PROPERTY_VALUE_MAX);
+            char *saveptr = NULL;
+            char *token = strtok_r(prop, ",", &saveptr);
+            while (token != NULL) {
+                token = strtok_r(NULL, ",", &saveptr);
+                burstCount++;
+            }
+        }
+    }
+
+    // burstCount is unsigned, so "<= 0" reduces to "== 0".
+    if (burstCount == 0) {
+        burstCount = getNumOfSnapshots();
+    }
+
+    LOGH("Snapshot burst count = %d", burstCount);
+    return (uint8_t)burstCount;
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumOfRetroSnapshots
+ *
+ * DESCRIPTION: get number of retro active snapshots per shutter
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of retro active snapshots per shutter (0 when unset)
+ *==========================================================================*/
+uint8_t QCameraParameters::getNumOfRetroSnapshots()
+{
+    int retroCount = getInt(KEY_QC_NUM_RETRO_BURST_PER_SHUTTER);
+    if (retroCount < 0) {
+        retroCount = 0; // missing/negative key means no retro snapshots
+    }
+    LOGH("numOfRetroSnaps - %d", retroCount);
+    return (uint8_t)retroCount;
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumOfExtraHDRInBufsIfNeeded
+ *
+ * DESCRIPTION: get number of extra input buffers needed by HDR
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of extra buffers needed by HDR; 0 if not HDR enabled
+ *==========================================================================*/
+uint8_t QCameraParameters::getNumOfExtraHDRInBufsIfNeeded()
+{
+    unsigned int numOfBufs = 0;
+
+    if (isHDREnabled()) {
+        numOfBufs += m_pCapability->hdr_bracketing_setting.num_frames;
+        if (isHDR1xFrameEnabled() && isHDR1xExtraBufferNeeded()) {
+            numOfBufs++;
+        }
+        // Only additional buffers are returned. Guard the decrement: if the
+        // capability ever reports zero bracketing frames, the unsigned
+        // decrement would underflow and return 255.
+        if (numOfBufs > 0) {
+            numOfBufs--;
+        }
+    }
+
+    return (uint8_t)(numOfBufs);
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumOfExtraHDROutBufsIfNeeded
+ *
+ * DESCRIPTION: get number of extra output buffers needed by HDR
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of extra buffers needed by HDR; 0 if not HDR enabled
+ *==========================================================================*/
+uint8_t QCameraParameters::getNumOfExtraHDROutBufsIfNeeded()
+{
+    // One extra output buffer is needed only when HDR runs with a 1x frame.
+    return (uint8_t)((isHDREnabled() && isHDR1xFrameEnabled()) ? 1 : 0);
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegQuality
+ *
+ * DESCRIPTION: get jpeg encoding quality
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : jpeg encoding quality (85 when unset or negative)
+ *==========================================================================*/
+uint32_t QCameraParameters::getJpegQuality()
+{
+    int quality = getInt(KEY_JPEG_QUALITY);
+    // Fall back to the default quality of 85 for a missing/negative key.
+    return (uint32_t)((quality < 0) ? 85 : quality);
+}
+
+/*===========================================================================
+ * FUNCTION   : getRotation
+ *
+ * DESCRIPTION: get application configured rotation
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : rotation value (0 when EXIF rotation is used instead)
+ *==========================================================================*/
+uint32_t QCameraParameters::getRotation() {
+    // When the EXIF orientation tag carries the rotation, the captured
+    // image itself is not rotated.
+    if (useJpegExifRotation()) {
+        return 0;
+    }
+    return (mRotation < 0) ? 0 : (uint32_t)mRotation;
+}
+
+/*===========================================================================
+ * FUNCTION   : setJpegRotation
+ *
+ * DESCRIPTION: set jpeg rotation value configured internally
+ *
+ * PARAMETERS :
+ *   @rotation : rotation in degrees; only 0/90/180/270 are accepted
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::setJpegRotation(int rotation) {
+    switch (rotation) {
+    case 0:
+    case 90:
+    case 180:
+    case 270:
+        mJpegRotation = (uint32_t)rotation;
+        break;
+    default:
+        // Silently ignore anything that is not a right-angle rotation.
+        break;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getDeviceRotation
+ *
+ * DESCRIPTION: get device rotation value
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : device rotation value (0 when unset/negative)
+ *==========================================================================*/
+uint32_t QCameraParameters::getDeviceRotation() {
+    // A negative (unset) rotation is reported as 0.
+    return (mRotation < 0) ? 0 : (uint32_t)mRotation;
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegExifRotation
+ *
+ * DESCRIPTION: get exif rotation value
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : rotation value (0 unless EXIF rotation is in use)
+ *==========================================================================*/
+uint32_t QCameraParameters::getJpegExifRotation() {
+    // Only meaningful when the EXIF tag carries the rotation.
+    if (!useJpegExifRotation()) {
+        return 0;
+    }
+    return (mRotation < 0) ? 0 : (uint32_t)mRotation;
+}
+
+/*===========================================================================
+ * FUNCTION   : useJpegExifRotation
+ *
+ * DESCRIPTION: Check if jpeg exif rotation need to be used
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true if jpeg exif rotation need to be used
+ *==========================================================================*/
+bool QCameraParameters::useJpegExifRotation() {
+    char exifRotation[PROPERTY_VALUE_MAX];
+
+    property_get("persist.camera.exif.rotation", exifRotation, "off");
+
+    // EXIF rotation is used when explicitly requested via property...
+    if (strcmp(exifRotation, "on") == 0) {
+        return true;
+    }
+
+    // ...or when the hardware cannot rotate the image itself.
+    return (m_pCapability->qcom_supported_feature_mask &
+            CAM_QCOM_FEATURE_ROTATION) == 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : getEffectValue
+ *
+ * DESCRIPTION: get effect value
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : effect value (CAM_EFFECT_MODE_OFF when unset/unknown)
+ *==========================================================================*/
+int32_t QCameraParameters::getEffectValue()
+{
+    const char *effectName = get(KEY_EFFECT);
+    if (effectName == NULL) {
+        LOGW("Missing effect value");
+        return CAM_EFFECT_MODE_OFF;
+    }
+    // Linear scan of the NULL-terminated effect-name table.
+    for (uint32_t i = 0; EFFECT_MODES_MAP[i].desc != NULL; i++) {
+        if (strcmp(EFFECT_MODES_MAP[i].desc, effectName) == 0) {
+            return EFFECT_MODES_MAP[i].val;
+        }
+    }
+    return CAM_EFFECT_MODE_OFF;
+}
+
+/*===========================================================================
+ * FUNCTION   : parseGPSCoordinate
+ *
+ * DESCRIPTION: parse a GPS coordinate string into degree/minute/second
+ *              rationals
+ *
+ * PARAMETERS :
+ *   @coord_str : [input] coordinate string (decimal degrees)
+ *   @coord     : [output] ptr to array of 3 rat_t (deg, min, sec*10^4)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraParameters::parseGPSCoordinate(const char *coord_str, rat_t* coord)
+{
+    if(coord == NULL) {
+        LOGE("error, invalid argument coord == NULL");
+        return BAD_VALUE;
+    }
+    // Guard the input string too: atof(NULL) is undefined behavior.
+    if(coord_str == NULL) {
+        LOGE("error, invalid argument coord_str == NULL");
+        return BAD_VALUE;
+    }
+    // EXIF stores the magnitude; sign (N/S, E/W) is carried separately.
+    double degF = atof(coord_str);
+    if (degF < 0) {
+        degF = -degF;
+    }
+    double minF = (degF - (double)(int) degF) * 60.0;
+    double secF = (minF - (double)(int) minF) * 60.0;
+
+    getRational(&coord[0], (int)degF, 1);
+    getRational(&coord[1], (int)minF, 1);
+    // Seconds keep 4 decimal digits of precision.
+    getRational(&coord[2], (int)(secF * 10000.0), 10000);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifDateTime
+ *
+ * DESCRIPTION: query exif date time
+ *
+ * PARAMETERS :
+ *   @dateTime    : String to store exif date time.
+ *                  Left unchanged in case of error.
+ *   @subsecTime  : String to store exif time microseconds.
+ *                  Left unchanged in case of error.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifDateTime(String8 &dateTime, String8 &subsecTime)
+{
+    int32_t ret = NO_ERROR;
+
+    //get time and date from system
+    struct timeval tv;
+    struct tm timeinfo_data;
+
+    int res = gettimeofday(&tv, NULL);
+    if (0 == res) {
+        // localtime_r is the reentrant variant; timeinfo_data is the caller-
+        // provided result buffer.
+        struct tm *timeinfo = localtime_r(&tv.tv_sec, &timeinfo_data);
+        if (NULL != timeinfo) {
+            //Write datetime according to EXIF Spec
+            //"YYYY:MM:DD HH:MM:SS" (20 chars including \0)
+            dateTime = String8::format("%04d:%02d:%02d %02d:%02d:%02d",
+                    timeinfo->tm_year + 1900, timeinfo->tm_mon + 1,
+                    timeinfo->tm_mday, timeinfo->tm_hour,
+                    timeinfo->tm_min, timeinfo->tm_sec);
+            //Write subsec according to EXIF Sepc
+            subsecTime = String8::format("%06ld", tv.tv_usec);
+        } else {
+            LOGE("localtime_r() error");
+            ret = UNKNOWN_ERROR;
+        }
+    } else if (-1 == res) {
+        LOGE("gettimeofday() error: %s", strerror(errno));
+        ret = UNKNOWN_ERROR;
+    } else {
+        LOGE("gettimeofday() unexpected return code: %d", res);
+        ret = UNKNOWN_ERROR;
+    }
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : getRational
+ *
+ * DESCRIPTION: compose rational struct from numerator and denominator
+ *
+ * PARAMETERS :
+ *   @rat     : ptr to struct to store rational info
+ *   @num     : numerator of the rational (must be non-negative)
+ *   @denom   : denominator of the rational (must be non-negative)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getRational(rat_t *rat, int num, int denom)
+{
+    // Range check first (matches the original diagnostic order).
+    if ((num < 0) || (denom < 0)) {
+        LOGE("Negative values");
+        return BAD_VALUE;
+    }
+    if (rat == NULL) {
+        LOGE("NULL rat input");
+        return BAD_VALUE;
+    }
+    rat->num = (uint32_t)num;
+    rat->denom = (uint32_t)denom;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifFocalLength
+ *
+ * DESCRIPTION: get exif focal length as a rational
+ *
+ * PARAMETERS :
+ *   @focalLength : ptr to rational struct to store focal length
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifFocalLength(rat_t *focalLength)
+{
+    // Scale into fixed point before building the rational; the denominator
+    // restores the decimal precision.
+    float focal = getFloat(QCameraParameters::KEY_FOCAL_LENGTH);
+    int scaled = (int)(focal * FOCAL_LENGTH_DECIMAL_PRECISION);
+    return getRational(focalLength, scaled, FOCAL_LENGTH_DECIMAL_PRECISION);
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifIsoSpeed
+ *
+ * DESCRIPTION: get exif ISO speed from the configured ISO mode
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : ISO speed value (0 for auto or unknown modes)
+ *==========================================================================*/
+uint16_t QCameraParameters::getExifIsoSpeed()
+{
+    uint16_t isoSpeed = 0;
+    const char *iso_str = get(QCameraParameters::KEY_QC_ISO_MODE);
+    int iso_index = lookupAttr(ISO_MODES_MAP, PARAM_MAP_SIZE(ISO_MODES_MAP), iso_str);
+    switch (iso_index) {
+    case CAM_ISO_MODE_AUTO:
+        isoSpeed = 0;
+        break;
+    case CAM_ISO_MODE_DEBLUR:
+        isoSpeed = 1;
+        break;
+    case CAM_ISO_MODE_100:
+        isoSpeed = 100;
+        break;
+    case CAM_ISO_MODE_200:
+        isoSpeed = 200;
+        break;
+    case CAM_ISO_MODE_400:
+        isoSpeed = 400;
+        break;
+    case CAM_ISO_MODE_800:
+        isoSpeed = 800;
+        break;
+    case CAM_ISO_MODE_1600:
+        isoSpeed = 1600;
+        break;
+    case CAM_ISO_MODE_3200:
+        isoSpeed = 3200;
+        break;
+    default:
+        // Unknown mode (e.g. NAME_NOT_FOUND from lookupAttr) -> auto (0).
+        isoSpeed = 0;
+        break;
+    }
+    return isoSpeed;
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifGpsProcessingMethod
+ *
+ * DESCRIPTION: get GPS processing method, prefixed per the EXIF ASCII
+ *              convention
+ *
+ * PARAMETERS :
+ *   @gpsProcessingMethod : buffer to store GPS process method
+ *   @count               : [output] number of bytes written (incl. NUL)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifGpsProcessingMethod(char *gpsProcessingMethod,
+                                                      uint32_t &count)
+{
+    const char *str = get(KEY_GPS_PROCESSING_METHOD);
+    if(str != NULL) {
+        memcpy(gpsProcessingMethod, ExifAsciiPrefix, EXIF_ASCII_PREFIX_SIZE);
+        count = EXIF_ASCII_PREFIX_SIZE;
+        strlcpy(gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE, str, GPS_PROCESSING_METHOD_SIZE);
+        // Count the bytes actually stored, not strlen(str): strlcpy truncates
+        // to GPS_PROCESSING_METHOD_SIZE-1 chars, and using the raw source
+        // length would overstate count and write the terminator out of bounds.
+        count += (uint32_t)strlen(gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE);
+        gpsProcessingMethod[count++] = '\0'; // increase 1 for the last NULL char
+        return NO_ERROR;
+    } else {
+        return BAD_VALUE;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifLatitude
+ *
+ * DESCRIPTION: get exif latitude
+ *
+ * PARAMETERS :
+ *   @latitude : ptr to rational struct to store latitude info
+ *   @latRef   : character to indicate latitude reference ('N'/'S')
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifLatitude(rat_t *latitude,
+                                           char *latRef)
+{
+    const char *str = get(KEY_GPS_LATITUDE);
+    if (str == NULL) {
+        return BAD_VALUE;
+    }
+
+    // Propagate parse failures instead of silently ignoring the result.
+    int32_t rc = parseGPSCoordinate(str, latitude);
+    if (rc != NO_ERROR) {
+        return rc;
+    }
+
+    //set Latitude Ref: southern hemisphere for negative values
+    float latitudeValue = getFloat(KEY_GPS_LATITUDE);
+    latRef[0] = (latitudeValue < 0.0f) ? 'S' : 'N';
+    latRef[1] = '\0';
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifLongitude
+ *
+ * DESCRIPTION: get exif longitude
+ *
+ * PARAMETERS :
+ *   @longitude : ptr to rational struct to store longitude info
+ *   @lonRef    : character to indicate longitude reference ('E'/'W')
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifLongitude(rat_t *longitude,
+                                            char *lonRef)
+{
+    const char *str = get(KEY_GPS_LONGITUDE);
+    if (str == NULL) {
+        return BAD_VALUE;
+    }
+
+    // Propagate parse failures instead of silently ignoring the result.
+    int32_t rc = parseGPSCoordinate(str, longitude);
+    if (rc != NO_ERROR) {
+        return rc;
+    }
+
+    //set Longitude Ref: western hemisphere for negative values
+    float longitudeValue = getFloat(KEY_GPS_LONGITUDE);
+    lonRef[0] = (longitudeValue < 0.0f) ? 'W' : 'E';
+    lonRef[1] = '\0';
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifAltitude
+ *
+ * DESCRIPTION: get exif altitude
+ *
+ * PARAMETERS :
+ *   @altitude : ptr to rational struct to store altitude info
+ *   @altRef   : character to indicate altitude reference
+ *               (0 = above sea level, 1 = below, per EXIF)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifAltitude(rat_t *altitude,
+                                           char *altRef)
+{
+    const char *altStr = get(KEY_GPS_ALTITUDE);
+    if (altStr == NULL) {
+        return BAD_VALUE;
+    }
+    double alt = atof(altStr);
+    *altRef = 0;
+    if (alt < 0) {
+        // Below sea level: store the magnitude, flag via altRef.
+        *altRef = 1;
+        alt = -alt;
+    }
+    // Encode with 1/1000 precision.
+    return getRational(altitude, (int)(alt * 1000), 1000);
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifGpsDateTimeStamp
+ *
+ * DESCRIPTION: get exif GPS date time stamp
+ *
+ * PARAMETERS :
+ *   @gpsDateStamp : GPS date time stamp string
+ *   @bufLen       : length of the string
+ *   @gpsTimeStamp : ptr to rational struct to store time stamp info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifGpsDateTimeStamp(char *gpsDateStamp,
+                                                   uint32_t bufLen,
+                                                   rat_t *gpsTimeStamp)
+{
+    const char *str = get(KEY_GPS_TIMESTAMP);
+    if(str != NULL) {
+        time_t unixTime = (time_t)atol(str);
+        // Use the reentrant gmtime_r() instead of gmtime(): gmtime() returns
+        // a pointer into static storage and is not thread safe, while the
+        // camera HAL services multiple threads.
+        struct tm utc;
+        struct tm *UTCTimestamp = gmtime_r(&unixTime, &utc);
+
+        if(!UTCTimestamp) {
+            LOGE("UTCTimestamp is null\n");
+            return BAD_VALUE;
+        }
+
+        // EXIF GPSDateStamp format is "YYYY:MM:DD".
+        strftime(gpsDateStamp, bufLen, "%Y:%m:%d", UTCTimestamp);
+
+        // GPSTimeStamp is three rationals: hour, minute, second (UTC).
+        getRational(&gpsTimeStamp[0], UTCTimestamp->tm_hour, 1);
+        getRational(&gpsTimeStamp[1], UTCTimestamp->tm_min, 1);
+        getRational(&gpsTimeStamp[2], UTCTimestamp->tm_sec, 1);
+
+        return NO_ERROR;
+    } else {
+        return BAD_VALUE;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : updateFocusDistances
+ *
+ * DESCRIPTION: update focus distances
+ *
+ * PARAMETERS :
+ *   @focusDistances : ptr to focus distance info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateFocusDistances(cam_focus_distances_info_t *focusDistances)
+{
+    String8 str;
+    char buffer[32] = {0};
+    //set all distances to infinity if focus mode is infinity
+    if(mFocusMode == CAM_FOCUS_MODE_INFINITY) {
+        str.append("Infinity,Infinity,Infinity");
+    } else {
+        // Build "near,optimal,far". The backend reports diopters (1/m);
+        // values below FOCUS_PERCISION are treated as infinity so we never
+        // divide by (near) zero.
+        for (int i = 0; i < 3; i++) {
+            const char *sep = (i == 0) ? "" : ",";
+            if (focusDistances->focus_distance[i] < FOCUS_PERCISION) {
+                str.append(sep);
+                str.append("Infinity");
+            } else {
+                snprintf(buffer, sizeof(buffer), "%s%f", sep,
+                        1.0/focusDistances->focus_distance[i]);
+                str.append(buffer);
+            }
+        }
+    }
+    // Fixed format-argument mismatch: the format string has a single %s but
+    // was passed both __FUNCTION__ and the value, so the distance string was
+    // never actually logged.
+    LOGH("setting KEY_FOCUS_DISTANCES as %s", str.string());
+    set(QCameraParameters::KEY_FOCUS_DISTANCES, str.string());
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : updateRecordingHintValue
+ *
+ * DESCRIPTION: update recording hint locally and to daemon
+ *
+ * PARAMETERS :
+ *   @value   : video hint value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateRecordingHintValue(int32_t value)
+{
+    int32_t rc = NO_ERROR;
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        LOGE("Failed to initialize group update table");
+        return BAD_TYPE;
+    }
+
+    // Stage the new hint into the batch (also updates local caches).
+    rc = setRecordingHintValue(value);
+    if (rc != NO_ERROR) {
+        LOGE("Failed to update table");
+        return rc;
+    }
+
+    // Re-apply DIS when entering recording mode, so the hint change does not
+    // leave DIS unset.
+    if(m_bDISEnabled && (value==1)) {
+        // Fixed format-argument mismatch: "%d" had no matching argument,
+        // which is undefined behavior for printf-style logging.
+        LOGH("Setting DIS value again!!");
+        setDISValue(VALUE_ENABLE);
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        LOGE("Failed to update recording hint");
+        return rc;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setHistogram
+ *
+ * DESCRIPTION: set histogram
+ *
+ * PARAMETERS :
+ *   @enabled : if histogram is enabled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setHistogram(bool enabled)
+{
+    // No-op when the histogram state already matches the request.
+    if (m_bHistogramEnabled == enabled) {
+        LOGH("histogram flag not changed, no ops here");
+        return NO_ERROR;
+    }
+
+    // Stage the histogram parameter in a fresh batch and push it to the
+    // backend before updating the cached state.
+    if (initBatchUpdate(m_pParamBuf) < 0) {
+        LOGE("Failed to initialize group update table");
+        return BAD_TYPE;
+    }
+
+    int32_t value = enabled ? 1 : 0;
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_HISTOGRAM, value)) {
+        LOGE("Failed to update table");
+        return BAD_VALUE;
+    }
+
+    int32_t rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        LOGE("Failed to set histogram");
+        return rc;
+    }
+
+    // Only remember the new state once the backend accepted it.
+    m_bHistogramEnabled = enabled;
+
+    LOGH("Histogram -> %s", m_bHistogramEnabled ? "Enabled" : "Disabled");
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setIntEvent
+ *
+ * DESCRIPTION: set setIntEvent
+ *
+ * PARAMETERS :
+ *   @params : image size and dimensions
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setIntEvent(cam_int_evt_params_t params)
+{
+    int32_t rc = NO_ERROR;
+
+    if ( m_pParamBuf == NULL ) {
+        return NO_INIT;
+    }
+
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        LOGE("Failed to initialize group update table");
+        return BAD_TYPE;
+    }
+
+    // Sending snapshot taken notification back to Eztune
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_INT_EVT, params)) {
+        LOGE("Failed to update table");
+        return BAD_VALUE;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        // Fixed copy-pasted error text: this commits the INT_EVT parameter,
+        // not the frameskip parameter.
+        LOGE("Failed to set int event parm");
+        return rc;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFaceDetectionOption
+ *
+ * DESCRIPTION: set if face detection is enabled by SendCommand
+ *
+ * PARAMETERS :
+ *   @enabled : bool flag if face detection should be enabled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+ int32_t QCameraParameters::setFaceDetectionOption(bool enabled)
+{
+    // Cache-only setter: records whether SendCommand enabled face detection.
+    // Nothing is sent to the backend from here.
+    m_bFaceDetectionOn = enabled;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFaceDetection
+ *
+ * DESCRIPTION: set face detection
+ *
+ * PARAMETERS :
+ *   @enabled : if face detection is enabled
+ *   @initCommit : if configuration list need to be initialized and commited
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFaceDetection(bool enabled, bool initCommit)
+{
+    uint32_t faceProcMask = m_nFaceProcMask;
+    // set face detection mask
+    if (enabled) {
+        faceProcMask |= CAM_FACE_PROCESS_MASK_DETECTION;
+        if (getRecordingHintValue() > 0) {
+            // Recording: rebuild the mask from scratch with focus always on;
+            // detection is added only if the video-FD query allows it.
+            faceProcMask = 0;
+            faceProcMask |= CAM_FACE_PROCESS_MASK_FOCUS;
+            if (fdModeInVideo() == CAM_FACE_PROCESS_MASK_DETECTION) {
+                faceProcMask |= CAM_FACE_PROCESS_MASK_DETECTION;
+            }
+        } else {
+            // Non-recording: enable both focus and detection.
+            faceProcMask |= CAM_FACE_PROCESS_MASK_FOCUS;
+            faceProcMask |= CAM_FACE_PROCESS_MASK_DETECTION;
+        }
+        // Gaze processing is tied to the TruePortrait feature.
+        if (isTruePortraitEnabled()) {
+            LOGL("QCameraParameters::setFaceDetection trueportrait enabled");
+            faceProcMask |= CAM_FACE_PROCESS_MASK_GAZE;
+        } else {
+            LOGL("QCameraParameters::setFaceDetection trueportrait disabled");
+            faceProcMask &= ~CAM_FACE_PROCESS_MASK_GAZE;
+        }
+    } else {
+        // Disabled: clear every face-processing bit at once.
+        faceProcMask &= ~(CAM_FACE_PROCESS_MASK_DETECTION
+                | CAM_FACE_PROCESS_MASK_FOCUS
+                | CAM_FACE_PROCESS_MASK_GAZE);
+    }
+
+    // Avoid a redundant backend round-trip when the mask is unchanged.
+    if(m_nFaceProcMask == faceProcMask) {
+        LOGH("face process mask not changed, no ops here");
+        return NO_ERROR;
+    }
+
+    m_nFaceProcMask = faceProcMask;
+
+    // set parm for face detection
+    uint32_t requested_faces = (uint32_t)getInt(KEY_QC_MAX_NUM_REQUESTED_FACES);
+    cam_fd_set_parm_t fd_set_parm;
+    memset(&fd_set_parm, 0, sizeof(cam_fd_set_parm_t));
+    fd_set_parm.fd_mode = faceProcMask;
+    fd_set_parm.num_fd = requested_faces;
+
+    LOGH("[KPI Perf]: PROFILE_FACE_DETECTION_VALUE = %d num_fd = %d",
+           faceProcMask,requested_faces);
+
+    // When initCommit is false the caller owns the batch: it must have
+    // called initBatchUpdate() already and will commit later.
+    if (initCommit) {
+        if(initBatchUpdate(m_pParamBuf) < 0 ) {
+            LOGE("Failed to initialize group update table");
+            return BAD_TYPE;
+        }
+    }
+
+    int32_t rc = NO_ERROR;
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_FD, fd_set_parm)) {
+        LOGE("Failed to update table");
+        return BAD_VALUE;
+    }
+
+    if (initCommit) {
+        rc = commitSetBatch();
+        if (rc != NO_ERROR) {
+            LOGE("Failed to set face detection parm");
+            return rc;
+        }
+    }
+
+    LOGH("FaceProcMask -> %d", m_nFaceProcMask);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFrameSkip
+ *
+ * DESCRIPTION: send ISP frame skip pattern to camera daemon
+ *
+ * PARAMETERS :
+ *   @pattern : skip pattern for ISP
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFrameSkip(enum msm_vfe_frame_skip_pattern pattern)
+{
+    if (NULL == m_pParamBuf) {
+        return NO_INIT;
+    }
+
+    if (initBatchUpdate(m_pParamBuf) < 0) {
+        LOGE("Failed to initialize group update table");
+        return BAD_TYPE;
+    }
+
+    // Stage the ISP skip pattern (transported as a plain int32) and commit.
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_FRAMESKIP, (int32_t)pattern)) {
+        LOGE("Failed to update table");
+        return BAD_VALUE;
+    }
+
+    int32_t rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        LOGE("Failed to set frameskip info parm");
+        return rc;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : updateRAW
+ *
+ * DESCRIPTION: Query sensor output size based on maximum stream dimension
+ *
+ * PARAMETERS :
+ *   @max_dim : maximum stream dimension
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateRAW(cam_dimension_t max_dim)
+{
+    int32_t rc = NO_ERROR;
+    cam_dimension_t raw_dim, pic_dim;
+
+    // If offline raw is enabled, check the dimensions from Picture size since snapshot
+    // stream is not added but final JPEG is required of snapshot size
+    if (getofflineRAW()) {
+        // NOTE(review): pic_dim is read right after this call — assumes
+        // getStreamDimension() fills it on every path; confirm it cannot
+        // leave pic_dim uninitialized on failure.
+        getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT, pic_dim);
+        if (pic_dim.width > max_dim.width) {
+            max_dim.width = pic_dim.width;
+        }
+        if (pic_dim.height > max_dim.height) {
+            max_dim.height = pic_dim.height;
+        }
+    }
+
+    // No usable stream dimension: fall back to the first capability raw size.
+    if (max_dim.width == 0 || max_dim.height == 0) {
+        max_dim = m_pCapability->raw_dim[0];
+    }
+
+    // First batch: push MAX_DIMENSION so the backend can derive the matching
+    // sensor output size.
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        LOGE("Failed to initialize group update table");
+        return BAD_TYPE;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_MAX_DIMENSION, max_dim)) {
+        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION ");
+        return BAD_VALUE;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        LOGE("Failed to set lock CAM_INTF_PARM_MAX_DIMENSION parm");
+        return rc;
+    }
+
+    // Second batch: query the RAW dimension the backend chose for max_dim.
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        LOGE("Failed to initialize group update table");
+        return BAD_TYPE;
+    }
+
+    ADD_GET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_RAW_DIMENSION);
+
+    rc = commitGetBatch();
+    if (rc != NO_ERROR) {
+        LOGE("Failed to get commit CAM_INTF_PARM_RAW_DIMENSION");
+        return rc;
+    }
+
+    READ_PARAM_ENTRY(m_pParamBuf, CAM_INTF_PARM_RAW_DIMENSION, raw_dim);
+
+    LOGH("RAW Dimension = %d X %d",raw_dim.width,raw_dim.height);
+    // Backend returned nothing usable: fall back to the capability value.
+    if (raw_dim.width == 0 || raw_dim.height == 0) {
+        LOGW("Error getting RAW size. Setting to Capability value");
+        raw_dim = m_pCapability->raw_dim[0];
+    }
+    setRawSize(raw_dim);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setHDRSceneEnable
+ *
+ * DESCRIPTION: sets hdr scene detected flag
+ *
+ * PARAMETERS :
+ *   @bflag : hdr scene detected
+ *
+ * RETURN     : nothing
+ *==========================================================================*/
+void QCameraParameters::setHDRSceneEnable(bool bflag)
+{
+    // Trigger a flash re-evaluation only when the detected-HDR-scene state
+    // actually changes.
+    bool changed = (m_HDRSceneEnabled != bflag);
+    m_HDRSceneEnabled = bflag;
+
+    if (changed) {
+        updateFlash(true);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getASDStateString
+ *
+ * DESCRIPTION: get ASD result in string format
+ *
+ * PARAMETERS :
+ *   @scene : selected scene mode
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+ const char *QCameraParameters::getASDStateString(cam_auto_scene_t scene)
+{
+    // Map the ASD-detected scene enum to a human-readable name.
+    const char *name = "<Unknown!>";
+    switch (scene) {
+    case S_NORMAL:
+        name = "Normal";
+        break;
+    case S_SCENERY:
+        name = "Scenery";
+        break;
+    case S_PORTRAIT:
+        name = "Portrait";
+        break;
+    case S_PORTRAIT_BACKLIGHT:
+        name = "Portrait-Backlight";
+        break;
+    case S_SCENERY_BACKLIGHT:
+        name = "Scenery-Backlight";
+        break;
+    case S_BACKLIGHT:
+        name = "Backlight";
+        break;
+    default:
+        break;
+    }
+    return name;
+}
+
+/*===========================================================================
+ * FUNCTION   : parseNDimVector
+ *
+ * DESCRIPTION: helper function to parse a string like "(1, 2, 3, 4, ..., N)"
+ *              into N-dimension vector
+ *
+ * PARAMETERS :
+ *   @str     : string to be parsed
+ *   @num     : output array of size N to store vector element values
+ *   @N       : number of dimension
+ *   @delim   : delimiter to separate string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::parseNDimVector(const char *str, int *num, int N, char delim = ',')
+{
+    char *start, *end;
+    if (num == NULL) {
+        LOGE("Invalid output array (num == NULL)");
+        return BAD_VALUE;
+    }
+
+    // Guard against a NULL input string before dereferencing it below.
+    if (str == NULL) {
+        LOGE("Invalid input string (str == NULL)");
+        return BAD_VALUE;
+    }
+
+    //check if string starts and ends with parantheses
+    if(str[0] != '(' || str[strlen(str)-1] != ')') {
+        LOGE("Invalid format of string %s, valid format is (n1, n2, n3, n4 ...)",
+               str);
+        return BAD_VALUE;
+    }
+    // Skip the opening parenthesis and parse N integers separated by delim.
+    start = (char*) str;
+    start++;
+    for(int i=0; i<N; i++) {
+        *(num+i) = (int) strtol(start, &end, 10);
+        if(*end != delim && i < N-1) {
+            LOGE("Cannot find delimeter '%c' in string \"%s\". end = %c",
+                   delim, str, *end);
+            // Return BAD_VALUE instead of a bare -1 for consistency with the
+            // other error paths in this function (both are negative, so
+            // existing "< 0" callers are unaffected).
+            return BAD_VALUE;
+        }
+        start = end+1;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : parseCameraAreaString
+ *
+ * DESCRIPTION: helper function to parse a string of camera areas like
+ *              "(1, 2, 3, 4, 5),(1, 2, 3, 4, 5),..."
+ *
+ * PARAMETERS :
+ *   @str             : string to be parsed
+ *   @max_num_areas   : max number of areas
+ *   @pAreas          : ptr to struct to store areas
+ *   @num_areas_found : number of areas found
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::parseCameraAreaString(const char *str,
+                                                 int max_num_areas,
+                                                 cam_area_t *pAreas,
+                                                 int& num_areas_found)
+{
+    char area_str[32];
+    const char *start, *end, *p;
+    start = str; end = NULL;
+    int values[5], index=0;
+    num_areas_found = 0;
+
+    memset(values, 0, sizeof(values));
+    while(start != NULL) {
+       if(*start != '(') {
+            LOGE("error: Ill formatted area string: %s", str);
+            return BAD_VALUE;
+       }
+       end = strchr(start, ')');
+       if(end == NULL) {
+            LOGE("error: Ill formatted area string: %s", str);
+            return BAD_VALUE;
+       }
+       // Reject area tokens longer than the local buffer; the previous
+       // unbounded copy loop could overflow area_str[] on an oversized
+       // (app-supplied, untrusted) input string. One byte is reserved for
+       // the terminator added after the copy.
+       if ((size_t)(end - start + 1) >= sizeof(area_str)) {
+            LOGE("error: area token too long in string: %s", str);
+            return BAD_VALUE;
+       }
+       // Copy "(l, t, r, b, w)" inclusive of the parentheses.
+       int i;
+       for (i=0,p=start; p<=end; p++, i++) {
+           area_str[i] = *p;
+       }
+       area_str[i] = '\0';
+       if(parseNDimVector(area_str, values, 5) < 0){
+            LOGE("error: Failed to parse the area string: %s", area_str);
+            return BAD_VALUE;
+       }
+       // no more areas than max_num_areas are accepted.
+       if(index >= max_num_areas) {
+            LOGE("error: too many areas specified %s", str);
+            return BAD_VALUE;
+       }
+       // Input is (left, top, right, bottom, weight); store as left/top
+       // plus width/height.
+       pAreas[index].rect.left = values[0];
+       pAreas[index].rect.top = values[1];
+       pAreas[index].rect.width = values[2] - values[0];
+       pAreas[index].rect.height = values[3] - values[1];
+       pAreas[index].weight = values[4];
+
+       index++;
+       start = strchr(end, '('); // search for next '('
+    }
+    num_areas_found = index;
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : validateCameraAreas
+ *
+ * DESCRIPTION: helper function to validate camera areas within (-1000, 1000)
+ *
+ * PARAMETERS :
+ *   @areas     : ptr to array of areas
+ *   @num_areas : number of areas
+ *
+ * RETURN     : true --  area is in valid range
+ *              false -- not valid
+ *==========================================================================*/
+bool QCameraParameters::validateCameraAreas(cam_area_t *areas, int num_areas)
+{
+    // Special case: a single all-zero area is the "default area" and valid.
+    if (num_areas == 1 &&
+        areas[0].rect.left == 0 &&
+        areas[0].rect.top == 0 &&
+        areas[0].rect.width == 0 &&
+        areas[0].rect.height == 0 &&
+        areas[0].weight == 0) {
+        return true;
+    }
+
+    for (int i = 0; i < num_areas; i++) {
+        const cam_area_t &area = areas[i];
+        // Corners must lie inside the [-1000, 1000] camera coordinate space,
+        // the rectangle must have positive extent, and the weight must be
+        // within [1, 1000].
+        bool ok =
+            area.rect.left >= -1000 &&
+            area.rect.top >= -1000 &&
+            area.rect.width > 0 &&
+            area.rect.height > 0 &&
+            area.rect.left + area.rect.width <= 1000 &&
+            area.rect.top + area.rect.height <= 1000 &&
+            area.weight >= 1 && area.weight <= 1000;
+        if (!ok) {
+            return false;
+        }
+    }
+    return true;
+}
+
+/*===========================================================================
+ * FUNCTION   : isYUVFrameInfoNeeded
+ *
+ * DESCRIPTION: In AE-Bracket mode, we need set yuv buffer information for up-layer
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCameraParameters::isYUVFrameInfoNeeded()
+{
+    // YUV buffer info is only reported for NV21/NV16 picture formats.
+    bool yuvFormat = isNV21PictureFormat() || isNV16PictureFormat();
+    if (!yuvFormat) {
+        return false;
+    }
+
+    // Additionally require AE bracketing to be switched on.
+    const char *aecBracketStr =  get(KEY_QC_AE_BRACKET_HDR);
+    int value = lookupAttr(BRACKETING_MODES_MAP, PARAM_MAP_SIZE(BRACKETING_MODES_MAP),
+            aecBracketStr);
+    LOGH("aecBracketStr=%s, value=%d.", aecBracketStr, value);
+    return (value == CAM_EXP_BRACKETING_ON);
+}
+
+/*===========================================================================
+ * FUNCTION   : getFrameFmtString
+ *
+ * DESCRIPTION: get string name of frame format
+ *
+ * PARAMETERS :
+ *   @fmt     : frame format
+ *
+ * RETURN     : string name of frame format
+ *==========================================================================*/
+const char *QCameraParameters::getFrameFmtString(cam_format_t fmt)
+{
+    // Reverse lookup of the format enum in the picture-type map; the result
+    // for an unknown value is whatever lookupNameByValue yields.
+    return lookupNameByValue(PICTURE_TYPES_MAP, PARAM_MAP_SIZE(PICTURE_TYPES_MAP), fmt);
+}
+
+/*===========================================================================
+ * FUNCTION   : setDcrf
+ *
+ * DESCRIPTION: Enable/Disable DCRF (dual-camera-range-finding)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::setDcrf()
+{
+    char prop[PROPERTY_VALUE_MAX] = {0};
+
+    // Default: DCRF off (single-camera operation assumed).
+    m_bDcrfEnabled = 0;
+
+    // In dual-camera mode DCRF defaults to on, unless overridden by the
+    // persist.camera.dcrf.enable system property.
+    if (CAM_SYNC_RELATED_SENSORS_ON == m_relCamSyncInfo.sync_control) {
+        property_get("persist.camera.dcrf.enable", prop, "1");
+        m_bDcrfEnabled = atoi(prop);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setRelatedCamSyncInfo
+ *
+ * DESCRIPTION: set the related cam info parameters
+ * the related cam info is cached into params to make some decisions beforehand
+ *
+ * PARAMETERS :
+ *   @info  : ptr to related cam info parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRelatedCamSyncInfo(
+        cam_sync_related_sensors_event_info_t* info)
+{
+    // Cache the related-camera info so later decisions (e.g. setDcrf) can
+    // consult it without another query.
+    if (info == NULL) {
+        LOGE("info buffer is null");
+        return UNKNOWN_ERROR;
+    }
+    memcpy(&m_relCamSyncInfo, info,
+            sizeof(cam_sync_related_sensors_event_info_t));
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getRelatedCamSyncInfo
+ *
+ * DESCRIPTION:returns the related cam sync info for this HWI instance
+ *
+ * PARAMETERS :none
+ *
+ * RETURN     : const pointer to cam_sync_related_sensors_event_info_t
+ *==========================================================================*/
+const cam_sync_related_sensors_event_info_t*
+        QCameraParameters::getRelatedCamSyncInfo(void)
+{
+    // Accessor for the info cached by setRelatedCamSyncInfo(); the returned
+    // pointer refers to member storage owned by this object.
+    return &m_relCamSyncInfo;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFrameSyncEnabled
+ *
+ * DESCRIPTION: sets whether frame sync is enabled
+ *
+ * PARAMETERS :
+ *   @enable  : flag whether to enable or disable frame sync
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFrameSyncEnabled(bool enable)
+{
+    // Cache-only setter; nothing is sent to the backend here.
+    m_bFrameSyncEnabled = enable;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : isFrameSyncEnabled
+ *
+ * DESCRIPTION: returns whether frame sync is enabled
+ *
+ * PARAMETERS :none
+ *
+ * RETURN     : bool indicating whether frame sync is enabled
+ *==========================================================================*/
+bool QCameraParameters::isFrameSyncEnabled(void)
+{
+    // Reports the flag last stored via setFrameSyncEnabled().
+    return m_bFrameSyncEnabled;
+}
+
+/*===========================================================================
+ * FUNCTION   : bundleRelatedCameras
+ *
+ * DESCRIPTION: send trigger for bundling related camera sessions in the server
+ *
+ * PARAMETERS :
+ *   @sync        :indicates whether syncing is On or Off
+ *   @sessionid  :session id for other camera session
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::bundleRelatedCameras(bool sync,
+        uint32_t sessionid)
+{
+    int32_t rc = NO_ERROR;
+
+    if (NULL == m_pCamOpsTbl) {
+        LOGE("Ops not initialized");
+        return NO_INIT;
+    }
+
+    LOGD("Sending Bundling cmd sync %d, SessionId %d ",
+            sync, sessionid);
+
+    if(m_pRelCamSyncBuf) {
+        if(sync) {
+            m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
+        }
+        else {
+            m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
+        }
+        // Forward the cached related-cam mode/type along with the peer
+        // session id, then trigger the bundling in the backend.
+        m_pRelCamSyncBuf->mode = m_relCamSyncInfo.mode;
+        m_pRelCamSyncBuf->type = m_relCamSyncInfo.type;
+        m_pRelCamSyncBuf->related_sensor_session_id = sessionid;
+        rc = m_pCamOpsTbl->ops->sync_related_sensors(
+                m_pCamOpsTbl->camera_handle, m_pRelCamSyncBuf);
+    } else {
+        // Fixed format-argument mismatch: 'rc' was passed with no matching
+        // conversion specifier in the format string.
+        LOGE("Related Cam SyncBuffer not allocated");
+        return NO_INIT;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getRelatedCamCalibration
+ *
+ * DESCRIPTION: fetch the related camera subsystem calibration data
+ *
+ * PARAMETERS :
+ *   @calib  : calibration data fetched
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getRelatedCamCalibration(
+        cam_related_system_calibration_data_t* calib)
+{
+    int32_t rc = NO_ERROR;
+
+    // Output pointer must be valid; calibration is copied into it below.
+    if(!calib) {
+        return BAD_TYPE;
+    }
+
+    // Issue a GET batch for the dual-cam calibration blob.
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        LOGE("Failed to initialize group update table");
+        return BAD_TYPE;
+    }
+
+    ADD_GET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+            CAM_INTF_PARM_RELATED_SENSORS_CALIBRATION);
+
+    rc = commitGetBatch();
+    if (rc != NO_ERROR) {
+        LOGE("Failed to get related cam calibration info");
+        return rc;
+    }
+
+    // Copy the backend's answer out of the parameter buffer.
+    READ_PARAM_ENTRY(m_pParamBuf,
+            CAM_INTF_PARM_RELATED_SENSORS_CALIBRATION, *calib);
+
+    // Debug dump of the main-camera calibration fields.
+    LOGD("CALIB version %d ", calib->calibration_format_version);
+    LOGD("CALIB normalized_focal_length %f ",
+            calib->main_cam_specific_calibration.normalized_focal_length);
+    LOGD("CALIB native_sensor_resolution_width %d ",
+            calib->main_cam_specific_calibration.native_sensor_resolution_width);
+    LOGD("CALIB native_sensor_resolution_height %d ",
+            calib->main_cam_specific_calibration.native_sensor_resolution_height);
+    LOGD("CALIB sensor_resolution_width %d ",
+            calib->main_cam_specific_calibration.calibration_sensor_resolution_width);
+    LOGD("CALIB sensor_resolution_height %d ",
+            calib->main_cam_specific_calibration.calibration_sensor_resolution_height);
+    LOGD("CALIB focal_length_ratio %f ",
+            calib->main_cam_specific_calibration.focal_length_ratio);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : initBatchUpdate
+ *
+ * DESCRIPTION: init camera parameters buf entries
+ *
+ * PARAMETERS :
+ *   @p_table : ptr to parameter buffer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::initBatchUpdate(parm_buffer_t *p_table)
+{
+    // Start a fresh batch: drop any staged key/value overrides and clear the
+    // shared parameter buffer.
+    m_tempMap.clear();
+    clear_metadata_buffer(p_table);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : commitSetBatch
+ *
+ * DESCRIPTION: commit all set parameters in the batch work to backend
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::commitSetBatch()
+{
+    if (NULL == m_pParamBuf) {
+        LOGE("Params not initialized");
+        return NO_INIT;
+    }
+
+    // Scan for at least one staged (valid) entry so we can skip the backend
+    // call entirely when the batch is empty.
+    int32_t idx = 0;
+    while (idx < CAM_INTF_PARM_MAX && !m_pParamBuf->is_valid[idx]) {
+        idx++;
+    }
+
+    if (NULL == m_pCamOpsTbl) {
+        LOGE("Ops not initialized");
+        return NO_INIT;
+    }
+
+    int32_t rc = NO_ERROR;
+    if (idx < CAM_INTF_PARM_MAX) {
+        rc = m_pCamOpsTbl->ops->set_parms(m_pCamOpsTbl->camera_handle, m_pParamBuf);
+    }
+    if (NO_ERROR == rc) {
+        // Backend accepted the batch: fold the staged string params from the
+        // temp map into the public parameter map.
+        rc = commitParamChanges();
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : commitGetBatch
+ *
+ * DESCRIPTION: commit all get parameters in the batch work to backend
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::commitGetBatch()
+{
+    int32_t i = 0;
+
+    if (NULL == m_pParamBuf) {
+        LOGE("Params not initialized");
+        return NO_INIT;
+    }
+
+    /* Loop to check if at least one entry is valid */
+    for(i = 0; i < CAM_INTF_PARM_MAX; i++){
+        if(m_pParamBuf->is_valid[i])
+            break;
+    }
+
+    if (NULL == m_pCamOpsTbl) {
+        LOGE("Ops not initialized");
+        return NO_INIT;
+    }
+
+    // Only call into the backend when at least one entry was requested; an
+    // empty batch trivially succeeds. (Removed the unreachable trailing
+    // 'return rc;' and the now-unused rc variable.)
+    if (i < CAM_INTF_PARM_MAX) {
+        return m_pCamOpsTbl->ops->get_parms(m_pCamOpsTbl->camera_handle, m_pParamBuf);
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : updateParamEntry
+ *
+ * DESCRIPTION: update a parameter entry in the local temp map obj
+ *
+ * PARAMETERS :
+ *   @key     : key of the entry
+ *   @value   : value of the entry
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateParamEntry(const char *key, const char *value)
+{
+    // Stage the pair in the temp map; it only reaches the live parameter
+    // set when commitParamChanges() runs.
+    String8 mapKey(key);
+    String8 mapValue(value);
+    m_tempMap.replaceValueFor(mapKey, mapValue);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : commitParamChanges
+ *
+ * DESCRIPTION: commit all changes in local temp map obj into parameter obj
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::commitParamChanges()
+{
+    // Flush every staged key/value pair into the live parameter set
+    const size_t count = m_tempMap.size();
+    for (size_t idx = 0; idx < count; idx++) {
+        set(m_tempMap.keyAt(idx), m_tempMap.valueAt(idx));
+    }
+    m_tempMap.clear();
+
+    // Latch the pending local state into the active flags
+    m_bRecordingHint = m_bRecordingHint_new;
+    m_bZslMode = m_bZslMode_new;
+
+    /* After applying scene mode auto,
+      Camera effects need to be reapplied */
+    if (m_bSceneTransitionAuto) {
+        m_bUpdateEffects = true;
+        m_bSceneTransitionAuto = false;
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraReprocScaleParam
+ *
+ * DESCRIPTION: constructor of QCameraReprocScaleParam
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraParameters::QCameraReprocScaleParam::QCameraReprocScaleParam()
+  : mScaleEnabled(false),
+    mIsUnderScaling(false),
+    mNeedScaleCnt(0),
+    mSensorSizeTblCnt(0),
+    mSensorSizeTbl(NULL),
+    mTotalSizeTblCnt(0)
+{
+    // Clear the cached picture dimensions and both size tables
+    mPicSizeFromAPK.width = mPicSizeFromAPK.height = 0;
+    mPicSizeSetted.width = mPicSizeSetted.height = 0;
+    memset(mNeedScaledSizeTbl, 0, sizeof(mNeedScaledSizeTbl));
+    memset(mTotalSizeTbl, 0, sizeof(mTotalSizeTbl));
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraReprocScaleParam
+ *
+ * DESCRIPTION: destructor of QCameraReprocScaleParam
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraParameters::QCameraReprocScaleParam::~QCameraReprocScaleParam()
+{
+    // Nothing to release: this class allocates no heap memory; size tables
+    // are fixed member arrays and mSensorSizeTbl is a non-owned pointer.
+}
+
+/*===========================================================================
+ * FUNCTION   : setScaleSizeTbl
+ *
+ * DESCRIPTION: re-set picture size table with dimensions that need scaling if Reproc Scale is enabled
+ *
+ * PARAMETERS :
+ *   @scale_cnt   : count of picture sizes that want scale
+ *   @scale_tbl    : picture size table that want scale
+ *   @org_cnt     : sensor supported picture size count
+ *   @org_tbl      : sensor supported picture size table
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::QCameraReprocScaleParam::setScaleSizeTbl(size_t scale_cnt,
+        cam_dimension_t *scale_tbl, size_t org_cnt, cam_dimension_t *org_tbl)
+{
+    int32_t rc = NO_ERROR;
+    size_t i;
+    mNeedScaleCnt = 0;
+
+    // Scaling disabled or either table missing/empty: nothing to merge.
+    // (size_t is unsigned, so the former "<= 0" tests were really "== 0".)
+    if (!mScaleEnabled || (scale_cnt == 0) || (scale_tbl == NULL) ||
+            (org_cnt == 0) || (org_tbl == NULL)) {
+        return BAD_VALUE;    // Do not need scale, so also need not reset picture size table
+    }
+
+    // Remember the sensor-native table (non-owning) and filter the scale
+    // request list down to the entries this module can actually serve
+    mSensorSizeTblCnt = org_cnt;
+    mSensorSizeTbl = org_tbl;
+    mNeedScaleCnt = checkScaleSizeTable(scale_cnt, scale_tbl, org_cnt, org_tbl);
+    if (mNeedScaleCnt == 0) {
+        LOGE("do not have picture sizes need scaling.");
+        return BAD_VALUE;
+    }
+
+    if (mNeedScaleCnt + org_cnt > MAX_SIZES_CNT) {
+        LOGE("picture size list exceed the max count.");
+        return BAD_VALUE;
+    }
+
+    if (mNeedScaleCnt > MAX_SCALE_SIZES_CNT) {
+        // %zu matches size_t; the old "%d" specifier was mismatched
+        LOGE("Error!! mNeedScaleCnt (%zu) is more than MAX_SCALE_SIZES_CNT",
+                 mNeedScaleCnt);
+        return BAD_VALUE;
+    }
+
+    // All validation passed -- only now commit the merged table size so a
+    // failed call cannot leave mTotalSizeTblCnt in a half-updated state
+    mTotalSizeTblCnt = mNeedScaleCnt + org_cnt;
+
+    // Scaled sizes come first in the merged table ...
+    for (i = 0; i < mNeedScaleCnt; i++) {
+        mTotalSizeTbl[i].width = mNeedScaledSizeTbl[i].width;
+        mTotalSizeTbl[i].height = mNeedScaledSizeTbl[i].height;
+        LOGH("scale picture size: i =%zu, width=%d, height=%d.",
+            i, mTotalSizeTbl[i].width, mTotalSizeTbl[i].height);
+    }
+    // ... followed by the sensor-native sizes
+    for (; i < mTotalSizeTblCnt; i++) {
+        mTotalSizeTbl[i].width = org_tbl[i - mNeedScaleCnt].width;
+        mTotalSizeTbl[i].height = org_tbl[i - mNeedScaleCnt].height;
+        LOGH("sensor supportted picture size: i =%zu, width=%d, height=%d.",
+            i, mTotalSizeTbl[i].width, mTotalSizeTbl[i].height);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getScaleSizeTblCnt
+ *
+ * DESCRIPTION: get picture size cnt that need scale
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : size_t type of picture size count
+ *==========================================================================*/
+size_t QCameraParameters::QCameraReprocScaleParam::getScaleSizeTblCnt()
+{
+    // Number of entries accepted into mNeedScaledSizeTbl by
+    // checkScaleSizeTable(); 0 until setScaleSizeTbl() succeeds.
+    return mNeedScaleCnt;
+}
+
+/*===========================================================================
+ * FUNCTION   : getScaledSizeTbl
+ *
+ * DESCRIPTION: get picture size table that need scale
+ *
+ * PARAMETERS :  none
+ *
+ * RETURN     : cam_dimension_t list of picture size table
+ *==========================================================================*/
+cam_dimension_t *QCameraParameters::QCameraReprocScaleParam::getScaledSizeTbl()
+{
+    // Table is only meaningful while scaling is enabled
+    return mScaleEnabled ? mNeedScaledSizeTbl : NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : setScaleEnable
+ *
+ * DESCRIPTION: enable or disable Reproc Scale
+ *
+ * PARAMETERS :
+ *   @enabled : enable: 1; disable 0
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::QCameraReprocScaleParam::setScaleEnable(bool enabled)
+{
+    // Master switch for reprocess-scale support; gates the other accessors
+    mScaleEnabled = enabled;
+}
+
+/*===========================================================================
+ * FUNCTION   : isScaleEnabled
+ *
+ * DESCRIPTION: check if Reproc Scale is enabled
+ *
+ * PARAMETERS :  none
+ *
+ * RETURN     : bool type of status
+ *==========================================================================*/
+bool QCameraParameters::QCameraReprocScaleParam::isScaleEnabled()
+{
+    // True once setScaleEnable(true) has been called
+    return mScaleEnabled;
+}
+
+/*===========================================================================
+ * FUNCTION   : isScalePicSize
+ *
+ * DESCRIPTION: check if current picture size is from Scale Table
+ *
+ * PARAMETERS :
+ *   @width     : current picture width
+ *   @height    : current picture height
+ *
+ * RETURN     : bool type of status
+ *==========================================================================*/
+bool QCameraParameters::QCameraReprocScaleParam::isScalePicSize(int width, int height)
+{
+    // An empty scale table can never contain the requested size
+    if (mNeedScaleCnt == 0)
+        return FALSE;
+
+    size_t idx = 0;
+    while (idx < mNeedScaleCnt) {
+        if ((width == mNeedScaledSizeTbl[idx].width) &&
+                (height == mNeedScaledSizeTbl[idx].height)) {
+            // Requested size is one of the scale-table entries
+            return TRUE;
+        }
+        idx++;
+    }
+
+    LOGE("Not in scale picture size table.");
+    return FALSE;
+}
+
+/*===========================================================================
+ * FUNCTION   : isValidatePicSize
+ *
+ * DESCRIPTION: check if current picture size is validate
+ *
+ * PARAMETERS :
+ *   @width     : current picture width
+ *   @height    : current picture height
+ *
+ * RETURN     : bool type of status
+ *==========================================================================*/
+bool QCameraParameters::QCameraReprocScaleParam::isValidatePicSize(int width, int height)
+{
+    // A size is valid if it is sensor-native ...
+    for (size_t idx = 0; idx < mSensorSizeTblCnt; idx++) {
+        if ((width == mSensorSizeTbl[idx].width) &&
+                (height == mSensorSizeTbl[idx].height)) {
+            return TRUE;
+        }
+    }
+
+    // ... or one of the sizes served via reproc scaling
+    for (size_t idx = 0; idx < mNeedScaleCnt; idx++) {
+        if ((width == mNeedScaledSizeTbl[idx].width) &&
+                (height == mNeedScaledSizeTbl[idx].height)) {
+            return TRUE;
+        }
+    }
+
+    LOGE("Invalidate input picture size.");
+    return FALSE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSensorSupportedPicSize
+ *
+ * DESCRIPTION: set sensor supported picture size.
+ *    For Snapshot stream size configuration, we need use sensor supported size.
+ *    We will use CPP to do Scaling based on output Snapshot stream.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::QCameraReprocScaleParam::setSensorSupportedPicSize()
+{
+    // Only meaningful while a scale request is active and the sensor
+    // table has at least one entry
+    if (!mIsUnderScaling || (mSensorSizeTblCnt == 0))
+        return BAD_VALUE;
+
+    // Use the first table entry (the max sensor-supported size -- see the
+    // header comment) as the capture size; CPP scales to the requested one
+    mPicSizeSetted.width = mSensorSizeTbl[0].width;
+    mPicSizeSetted.height = mSensorSizeTbl[0].height;
+
+    return NO_ERROR;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : setValidatePicSize
+ *
+ * DESCRIPTION: set sensor supported size and change scale status.
+ *
+ * PARAMETERS :
+ *   @width    : input picture width
+ *   @height   : input picture height
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::QCameraReprocScaleParam::setValidatePicSize(int &width,int &height)
+{
+    if (!mScaleEnabled)
+        return BAD_VALUE;
+
+    mIsUnderScaling = FALSE; //default: not under scale
+
+    if (isScalePicSize(width, height)) {
+        // Requested size needs CPP upscaling: remember what the app asked
+        // for, then substitute the sensor-supported capture size
+        mIsUnderScaling = TRUE;
+        mPicSizeFromAPK.width = width;
+        mPicSizeFromAPK.height = height;
+
+        if (setSensorSupportedPicSize() != NO_ERROR)
+            return BAD_VALUE;
+
+        //re-set picture size to sensor supported size
+        width = mPicSizeSetted.width;
+        height = mPicSizeSetted.height;
+        LOGH("mPicSizeFromAPK- with=%d, height=%d, mPicSizeSetted- with =%d, height=%d.",
+             mPicSizeFromAPK.width, mPicSizeFromAPK.height, mPicSizeSetted.width, mPicSizeSetted.height);
+    } else {
+        // No scaling needed, but the size must still be sensor-supported.
+        // (The redundant re-assignment of mIsUnderScaling = FALSE that was
+        // here duplicated the default above and has been removed.)
+        if (!isValidatePicSize(width, height)) {
+            LOGE("invalidate input picture size.");
+            return BAD_VALUE;
+        }
+        mPicSizeSetted.width = width;
+        mPicSizeSetted.height = height;
+    }
+
+    LOGH("X. mIsUnderScaling=%d, width=%d, height=%d.", mIsUnderScaling, width, height);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getPicSizeFromAPK
+ *
+ * DESCRIPTION: get picture size that get from APK
+ *
+ * PARAMETERS :
+ *   @width     : input width
+ *   @height    : input height
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::QCameraReprocScaleParam::getPicSizeFromAPK(int &width, int &height)
+{
+    // The app-requested size is only recorded while scaling is active
+    int32_t rc = BAD_VALUE;
+    if (mIsUnderScaling) {
+        width = mPicSizeFromAPK.width;
+        height = mPicSizeFromAPK.height;
+        rc = NO_ERROR;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getPicSizeSetted
+ *
+ * DESCRIPTION: get picture size that setted into mm-camera
+ *
+ * PARAMETERS :
+ *   @width     : input width
+ *   @height    : input height
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::QCameraReprocScaleParam::getPicSizeSetted(int &width, int &height)
+{
+    // Returns the dimensions actually programmed into mm-camera; when
+    // scaling is active this differs from the app-requested size.
+    // Always succeeds.
+    width = mPicSizeSetted.width;
+    height = mPicSizeSetted.height;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : isUnderScaling
+ *
+ * DESCRIPTION: check if we are in Reproc Scaling requirment
+ *
+ * PARAMETERS :  none
+ *
+ * RETURN     : bool type of status
+ *==========================================================================*/
+bool QCameraParameters::QCameraReprocScaleParam::isUnderScaling()
+{
+    // Set by setValidatePicSize() when the requested picture size must be
+    // produced by reproc upscaling
+    return mIsUnderScaling;
+}
+
+/*===========================================================================
+ * FUNCTION   : checkScaleSizeTable
+ *
+ * DESCRIPTION: check PICTURE_SIZE_NEED_SCALE to choose
+ *
+ * PARAMETERS :
+ *   @scale_cnt   : count of picture sizes that want scale
+ *   @scale_tbl    : picture size table that want scale
+ *   @org_cnt     : sensor supported picture size count
+ *   @org_tbl      : sensor supported picture size table
+ *
+ * RETURN     : bool type of status
+ *==========================================================================*/
+size_t QCameraParameters::QCameraReprocScaleParam::checkScaleSizeTable(size_t scale_cnt,
+        cam_dimension_t *scale_tbl, size_t org_cnt, cam_dimension_t *org_tbl)
+{
+    size_t stbl_cnt = 0;
+    size_t temp_cnt = 0;
+    ssize_t i = 0;
+    // Nothing to validate if either table is missing or empty
+    if(scale_cnt <=0 || scale_tbl == NULL || org_tbl == NULL || org_cnt <= 0)
+        return stbl_cnt;
+
+    //get validate scale size table. Currently we only support:
+    // 1. upscale. The scale size must larger than max sensor supported size
+    // 2. Scale dimension ratio must be same as the max sensor supported size.
+    temp_cnt = scale_cnt;
+    // Walk the scale table backwards, dropping trailing entries that do not
+    // exceed org_tbl[0] (assumed to be the max sensor size, with the table
+    // sorted largest-first -- TODO confirm ordering with the caller).
+    for (i = (ssize_t)(scale_cnt - 1); i >= 0; i--) {
+        if (scale_tbl[i].width > org_tbl[0].width ||
+                (scale_tbl[i].width == org_tbl[0].width &&
+                    scale_tbl[i].height > org_tbl[0].height)) {
+            //get the smallest scale size
+            break;
+        }
+        temp_cnt--;
+    }
+
+    //check dimension ratio
+    // Keep only entries whose aspect ratio matches the max sensor size
+    // within ASPECT_TOLERANCE; matches are compacted into mNeedScaledSizeTbl.
+    double supported_ratio = (double)org_tbl[0].width / (double)org_tbl[0].height;
+    for (i = 0; i < (ssize_t)temp_cnt; i++) {
+        double cur_ratio = (double)scale_tbl[i].width / (double)scale_tbl[i].height;
+        if (fabs(supported_ratio - cur_ratio) > ASPECT_TOLERANCE) {
+            continue;
+        }
+        mNeedScaledSizeTbl[stbl_cnt].width = scale_tbl[i].width;
+        mNeedScaledSizeTbl[stbl_cnt].height= scale_tbl[i].height;
+        stbl_cnt++;
+    }
+
+    return stbl_cnt;
+}
+
+/*===========================================================================
+ * FUNCTION   : getTotalSizeTblCnt
+ *
+ * DESCRIPTION: get total picture size count after adding dimensions that need scaling
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : uint8_t type of picture size count
+ *==========================================================================*/
+size_t QCameraParameters::QCameraReprocScaleParam::getTotalSizeTblCnt()
+{
+    // Combined count of scale-table plus sensor-native sizes, computed by
+    // setScaleSizeTbl(); 0 before that call succeeds
+    return mTotalSizeTblCnt;
+}
+
+/*===========================================================================
+ * FUNCTION   : getTotalSizeTbl
+ *
+ * DESCRIPTION: get picture size table after adding dimensions that need scaling
+ *
+ * PARAMETERS :  none
+ *
+ * RETURN     : cam_dimension_t list of picture size table
+ *==========================================================================*/
+cam_dimension_t *QCameraParameters::QCameraReprocScaleParam::getTotalSizeTbl()
+{
+    // Merged table is only meaningful while scaling is enabled
+    return mScaleEnabled ? mTotalSizeTbl : NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : setEztune
+ *
+ * DESCRIPTION: Enable/Disable EZtune
+ *
+ *==========================================================================*/
+int32_t QCameraParameters::setEztune()
+{
+    // Latch the EZtune enable flag from the persist property (default off)
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.eztune.enable", prop, "0");
+    m_bEztuneEnabled = atoi(prop);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : isHDREnabled
+ *
+ * DESCRIPTION: if HDR is enabled
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCameraParameters::isHDREnabled()
+{
+    // HDR counts as on when enabled explicitly (m_bHDREnabled) or selected
+    // via the HDR scene flag (m_HDRSceneEnabled)
+    return ((m_bHDREnabled || m_HDRSceneEnabled));
+}
+
+/*===========================================================================
+ * FUNCTION   : isAVTimerEnabled
+ *
+ * DESCRIPTION: if AVTimer is enabled
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCameraParameters::isAVTimerEnabled()
+{
+    // Simple accessor for the AV timer flag
+    return m_bAVTimerEnabled;
+}
+
+/*===========================================================================
+* FUNCTION   : isDISEnabled
+*
+* DESCRIPTION: if DIS is enabled
+*
+* PARAMETERS : none
+*
+* RETURN    : true: needed
+*               false: no need
+*==========================================================================*/
+bool QCameraParameters::isDISEnabled()
+{
+    // Simple accessor for the digital image stabilization flag
+    return m_bDISEnabled;
+}
+
+/*===========================================================================
+* FUNCTION   : getISType
+*
+* DESCRIPTION: returns IS type
+*
+* PARAMETERS : none
+*
+* RETURN     : IS type
+*
+*==========================================================================*/
+cam_is_type_t QCameraParameters::getISType()
+{
+    // Current image-stabilization type for capture/video streams
+    return mIsType;
+}
+
+/*===========================================================================
+* FUNCTION   : getPreviewISType
+*
+* DESCRIPTION: returns IS type for preview
+*
+* PARAMETERS : none
+*
+* RETURN     : IS type
+*
+*==========================================================================*/
+cam_is_type_t QCameraParameters::getPreviewISType()
+{
+    // Image-stabilization type applied to the preview stream
+    return mIsTypePreview;
+}
+
+/*===========================================================================
+ * FUNCTION   : MobicatMask
+ *
+ * DESCRIPTION: returns mobicat mask
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : mobicat mask
+ *
+ *==========================================================================*/
+uint8_t QCameraParameters::getMobicatMask()
+{
+    // Bitmask controlling mobicat metadata tagging
+    return m_MobiMask;
+}
+
+/*===========================================================================
+ * FUNCTION   : sendStreamConfigInfo
+ *
+ * DESCRIPTION: send Stream config info.
+ *
+ * PARAMETERS :
+ *   @stream_config_info: Stream config information
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+bool QCameraParameters::sendStreamConfigInfo(cam_stream_size_info_t &stream_config_info) {
+    // NOTE(review): declared bool but actually returns int32_t status codes
+    // (NO_ERROR / BAD_TYPE / BAD_VALUE / rc). Callers treating the result
+    // as a success-bool get inverted semantics (NO_ERROR==0 -> false,
+    // errors -> true) -- confirm caller expectations; the signature is kept
+    // unchanged here for compatibility.
+    int32_t rc = NO_ERROR;
+    // Start a fresh parameter batch for the stream-info entry
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        LOGE("Failed to initialize group update table");
+        return BAD_TYPE;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+            CAM_INTF_META_STREAM_INFO, stream_config_info)) {
+        LOGE("Failed to update table");
+        return BAD_VALUE;
+    }
+
+    // Push the batch to the backend
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        LOGE("Failed to set stream info parm");
+        return rc;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setStreamConfigure
+ *
+ * DESCRIPTION: set stream type, stream dimension for all configured streams.
+ *
+ * PARAMETERS :
+ *   @isCapture: Whether this configureation is for an image capture
+ *   @previewAsPostview: Use preview as postview
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+bool QCameraParameters::setStreamConfigure(bool isCapture,
+        bool previewAsPostview, bool resetConfig) {
+
+    int32_t rc = NO_ERROR;
+    cam_stream_size_info_t stream_config_info;
+    char value[PROPERTY_VALUE_MAX];
+    bool raw_yuv = false;
+    bool raw_capture = false;
+
+    if ( m_pParamBuf == NULL ) {
+        return NO_INIT;
+    }
+
+    memset(&stream_config_info, 0, sizeof(stream_config_info));
+    stream_config_info.num_streams = 0;
+
+    if (resetConfig) {
+        LOGH("Reset stream config!!");
+        rc = sendStreamConfigInfo(stream_config_info);
+        LOGH("Done Resetting stream config!!");
+        return rc;
+    }
+
+    stream_config_info.hfr_mode       = static_cast<cam_hfr_mode_t>(mHfrMode);
+    stream_config_info.buf_alignment  = m_pCapability->buf_alignment;
+    stream_config_info.min_stride     = m_pCapability->min_stride;
+    stream_config_info.min_scanline   = m_pCapability->min_scanline;
+    stream_config_info.batch_size = getBufBatchCount();
+
+    property_get("persist.camera.raw_yuv", value, "0");
+    raw_yuv = atoi(value) > 0 ? true : false;
+
+    if (isZSLMode() && getRecordingHintValue() != true) {
+        stream_config_info.type[stream_config_info.num_streams] =
+            CAM_STREAM_TYPE_PREVIEW;
+        getStreamDimension(CAM_STREAM_TYPE_PREVIEW,
+                stream_config_info.stream_sizes[stream_config_info.num_streams]);
+        updatePpFeatureMask(CAM_STREAM_TYPE_PREVIEW);
+        stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+                mStreamPpMask[CAM_STREAM_TYPE_PREVIEW];
+        getStreamFormat(CAM_STREAM_TYPE_PREVIEW,
+                stream_config_info.format[stream_config_info.num_streams]);
+        stream_config_info.num_streams++;
+
+        stream_config_info.type[stream_config_info.num_streams] =
+                CAM_STREAM_TYPE_ANALYSIS;
+        updatePpFeatureMask(CAM_STREAM_TYPE_ANALYSIS);
+        getStreamDimension(CAM_STREAM_TYPE_ANALYSIS,
+                stream_config_info.stream_sizes[stream_config_info.num_streams]);
+        stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+                mStreamPpMask[CAM_STREAM_TYPE_ANALYSIS];
+        getStreamFormat(CAM_STREAM_TYPE_ANALYSIS,
+                stream_config_info.format[stream_config_info.num_streams]);
+        stream_config_info.num_streams++;
+
+        stream_config_info.type[stream_config_info.num_streams] =
+                CAM_STREAM_TYPE_SNAPSHOT;
+        getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT,
+                stream_config_info.stream_sizes[stream_config_info.num_streams]);
+        updatePpFeatureMask(CAM_STREAM_TYPE_SNAPSHOT);
+        stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+                mStreamPpMask[CAM_STREAM_TYPE_SNAPSHOT];
+        getStreamFormat(CAM_STREAM_TYPE_SNAPSHOT,
+                stream_config_info.format[stream_config_info.num_streams]);
+        stream_config_info.num_streams++;
+
+        if (isUBWCEnabled() && getRecordingHintValue() != true) {
+            cam_format_t fmt;
+            getStreamFormat(CAM_STREAM_TYPE_PREVIEW,fmt);
+            if (fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
+                stream_config_info.type[stream_config_info.num_streams] =
+                        CAM_STREAM_TYPE_CALLBACK;
+                getStreamDimension(CAM_STREAM_TYPE_CALLBACK,
+                        stream_config_info.stream_sizes[stream_config_info.num_streams]);
+                updatePpFeatureMask(CAM_STREAM_TYPE_CALLBACK);
+                stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+                        mStreamPpMask[CAM_STREAM_TYPE_CALLBACK];
+                getStreamFormat(CAM_STREAM_TYPE_CALLBACK,
+                        stream_config_info.format[stream_config_info.num_streams]);
+                stream_config_info.num_streams++;
+            }
+        }
+
+    } else if (!isCapture) {
+        if (m_bRecordingHint) {
+            if (m_bDISEnabled) {
+                char value[PROPERTY_VALUE_MAX];
+                // Make default value for IS_TYPE as IS_TYPE_EIS_2_0
+                property_get("persist.camera.is_type", value, "4");
+                mIsType = static_cast<cam_is_type_t>(atoi(value));
+                // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
+                property_get("persist.camera.is_type_preview", value, "4");
+                mIsTypePreview = static_cast<cam_is_type_t>(atoi(value));
+            } else {
+                mIsType = IS_TYPE_NONE;
+                mIsTypePreview = IS_TYPE_NONE;
+            }
+            stream_config_info.is_type[stream_config_info.num_streams] = mIsType;
+            stream_config_info.type[stream_config_info.num_streams] =
+                    CAM_STREAM_TYPE_SNAPSHOT;
+            getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT,
+                    stream_config_info.stream_sizes[stream_config_info.num_streams]);
+            updatePpFeatureMask(CAM_STREAM_TYPE_SNAPSHOT);
+            stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+                    mStreamPpMask[CAM_STREAM_TYPE_SNAPSHOT];
+            getStreamFormat(CAM_STREAM_TYPE_SNAPSHOT,
+                        stream_config_info.format[stream_config_info.num_streams]);
+            stream_config_info.num_streams++;
+            stream_config_info.is_type[stream_config_info.num_streams] = mIsType;
+            stream_config_info.type[stream_config_info.num_streams] =
+                    CAM_STREAM_TYPE_VIDEO;
+            getStreamDimension(CAM_STREAM_TYPE_VIDEO,
+                    stream_config_info.stream_sizes[stream_config_info.num_streams]);
+            updatePpFeatureMask(CAM_STREAM_TYPE_VIDEO);
+            stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+                    mStreamPpMask[CAM_STREAM_TYPE_VIDEO];
+            getStreamFormat(CAM_STREAM_TYPE_VIDEO,
+                    stream_config_info.format[stream_config_info.num_streams]);
+            stream_config_info.num_streams++;
+        }
+
+        /* Analysis stream is needed by DCRF regardless of recording hint */
+        if ((getDcrf() == true) ||
+                (getRecordingHintValue() != true) ||
+                (fdModeInVideo())) {
+            stream_config_info.type[stream_config_info.num_streams] =
+                    CAM_STREAM_TYPE_ANALYSIS;
+            updatePpFeatureMask(CAM_STREAM_TYPE_ANALYSIS);
+            getStreamDimension(CAM_STREAM_TYPE_ANALYSIS,
+                    stream_config_info.stream_sizes[stream_config_info.num_streams]);
+            stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+                    mStreamPpMask[CAM_STREAM_TYPE_ANALYSIS];
+            getStreamFormat(CAM_STREAM_TYPE_ANALYSIS,
+                    stream_config_info.format[stream_config_info.num_streams]);
+            stream_config_info.num_streams++;
+        }
+
+        stream_config_info.type[stream_config_info.num_streams] =
+                CAM_STREAM_TYPE_PREVIEW;
+        getStreamDimension(CAM_STREAM_TYPE_PREVIEW,
+                stream_config_info.stream_sizes[stream_config_info.num_streams]);
+        updatePpFeatureMask(CAM_STREAM_TYPE_PREVIEW);
+        stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+                mStreamPpMask[CAM_STREAM_TYPE_PREVIEW];
+        getStreamFormat(CAM_STREAM_TYPE_PREVIEW,
+                    stream_config_info.format[stream_config_info.num_streams]);
+        stream_config_info.is_type[stream_config_info.num_streams] = mIsTypePreview;
+        stream_config_info.num_streams++;
+
+        if (isUBWCEnabled() && getRecordingHintValue() != true) {
+            cam_format_t fmt;
+            getStreamFormat(CAM_STREAM_TYPE_PREVIEW,fmt);
+            if (fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
+                stream_config_info.type[stream_config_info.num_streams] =
+                        CAM_STREAM_TYPE_CALLBACK;
+                getStreamDimension(CAM_STREAM_TYPE_CALLBACK,
+                        stream_config_info.stream_sizes[stream_config_info.num_streams]);
+                updatePpFeatureMask(CAM_STREAM_TYPE_CALLBACK);
+                stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+                        mStreamPpMask[CAM_STREAM_TYPE_CALLBACK];
+                getStreamFormat(CAM_STREAM_TYPE_CALLBACK,
+                        stream_config_info.format[stream_config_info.num_streams]);
+                stream_config_info.is_type[stream_config_info.num_streams] = mIsType;
+                stream_config_info.num_streams++;
+            }
+        }
+
+    } else {
+        if (isJpegPictureFormat() || isNV16PictureFormat() || isNV21PictureFormat()) {
+            if (!getofflineRAW()) {
+                stream_config_info.type[stream_config_info.num_streams] =
+                        CAM_STREAM_TYPE_SNAPSHOT;
+                getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT,
+                        stream_config_info.stream_sizes[stream_config_info.num_streams]);
+                updatePpFeatureMask(CAM_STREAM_TYPE_SNAPSHOT);
+                stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+                        mStreamPpMask[CAM_STREAM_TYPE_SNAPSHOT];
+                getStreamFormat(CAM_STREAM_TYPE_SNAPSHOT,
+                        stream_config_info.format[stream_config_info.num_streams]);
+                stream_config_info.is_type[stream_config_info.num_streams] = mIsType;
+                stream_config_info.num_streams++;
+            }
+
+            if (previewAsPostview) {
+                stream_config_info.type[stream_config_info.num_streams] =
+                        CAM_STREAM_TYPE_PREVIEW;
+                getStreamDimension(CAM_STREAM_TYPE_PREVIEW,
+                        stream_config_info.stream_sizes[stream_config_info.num_streams]);
+                updatePpFeatureMask(CAM_STREAM_TYPE_PREVIEW);
+                stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+                        mStreamPpMask[CAM_STREAM_TYPE_PREVIEW];
+                getStreamFormat(CAM_STREAM_TYPE_PREVIEW,
+                        stream_config_info.format[stream_config_info.num_streams]);
+                stream_config_info.is_type[stream_config_info.num_streams] = mIsType;
+                stream_config_info.num_streams++;
+            } else {
+                stream_config_info.type[stream_config_info.num_streams] =
+                        CAM_STREAM_TYPE_POSTVIEW;
+                getStreamDimension(CAM_STREAM_TYPE_POSTVIEW,
+                        stream_config_info.stream_sizes[stream_config_info.num_streams]);
+                updatePpFeatureMask(CAM_STREAM_TYPE_POSTVIEW);
+                stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+                        mStreamPpMask[CAM_STREAM_TYPE_POSTVIEW];
+                getStreamFormat(CAM_STREAM_TYPE_POSTVIEW,
+                        stream_config_info.format[stream_config_info.num_streams]);
+                stream_config_info.is_type[stream_config_info.num_streams] = mIsType;
+                stream_config_info.num_streams++;
+            }
+        } else {
+            raw_capture = true;
+            stream_config_info.type[stream_config_info.num_streams] =
+                    CAM_STREAM_TYPE_RAW;
+            getStreamDimension(CAM_STREAM_TYPE_RAW,
+                    stream_config_info.stream_sizes[stream_config_info.num_streams]);
+            updatePpFeatureMask(CAM_STREAM_TYPE_RAW);
+            stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+                    mStreamPpMask[CAM_STREAM_TYPE_RAW];
+            getStreamFormat(CAM_STREAM_TYPE_RAW,
+                    stream_config_info.format[stream_config_info.num_streams]);
+            stream_config_info.is_type[stream_config_info.num_streams] = mIsType;
+            stream_config_info.num_streams++;
+        }
+    }
+
+    if ((!raw_capture) && ((getofflineRAW() && !getRecordingHintValue())
+            || (raw_yuv))) {
+        cam_dimension_t max_dim = {0,0};
+        // Find the maximum dimension among all the streams
+        for (uint32_t j = 0; j < stream_config_info.num_streams; j++) {
+            if (stream_config_info.stream_sizes[j].width > max_dim.width) {
+                max_dim.width = stream_config_info.stream_sizes[j].width;
+            }
+            if (stream_config_info.stream_sizes[j].height > max_dim.height) {
+                max_dim.height = stream_config_info.stream_sizes[j].height;
+            }
+        }
+        LOGH("Max Dimension = %d X %d", max_dim.width, max_dim.height);
+        updateRAW(max_dim);
+        stream_config_info.type[stream_config_info.num_streams] =
+                CAM_STREAM_TYPE_RAW;
+        getStreamDimension(CAM_STREAM_TYPE_RAW,
+                stream_config_info.stream_sizes[stream_config_info.num_streams]);
+        updatePpFeatureMask(CAM_STREAM_TYPE_RAW);
+        stream_config_info.postprocess_mask[stream_config_info.num_streams] =
+                mStreamPpMask[CAM_STREAM_TYPE_RAW];
+        getStreamFormat(CAM_STREAM_TYPE_RAW,
+                stream_config_info.format[stream_config_info.num_streams]);
+        stream_config_info.num_streams++;
+    }
+    for (uint32_t k = 0; k < stream_config_info.num_streams; k++) {
+        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%llx Format = %d",
+                stream_config_info.type[k],
+                stream_config_info.stream_sizes[k].width,
+                stream_config_info.stream_sizes[k].height,
+                stream_config_info.postprocess_mask[k],
+                stream_config_info.format[k]);
+    }
+
+    rc = sendStreamConfigInfo(stream_config_info);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addOnlineRotation
+ *
+ * DESCRIPTION: send additional rotation information for specific stream
+ *
+ * PARAMETERS :
+ *   @rotation: jpeg rotation in degrees (expected 0/90/180/270)
+ *   @streamId: internal stream id the rotation applies to
+ *   @device_rotation: device rotation in degrees (expected 0/90/180/270)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::addOnlineRotation(uint32_t rotation, uint32_t streamId,
+        int32_t device_rotation)
+{
+    int32_t rc = NO_ERROR;
+    cam_rotation_info_t rotation_info;
+    memset(&rotation_info, 0, sizeof(cam_rotation_info_t));
+
+    /* Map jpeg rotation degrees to the backend enum; any unexpected value
+       falls back to ROTATE_0, matching the previous behavior. */
+    switch (rotation) {
+    case 90:
+        rotation_info.rotation = ROTATE_90;
+        break;
+    case 180:
+        rotation_info.rotation = ROTATE_180;
+        break;
+    case 270:
+        rotation_info.rotation = ROTATE_270;
+        break;
+    case 0:
+    default:
+        rotation_info.rotation = ROTATE_0;
+        break;
+    }
+    rotation_info.streamId = streamId;
+
+    /* Map device rotation degrees the same way */
+    switch (device_rotation) {
+    case 90:
+        rotation_info.device_rotation = ROTATE_90;
+        break;
+    case 180:
+        rotation_info.device_rotation = ROTATE_180;
+        break;
+    case 270:
+        rotation_info.device_rotation = ROTATE_270;
+        break;
+    case 0:
+    default:
+        rotation_info.device_rotation = ROTATE_0;
+        break;
+    }
+
+    if (initBatchUpdate(m_pParamBuf) < 0 ) {
+        LOGE("Failed to initialize group update table");
+        return BAD_TYPE;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_ROTATION, rotation_info)) {
+        LOGE("Failed to update table");
+        return BAD_VALUE;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        LOGE("Failed to set stream info parm");
+        return rc;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : needThumbnailReprocess
+ *
+ * DESCRIPTION: Check if thumbnail reprocessing is needed
+ *
+ * PARAMETERS : @pFeatureMask - feature mask. NOTE: this is an in/out
+ *              parameter; when reprocess is not needed because an advanced
+ *              capture mode (or UBWC) is active, the corresponding feature
+ *              bits are cleared from the mask as a side effect.
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCameraParameters::needThumbnailReprocess(cam_feature_mask_t *pFeatureMask)
+{
+    // When any advanced capture mode (or UBWC) is active, clear the
+    // related post-processing bits and report that no thumbnail
+    // reprocess is needed.
+    if (isUbiFocusEnabled() || isChromaFlashEnabled() ||
+            isOptiZoomEnabled() || isUbiRefocus() ||
+            isStillMoreEnabled() ||
+            (isHDREnabled() && !isHDRThumbnailProcessNeeded())
+            || isUBWCEnabled()) {
+        *pFeatureMask &= ~CAM_QCOM_FEATURE_CHROMA_FLASH;
+        *pFeatureMask &= ~CAM_QCOM_FEATURE_UBIFOCUS;
+        *pFeatureMask &= ~CAM_QCOM_FEATURE_REFOCUS;
+        *pFeatureMask &= ~CAM_QCOM_FEATURE_OPTIZOOM;
+        *pFeatureMask &= ~CAM_QCOM_FEATURE_STILLMORE;
+        *pFeatureMask &= ~CAM_QCOM_FEATURE_HDR;
+        return false;
+    } else {
+        // Reprocess only makes sense when a non-zero thumbnail is requested.
+        cam_dimension_t thumb_dim;
+        getThumbnailSize(&(thumb_dim.width), &(thumb_dim.height));
+        if (thumb_dim.width == 0 || thumb_dim.height == 0) {
+            return false;
+        }
+        else {
+            return true;
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumOfExtraBuffersForImageProc
+ *
+ * DESCRIPTION: get number of extra input buffers needed by image processing
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of extra buffers needed by ImageProc;
+ *              0 if not ImageProc enabled
+ *==========================================================================*/
+uint8_t QCameraParameters::getNumOfExtraBuffersForImageProc()
+{
+    int numOfBufs = 0;
+
+    // Each bracketing-style feature needs (burst_count - 1) extra inputs
+    // beyond the single regular capture buffer. The branches are mutually
+    // exclusive: only the first active feature contributes.
+    if (isUbiRefocus()) {
+        return (uint8_t)(m_pCapability->refocus_af_bracketing_need.burst_count - 1);
+    } else if (isUbiFocusEnabled()) {
+        numOfBufs += m_pCapability->ubifocus_af_bracketing_need.burst_count - 1;
+    } else if (m_bOptiZoomOn) {
+        numOfBufs += m_pCapability->opti_zoom_settings_need.burst_count - 1;
+    } else if (isChromaFlashEnabled()) {
+        numOfBufs += m_pCapability->chroma_flash_settings_need.burst_count - 1;
+    } else if (isStillMoreEnabled()) {
+        if (isSeeMoreEnabled()) {
+            // NOTE: side effect - SeeMore forces the StillMore burst to 1.
+            m_stillmore_config.burst_count = 1;
+        } else if ((m_stillmore_config.burst_count >=
+                m_pCapability->stillmore_settings_need.min_burst_count) &&
+                (m_stillmore_config.burst_count <=
+                m_pCapability->stillmore_settings_need.max_burst_count)) {
+            // Configured burst is within the capability range; use it.
+            numOfBufs += m_stillmore_config.burst_count - 1;
+        } else {
+            // Out-of-range config falls back to the capability default.
+            numOfBufs += m_pCapability->stillmore_settings_need.burst_count - 1;
+        }
+    } else if (isOEMFeatEnabled()) {
+        numOfBufs += 1;
+    }
+
+    return (uint8_t)(numOfBufs);
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifBufIndex
+ *
+ * DESCRIPTION: get index of metadata to be used for EXIF
+ *
+ * PARAMETERS : @captureIndex - index of current captured frame
+ *
+ * RETURN     : index of metadata to be used for EXIF
+ *==========================================================================*/
+uint32_t QCameraParameters::getExifBufIndex(uint32_t captureIndex)
+{
+    uint32_t index = captureIndex;
+
+    if (isUbiRefocus()) {
+        // Beyond the refocus burst, fall back to the first frame's metadata.
+        if (captureIndex >= m_pCapability->refocus_af_bracketing_need.burst_count) {
+            index = 0;
+        }
+    } else if (isChromaFlashEnabled()) {
+        index = m_pCapability->chroma_flash_settings_need.metadata_index;
+    } else if (isHDREnabled()) {
+        if (isHDR1xFrameEnabled() && isHDR1xExtraBufferNeeded()) {
+            index = m_pCapability->hdr_bracketing_setting.num_frames;
+        } else {
+            // Prefer the 0EV frame of the bracketing sequence, if any.
+            uint32_t frames = m_pCapability->hdr_bracketing_setting.num_frames;
+            for (index = 0; index < frames; index++) {
+                if (0 == m_pCapability->hdr_bracketing_setting.exp_val.values[index]) {
+                    break;
+                }
+            }
+            if (index == frames) {
+                // No 0EV frame found; use the captured frame itself.
+                index = captureIndex;
+            }
+        }
+    }
+
+    return index;
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumberInBufsForSingleShot
+ *
+ * DESCRIPTION: get number of input buffers for single shot
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of input buffers for single shot (defaults to 1 when
+ *              no multi-frame feature is active)
+ *==========================================================================*/
+uint32_t QCameraParameters::getNumberInBufsForSingleShot()
+{
+    uint32_t numOfBufs = 1;
+
+    // Multi-frame features need the full burst as input; branches are
+    // mutually exclusive (first active feature wins).
+    if (isUbiRefocus()) {
+        numOfBufs = m_pCapability->refocus_af_bracketing_need.burst_count;
+    } else if (isUbiFocusEnabled()) {
+        numOfBufs = m_pCapability->ubifocus_af_bracketing_need.burst_count;
+    } else if (m_bOptiZoomOn) {
+        numOfBufs = m_pCapability->opti_zoom_settings_need.burst_count;
+    } else if (isChromaFlashEnabled()) {
+        numOfBufs = m_pCapability->chroma_flash_settings_need.burst_count;
+    } else if (isHDREnabled()) {
+        numOfBufs = m_pCapability->hdr_bracketing_setting.num_frames;
+        // One extra input when a non-HDR "1x" frame is also delivered.
+        if (isHDR1xFrameEnabled() && isHDR1xExtraBufferNeeded()) {
+            numOfBufs++;
+        }
+    } else if (isStillMoreEnabled()) {
+        if (isSeeMoreEnabled()) {
+            // NOTE: side effect - SeeMore forces the StillMore burst to 1.
+            m_stillmore_config.burst_count = 1;
+            numOfBufs = m_stillmore_config.burst_count;
+        } else if ((m_stillmore_config.burst_count >=
+                m_pCapability->stillmore_settings_need.min_burst_count) &&
+                (m_stillmore_config.burst_count <=
+                m_pCapability->stillmore_settings_need.max_burst_count)) {
+            // Configured burst is within the capability range; use it.
+            numOfBufs = m_stillmore_config.burst_count;
+        } else {
+            // Out-of-range config falls back to the capability default.
+            numOfBufs = m_pCapability->stillmore_settings_need.burst_count;
+        }
+    }
+
+    return numOfBufs;
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumberOutBufsForSingleShot
+ *
+ * DESCRIPTION: get number of output buffers for single shot
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of output buffers for single shot (default 1)
+ *==========================================================================*/
+uint32_t QCameraParameters::getNumberOutBufsForSingleShot()
+{
+    uint32_t bufCount = 1;
+
+    if (isUbiRefocus()) {
+        bufCount = m_pCapability->refocus_af_bracketing_need.output_count;
+    } else if (isHDREnabled() && isHDR1xFrameEnabled()) {
+        // Extra output for the non-HDR "1x" frame.
+        bufCount++;
+    }
+
+    return bufCount;
+}
+
+/*===========================================================================
+ * FUNCTION   : is4k2kVideoResolution
+ *
+ * DESCRIPTION: if resolution is 4k x 2k or true 4k x 2k
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: video resolution is 4k x 2k
+ *              false: video resolution is not 4k x 2k
+ *==========================================================================*/
+bool QCameraParameters::is4k2kVideoResolution()
+{
+   cam_dimension_t vidSize;
+   getVideoSize(&vidSize.width, &vidSize.height);
+
+   // 4k2k when either dimension reaches the UHD threshold (3840x2160);
+   // equivalent to !(width < 3840 && height < 2160).
+   return (vidSize.width >= 3840) || (vidSize.height >= 2160);
+}
+
+/*===========================================================================
+ * FUNCTION   : isPreviewSeeMoreRequired
+ *
+ * DESCRIPTION: This function checks whether SeeMore (SW TNR) needs to be
+ *              applied for the preview stream depending on video resolution
+ *              and setprop
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: If SeeMore needs to apply
+ *              false: No need to apply
+ *==========================================================================*/
+bool QCameraParameters::isPreviewSeeMoreRequired()
+{
+   cam_dimension_t vdim;
+   getVideoSize(&vdim.width, &vdim.height);
+
+   char prop[PROPERTY_VALUE_MAX];
+   memset(prop, 0, sizeof(prop));
+   property_get("persist.camera.preview.seemore", prop, "0");
+   int forced = atoi(prop);
+
+   // SeeMore applies to preview when:
+   // 1. Video resolution <= (1920x1080)  (or)
+   // 2. persist.camera.preview.seemore is set
+   LOGD("width=%d, height=%d, enable=%d", vdim.width, vdim.height, forced);
+   return (((vdim.width * vdim.height) <= (1920 * 1080)) || forced);
+}
+
+/*===========================================================================
+ * FUNCTION   : updateDebugLevel
+ *
+ * DESCRIPTION: send CAM_INTF_PARM_UPDATE_DEBUG_LEVEL to backend
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : NO_ERROR --success
+ *              int32_t type of status (NO_INIT if the parameter buffer is
+ *              not mapped yet)
+ *==========================================================================*/
+int32_t QCameraParameters::updateDebugLevel()
+{
+    // Parameter buffer must be mapped before any batch update.
+    if ( m_pParamBuf == NULL ) {
+        return NO_INIT;
+    }
+
+    int32_t rc = initBatchUpdate(m_pParamBuf);
+    if ( rc != NO_ERROR ) {
+        LOGE("Failed to initialize group update table");
+        return rc;
+    }
+
+    uint32_t dummyDebugLevel = 0;
+    /* The value of dummyDebugLevel is irrelevant. On
+     * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the backend re-reads the debug
+     * property itself; the entry only acts as a trigger. */
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, dummyDebugLevel)) {
+        LOGE("Parameters batch failed");
+        return BAD_VALUE;
+    }
+
+    rc = commitSetBatch();
+    if ( rc != NO_ERROR ) {
+        LOGE("Failed to commit batch parameters");
+        return rc;
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setOfflineRAW
+ *
+ * DESCRIPTION: Function to decide Offline RAW feature.
+ *
+ * PARAMETERS :
+ *  @raw_value: offline raw value to set; true forces offline RAW on
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::setOfflineRAW(bool raw_value)
+{
+    if (raw_value) {
+        // Caller explicitly requested offline RAW; properties are ignored.
+        mOfflineRAW = true;
+        LOGH("Offline Raw  %d", mOfflineRAW);
+        return;
+    }
+
+    char value[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.raw_yuv", value, "0");
+    bool raw_yuv = atoi(value) > 0 ? true : false;
+    property_get("persist.camera.offlineraw", value, "0");
+    bool offlineRaw = atoi(value) > 0 ? true : false;
+
+    // Enable only when the offlineraw property is set AND the session
+    // actually produces RAW data (raw_yuv property or RDI mode).
+    mOfflineRAW = (raw_yuv || isRdiMode()) && offlineRaw;
+    LOGH("Offline Raw  %d", mOfflineRAW);
+}
+
+/*===========================================================================
+ * FUNCTION   : updatePpFeatureMask
+ *
+ * DESCRIPTION: Updates the feature mask for a particular stream depending
+ *              on current client configuration. The computed mask is stored
+ *              via setStreamPpMask() into mStreamPpMask[stream_type].
+ *
+ * PARAMETERS :
+ *  @stream_type: Camera stream type
+ *
+ * RETURN     : NO_ERROR --success
+ *              int32_t type of status (-1 for an invalid stream type)
+ *==========================================================================*/
+int32_t QCameraParameters::updatePpFeatureMask(cam_stream_type_t stream_type) {
+
+    cam_feature_mask_t feature_mask = 0;
+
+    if (stream_type >= CAM_STREAM_TYPE_MAX) {
+        LOGE("Error!! stream type: %d not valid", stream_type);
+        return -1;
+    }
+
+    // Update feature mask for SeeMore in video and video preview
+    if (isSeeMoreEnabled() && ((stream_type == CAM_STREAM_TYPE_VIDEO) ||
+            (stream_type == CAM_STREAM_TYPE_PREVIEW && getRecordingHintValue() &&
+            isPreviewSeeMoreRequired()))) {
+       feature_mask |= CAM_QCOM_FEATURE_LLVD;
+    }
+
+    // SW TNR for video / recording preview in high-quality NR mode
+    if (isHighQualityNoiseReductionMode() &&
+            ((stream_type == CAM_STREAM_TYPE_VIDEO) ||
+            (stream_type == CAM_STREAM_TYPE_PREVIEW && getRecordingHintValue() &&
+            isPreviewSeeMoreRequired()))) {
+        feature_mask |= CAM_QTI_FEATURE_SW_TNR;
+    }
+
+    // Do not enable feature mask for ZSL/non-ZSL/liveshot snapshot except for 4K2k case
+    if ((getRecordingHintValue() &&
+            (stream_type == CAM_STREAM_TYPE_SNAPSHOT) && is4k2kVideoResolution()) ||
+            (stream_type != CAM_STREAM_TYPE_SNAPSHOT)) {
+        // Sharpness conflicts with OptiZoom, so it is skipped in that mode.
+        if ((m_nMinRequiredPpMask & CAM_QCOM_FEATURE_SHARPNESS) &&
+                !isOptiZoomEnabled()) {
+            feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
+        }
+
+        if (m_nMinRequiredPpMask & CAM_QCOM_FEATURE_EFFECT) {
+            feature_mask |= CAM_QCOM_FEATURE_EFFECT;
+        }
+        if (isWNREnabled()) {
+            feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
+        }
+
+        //Set flip mode based on Stream type;
+        int flipMode = getFlipMode(stream_type);
+        if (flipMode > 0) {
+            feature_mask |= CAM_QCOM_FEATURE_FLIP;
+        }
+    }
+
+    // CPP TNR for video/preview streams when enabled in settings
+    if ((isTNRVideoEnabled() && (CAM_STREAM_TYPE_VIDEO == stream_type))
+            || (isTNRPreviewEnabled() && (CAM_STREAM_TYPE_PREVIEW == stream_type))) {
+        feature_mask |= CAM_QCOM_FEATURE_CPP_TNR;
+    }
+    // Eztune taps preview and snapshot streams
+    if (isEztuneEnabled() &&
+            ((CAM_STREAM_TYPE_PREVIEW == stream_type) ||
+            (CAM_STREAM_TYPE_SNAPSHOT == stream_type))) {
+        feature_mask |= CAM_QCOM_FEATURE_EZTUNE;
+    }
+
+    // CDS/DSDN for preview, video, callback, and 4K2k liveshot snapshot;
+    // DSDN takes priority over CDS when the min-required mask allows it.
+    if ((getCDSMode() != CAM_CDS_MODE_OFF) &&
+            ((CAM_STREAM_TYPE_PREVIEW == stream_type) ||
+            (CAM_STREAM_TYPE_VIDEO == stream_type) ||
+            (CAM_STREAM_TYPE_CALLBACK == stream_type) ||
+            ((CAM_STREAM_TYPE_SNAPSHOT == stream_type) &&
+            getRecordingHintValue() && is4k2kVideoResolution()))) {
+         if (m_nMinRequiredPpMask & CAM_QCOM_FEATURE_DSDN) {
+             feature_mask |= CAM_QCOM_FEATURE_DSDN;
+         } else {
+             feature_mask |= CAM_QCOM_FEATURE_CDS;
+         }
+    }
+
+    // CPP TNR for snapshot in ZSL or recording (liveshot) use cases
+    if (isTNRSnapshotEnabled() && (CAM_STREAM_TYPE_SNAPSHOT == stream_type)
+            && (isZSLMode() || getRecordingHintValue())) {
+        feature_mask |= CAM_QCOM_FEATURE_CPP_TNR;
+    }
+
+    //Rotation could also have an effect on pp feature mask
+    cam_pp_feature_config_t config;
+    cam_dimension_t dim;
+    memset(&config, 0, sizeof(cam_pp_feature_config_t));
+    getStreamRotation(stream_type, config, dim);
+    feature_mask |= config.feature_mask;
+
+    // Dual Camera scenarios
+    // all feature masks are disabled for preview and analysis streams for aux session
+    // all required feature masks for aux session preview and analysis streams need
+    // to be enabled explicitly here
+    ///@note When aux camera is of bayer type, keep pp mask as is or we'd run
+    ///      into stream mapping problems. YUV sensor is marked as interleaved and has
+    ///      preferred mapping setup so we don't see any mapping issues.
+    if (m_relCamSyncInfo.sync_control == CAM_SYNC_RELATED_SENSORS_ON) {
+        if (((CAM_STREAM_TYPE_ANALYSIS == stream_type) ||
+                (CAM_STREAM_TYPE_PREVIEW == stream_type)) &&
+                (m_relCamSyncInfo.mode == CAM_MODE_SECONDARY) &&
+                (m_pCapability->sensor_type.sens_type == CAM_SENSOR_YUV)) {
+            LOGH("Disabling all pp feature masks for aux preview and "
+                    "analysis streams");
+            feature_mask = 0;
+        }
+
+        // all feature masks need to be enabled here
+        // enable DCRF feature mask on analysis stream in case of dual camera
+        if (m_bDcrfEnabled && (CAM_STREAM_TYPE_ANALYSIS == stream_type)) {
+            feature_mask |= CAM_QCOM_FEATURE_DCRF;
+        } else {
+            feature_mask &= ~CAM_QCOM_FEATURE_DCRF;
+        }
+    }
+
+    // Preview assisted autofocus needs to be supported for
+    // callback, preview, or video streams
+    cam_color_filter_arrangement_t filter_arrangement;
+    filter_arrangement = m_pCapability->color_arrangement;
+    switch (filter_arrangement) {
+    case CAM_FILTER_ARRANGEMENT_RGGB:
+    case CAM_FILTER_ARRANGEMENT_GRBG:
+    case CAM_FILTER_ARRANGEMENT_GBRG:
+    case CAM_FILTER_ARRANGEMENT_BGGR:
+        if ((stream_type == CAM_STREAM_TYPE_CALLBACK) ||
+            (stream_type == CAM_STREAM_TYPE_PREVIEW)) {
+            feature_mask |= CAM_QCOM_FEATURE_PAAF;
+        } else if (stream_type == CAM_STREAM_TYPE_VIDEO) {
+            // EIS 3.0 on video excludes PAAF
+            if (getISType() != IS_TYPE_EIS_3_0)
+                feature_mask |= CAM_QCOM_FEATURE_PAAF;
+        }
+        break;
+    case CAM_FILTER_ARRANGEMENT_Y:
+        if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
+            feature_mask |= CAM_QCOM_FEATURE_PAAF;
+            LOGH("add PAAF mask to feature_mask for mono device");
+        }
+        break;
+    default:
+        break;
+    }
+
+    // Store stream feature mask
+    setStreamPpMask(stream_type, feature_mask);
+    LOGH("stream type: %d, pp_mask: 0x%llx", stream_type, feature_mask);
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setStreamPpMask
+ *
+ * DESCRIPTION: Stores a particular feature mask for a given camera stream
+ *
+ * PARAMETERS :
+ *  @stream_type: Camera stream type
+ *  @pp_mask  : Feature mask
+ *
+ * RETURN     : NO_ERROR --success
+ *              BAD_TYPE -- invalid stream type
+ *==========================================================================*/
+int32_t QCameraParameters::setStreamPpMask(cam_stream_type_t stream_type,
+        cam_feature_mask_t pp_mask) {
+
+    // Only store for valid stream types; the mask table is sized
+    // CAM_STREAM_TYPE_MAX.
+    if (stream_type < CAM_STREAM_TYPE_MAX) {
+        mStreamPpMask[stream_type] = pp_mask;
+        return NO_ERROR;
+    }
+    return BAD_TYPE;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamPpMask
+ *
+ * DESCRIPTION: Retrieves the feature mask for a given camera stream
+ *
+ * PARAMETERS :
+ *  @stream_type: Camera stream type
+ *  @pp_mask  : Feature mask (output)
+ *
+ * RETURN     : NO_ERROR --success
+ *              BAD_TYPE -- invalid stream type
+ *==========================================================================*/
+int32_t QCameraParameters::getStreamPpMask(cam_stream_type_t stream_type,
+        cam_feature_mask_t &pp_mask) {
+
+    // Only read for valid stream types; the mask table is sized
+    // CAM_STREAM_TYPE_MAX.
+    if (stream_type < CAM_STREAM_TYPE_MAX) {
+        pp_mask = mStreamPpMask[stream_type];
+        return NO_ERROR;
+    }
+    return BAD_TYPE;
+}
+
+/*===========================================================================
+ * FUNCTION   : isMultiPassReprocessing
+ *
+ * DESCRIPTION: Read setprop to enable/disable multipass
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : TRUE  -- If enabled
+ *              FALSE  -- disabled
+ *==========================================================================*/
+bool QCameraParameters::isMultiPassReprocessing()
+{
+    char value[PROPERTY_VALUE_MAX];
+
+    // Any non-zero property value enables multipass reprocessing.
+    property_get("persist.camera.multi_pass", value, "0");
+    return (atoi(value) != 0) ? TRUE : FALSE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setReprocCount
+ *
+ * DESCRIPTION: Set total reprocessing pass count (stored in mTotalPPCount)
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraParameters::setReprocCount()
+{
+    mTotalPPCount = 1; //Default reprocessing Pass count
+
+    // Manual capture type 3 and above needs an extra post-processing pass.
+    if (getManualCaptureMode() >=
+            CAM_MANUAL_CAPTURE_TYPE_3) {
+        LOGD("Additional post processing enabled for manual capture");
+        mTotalPPCount++;
+    }
+
+    // Remaining passes only apply when multipass is enabled via setprop.
+    if (!isMultiPassReprocessing()) {
+        return;
+    }
+
+    // Extra pass for zoomed advanced-capture bursts where the full burst
+    // is delivered as snapshots.
+    if ((getZoomLevel() != 0)
+            && (getBurstCountForAdvancedCapture()
+            == getNumOfSnapshots())) {
+        LOGD("2 Pass postprocessing enabled");
+        mTotalPPCount++;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : isUBWCEnabled
+ *
+ * DESCRIPTION: Function to get UBWC hardware support.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : TRUE -- UBWC format supported
+ *              FALSE -- UBWC is not supported.
+ *==========================================================================*/
+bool QCameraParameters::isUBWCEnabled()
+{
+#ifdef UBWC_PRESENT
+    char value[PROPERTY_VALUE_MAX];
+
+    // Honor the global gralloc UBWC kill-switch.
+    memset(value, 0, sizeof(value));
+    property_get("debug.gralloc.gfx_ubwc_disable", value, "0");
+    if (atoi(value)) {
+        return FALSE;
+    }
+
+    //Disable UBWC if it is YUV sensor.
+    if ((m_pCapability != NULL) &&
+            (m_pCapability->sensor_type.sens_type == CAM_SENSOR_YUV)) {
+        return FALSE;
+    }
+
+    //Disable UBWC if Eztune is enabled
+    // Eztune works on CPP output and cannot understand UBWC buffer.
+    memset(value, 0, sizeof(value));
+    property_get("persist.camera.eztune.enable", value, "0");
+    if (atoi(value)) {
+        return FALSE;
+    }
+    return TRUE;
+#else
+    return FALSE;
+#endif
+}
+
+/*===========================================================================
+ * FUNCTION   : isPostProcScaling
+ *
+ * DESCRIPTION: is scaling to be done by CPP?
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : TRUE  : If CPP scaling enabled
+ *              FALSE : If VFE scaling enabled
+ *==========================================================================*/
+bool QCameraParameters::isPostProcScaling()
+{
+    // CPP scaling never applies while recording.
+    if (getRecordingHintValue()) {
+        return FALSE;
+    }
+
+    char value[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.pp_scaling", value, "0");
+    bool cpp_scaling = atoi(value) > 0 ? TRUE : FALSE;
+
+    LOGH("Post proc scaling enabled : %d",
+             cpp_scaling);
+    return cpp_scaling;
+}
+
+/*===========================================================================
+ * FUNCTION   : isLLNoiseEnabled
+ *
+ * DESCRIPTION: Low light noise change
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : TRUE  : If low light noise enabled
+ *              FALSE : If low light noise disabled
+ *==========================================================================*/
+bool QCameraParameters::isLLNoiseEnabled()
+{
+    // Low-light noise handling requires wavelet noise reduction.
+    if (!isWNREnabled()) {
+        return FALSE;
+    }
+
+    char value[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.llnoise", value, "0");
+    bool llnoise = atoi(value) > 0 ? TRUE : FALSE;
+
+    LOGH("Low light noise enabled : %d",
+             llnoise);
+    return llnoise;
+}
+
+/*===========================================================================
+ * FUNCTION   : setBufBatchCount
+ *
+ * DESCRIPTION: Function to configure batch buffer
+ *
+ * PARAMETERS : int8_t buf_cnt
+ *                     Buffer batch count requested by the caller
+ *
+ * RETURN     :  None
+ *==========================================================================*/
+void QCameraParameters::setBufBatchCount(int8_t buf_cnt)
+{
+    char value[PROPERTY_VALUE_MAX];
+
+    mBufBatchCnt = 0;
+    property_get("persist.camera.batchcount", value, "0");
+    int8_t propCount = atoi(value);
+
+    // Batching stays disabled when there is no property override and the
+    // requested count does not exceed the minimum batch size.
+    if (propCount == 0 && buf_cnt <= CAMERA_MIN_BATCH_COUNT) {
+        LOGH("Buffer batch count = %d", mBufBatchCnt);
+        set(KEY_QC_VIDEO_BATCH_SIZE, mBufBatchCnt);
+        return;
+    }
+
+    // Halve the requested count until the sensor-supported maximum allows it.
+    while ((m_pCapability->max_batch_bufs_supported != 0)
+            && (m_pCapability->max_batch_bufs_supported < buf_cnt)) {
+        buf_cnt = buf_cnt / 2;
+    }
+
+    if (propCount > 0) {
+        // Property override wins over the requested count.
+        mBufBatchCnt = propCount;
+        LOGH("Buffer batch count = %d", mBufBatchCnt);
+        set(KEY_QC_VIDEO_BATCH_SIZE, mBufBatchCnt);
+        return;
+    }
+
+    if (buf_cnt > CAMERA_MIN_BATCH_COUNT) {
+        mBufBatchCnt = buf_cnt;
+        LOGH("Buffer batch count = %d", mBufBatchCnt);
+        set(KEY_QC_VIDEO_BATCH_SIZE, mBufBatchCnt);
+        return;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setVideoBatchSize
+ *
+ * DESCRIPTION: Function to configure HAL-level batching for video
+ *              (stored in mVideoBatchSize and published via
+ *              KEY_QC_VIDEO_BATCH_SIZE)
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     :  None
+ *==========================================================================*/
+void QCameraParameters::setVideoBatchSize()
+{
+    char value[PROPERTY_VALUE_MAX];
+    int8_t minBatchcnt = 2; //Batching enabled only if batch size is greater than 2;
+    int32_t width = 0, height = 0;
+    mVideoBatchSize = 0;
+
+    if (getBufBatchCount()) {
+        //We don't need HAL to HAL batching if camera batching enabled.
+        return;
+    }
+
+    getVideoSize(&width, &height);
+    if ((width > 1920) || (height > 1080)) {
+        //Cannot enable batch mode for video size bigger than 1080p
+        return;
+    }
+
+    //Batch size "6" is the recommended and gives optimal power saving.
+    property_get("persist.camera.video.batchsize", value, "0");
+    mVideoBatchSize = atoi(value);
+
+    // Clamp to the maximum the consumer supports.
+    if (mVideoBatchSize > CAMERA_MAX_CONSUMER_BATCH_BUFFER_SIZE) {
+        mVideoBatchSize = CAMERA_MAX_CONSUMER_BATCH_BUFFER_SIZE;
+    } else if (mVideoBatchSize <= minBatchcnt) {
+        //Batching enabled only if batch size is greater than 2.
+        mVideoBatchSize = 0;
+    }
+    LOGD("mVideoBatchSize = %d", mVideoBatchSize);
+    set(KEY_QC_VIDEO_BATCH_SIZE, mVideoBatchSize);
+}
+
+/*===========================================================================
+ * FUNCTION   : setCustomParams
+ *
+ * DESCRIPTION: Function to update OEM specific custom parameter
+ *
+ * PARAMETERS : params: Input Parameter object (currently unused)
+ *
+ * RETURN     :  error value
+ *==========================================================================*/
+int32_t QCameraParameters::setCustomParams(__unused const QCameraParameters& params)
+{
+    int32_t rc = NO_ERROR;
+
+    /* Application specific parameters can be read from "params" to update
+       m_pParamBuf. We can also update internal OEM custom parameters in this
+       function. "CAM_CUSTOM_PARM_EXAMPLE" is used as an example */
+
+    /*Get the pointer of shared buffer for custom parameter*/
+    custom_parm_buffer_t *customParam =
+            (custom_parm_buffer_t *)POINTER_OF_META(CAM_INTF_PARM_CUSTOM, m_pParamBuf);
+
+
+    /*start updating custom parameter values*/
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(customParam, CAM_CUSTOM_PARM_EXAMPLE, 1)) {
+        LOGE("Failed to update CAM_CUSTOM_PARM_DUMMY");
+        return BAD_VALUE;
+    }
+
+    /*set custom parameter values to main parameter buffer. Update isvalid flag*/
+    ADD_GET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_CUSTOM);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : dump
+ *
+ * DESCRIPTION: Composes a string based on current configuration
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : Formatted string
+ *==========================================================================*/
+String8 QCameraParameters::dump()
+{
+    // Use String8::appendFormat rather than a fixed 128-byte scratch buffer:
+    // this removes the repeated magic size constant and the risk of silently
+    // truncating any line that exceeds it.
+    String8 str("\n");
+
+    str.appendFormat("Preview Pixel Fmt: %d\n", getPreviewHalPixelFormat());
+    str.appendFormat("ZSL Burst Interval: %d\n", getZSLBurstInterval());
+    str.appendFormat("ZSL Queue Depth: %d\n", getZSLQueueDepth());
+    str.appendFormat("ZSL Back Look Count %d\n", getZSLBackLookCount());
+    str.appendFormat("Max Unmatched Frames In Queue: %d\n",
+        getMaxUnmatchedFramesInQueue());
+    str.appendFormat("Is ZSL Mode: %d\n", isZSLMode());
+    str.appendFormat("Is No Display Mode: %d\n", isNoDisplayMode());
+    str.appendFormat("Is WNR Enabled: %d\n", isWNREnabled());
+    str.appendFormat("isHfrMode: %d\n", isHfrMode());
+    str.appendFormat("getNumOfSnapshots: %d\n", getNumOfSnapshots());
+    str.appendFormat("getNumOfExtraHDRInBufsIfNeeded: %d\n",
+        getNumOfExtraHDRInBufsIfNeeded());
+    str.appendFormat("getNumOfExtraHDROutBufsIfNeeded: %d\n",
+        getNumOfExtraHDROutBufsIfNeeded());
+    str.appendFormat("getRecordingHintValue: %d\n", getRecordingHintValue());
+    str.appendFormat("getJpegQuality: %u\n", getJpegQuality());
+    str.appendFormat("getJpegRotation: %u\n", getJpegRotation());
+    str.appendFormat("isHistogramEnabled: %d\n", isHistogramEnabled());
+    str.appendFormat("isFaceDetectionEnabled: %d\n", isFaceDetectionEnabled());
+    str.appendFormat("isHDREnabled: %d\n", isHDREnabled());
+    str.appendFormat("isAutoHDREnabled: %d\n", isAutoHDREnabled());
+    str.appendFormat("isAVTimerEnabled: %d\n", isAVTimerEnabled());
+    str.appendFormat("getFocusMode: %d\n", getFocusMode());
+    str.appendFormat("isJpegPictureFormat: %d\n", isJpegPictureFormat());
+    str.appendFormat("isNV16PictureFormat: %d\n", isNV16PictureFormat());
+    str.appendFormat("isNV21PictureFormat: %d\n", isNV21PictureFormat());
+    str.appendFormat("isSnapshotFDNeeded: %d\n", isSnapshotFDNeeded());
+    str.appendFormat("isHDR1xFrameEnabled: %d\n", isHDR1xFrameEnabled());
+    str.appendFormat("isYUVFrameInfoNeeded: %d\n", isYUVFrameInfoNeeded());
+    str.appendFormat("isHDR1xExtraBufferNeeded: %d\n",
+        isHDR1xExtraBufferNeeded());
+    str.appendFormat("isHDROutputCropEnabled: %d\n", isHDROutputCropEnabled());
+    str.appendFormat("isPreviewFlipChanged: %d\n", isPreviewFlipChanged());
+    str.appendFormat("isVideoFlipChanged: %d\n", isVideoFlipChanged());
+    str.appendFormat("isSnapshotFlipChanged: %d\n", isSnapshotFlipChanged());
+    str.appendFormat("isHDRThumbnailProcessNeeded: %d\n",
+        isHDRThumbnailProcessNeeded());
+    str.appendFormat("getAutoFlickerMode: %d\n", getAutoFlickerMode());
+    str.appendFormat("getNumOfExtraBuffersForImageProc: %d\n",
+        getNumOfExtraBuffersForImageProc());
+    str.appendFormat("isUbiFocusEnabled: %d\n", isUbiFocusEnabled());
+    str.appendFormat("isChromaFlashEnabled: %d\n", isChromaFlashEnabled());
+    str.appendFormat("isOptiZoomEnabled: %d\n", isOptiZoomEnabled());
+    str.appendFormat("isStillMoreEnabled: %d\n", isStillMoreEnabled());
+    str.appendFormat("getBurstCountForAdvancedCapture: %d\n",
+        getBurstCountForAdvancedCapture());
+
+    return str;
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumOfExtraBuffersForVideo
+ *
+ * DESCRIPTION: get number of extra video buffers needed by image processing
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : 1 when SeeMore or high quality noise reduction is active;
+ *              0 otherwise
+ *==========================================================================*/
+uint8_t QCameraParameters::getNumOfExtraBuffersForVideo()
+{
+    // One extra buffer is required when either post-processing feature
+    // is enabled on the video stream.
+    bool needExtraBuf = isSeeMoreEnabled() || isHighQualityNoiseReductionMode();
+    return needExtraBuf ? 1 : 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumOfExtraBuffersForPreview
+ *
+ * DESCRIPTION: get number of extra preview buffers needed by image processing
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : 1 when SeeMore or high quality noise reduction is active in
+ *              non-ZSL recording mode; 0 otherwise
+ *==========================================================================*/
+uint8_t QCameraParameters::getNumOfExtraBuffersForPreview()
+{
+    // Extra preview buffer is only needed while recording (recording hint
+    // set, not ZSL) with a post-processing feature enabled.
+    bool featureActive = isSeeMoreEnabled() || isHighQualityNoiseReductionMode();
+    bool recordingPreview = !isZSLMode() && getRecordingHintValue();
+    return (featureActive && recordingPreview) ? 1 : 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : setToneMapMode
+ *
+ * DESCRIPTION: enable or disable tone map
+ *
+ * PARAMETERS :
+ *   @enable : enable: 1; disable 0
+ *   @initCommit: if configuration list needs to be initialized and commited
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setToneMapMode(uint32_t enable, bool initCommit)
+{
+    LOGH("tone map mode %d ", enable);
+
+    // Start a fresh batch only when the caller asked for a standalone commit.
+    if (initCommit && (initBatchUpdate(m_pParamBuf) < 0)) {
+        LOGE("Failed to initialize group update table");
+        return FAILED_TRANSACTION;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_TONE_MAP_MODE, enable)) {
+        LOGE("Failed to update tone map mode");
+        return BAD_VALUE;
+    }
+
+    int32_t rc = NO_ERROR;
+    if (initCommit) {
+        rc = commitSetBatch();
+        if (rc != NO_ERROR) {
+            LOGE("Failed to commit tone map mode");
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getLongshotStages
+ *
+ * DESCRIPTION: get number of stages for longshot
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of stages; defaults depend on device memory, and may
+ *              be overridden by "persist.camera.longshot.stages" when the
+ *              property value is in (0, CAMERA_DEFAULT_LONGSHOT_STAGES]
+ *==========================================================================*/
+uint8_t QCameraParameters::getLongshotStages()
+{
+    uint8_t numStages =
+            isLowMemoryDevice() ? CAMERA_MIN_LONGSHOT_STAGES : CAMERA_DEFAULT_LONGSHOT_STAGES;
+
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.longshot.stages", prop, "0");
+    // Range-check in full int width: narrowing atoi() straight into uint8_t
+    // wraps out-of-range overrides (e.g. 260 -> 4) and would accept them.
+    int propStages = atoi(prop);
+    if (propStages > 0 && propStages <= CAMERA_DEFAULT_LONGSHOT_STAGES) {
+        numStages = (uint8_t)propStages;
+    }
+    return numStages;
+}
+
+/*===========================================================================
+ * FUNCTION   : setCDSMode
+ *
+ * DESCRIPTION: set CDS mode
+ *
+ * PARAMETERS :
+ *   @cds_mode : cds mode
+ *   @initCommit: if configuration list needs to be initialized and commited
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setCDSMode(int32_t cds_mode, bool initCommit)
+{
+    // Open a new batch only for a standalone (init + commit) update.
+    if (initCommit && (initBatchUpdate(m_pParamBuf) < 0)) {
+        LOGE("Failed to initialize group update table");
+        return FAILED_TRANSACTION;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_CDS_MODE, cds_mode)) {
+        LOGE("Failed to update cds mode");
+        return BAD_VALUE;
+    }
+
+    int32_t rc = NO_ERROR;
+    if (initCommit) {
+        rc = commitSetBatch();
+        if (rc != NO_ERROR) {
+            LOGE("Failed to set cds mode");
+            return rc;
+        }
+    }
+
+    LOGH("cds mode -> %d", cds_mode);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setLowLightCapture
+ *
+ * DESCRIPTION: Function to enable low light capture, driven by the
+ *              "persist.camera.llc" system property
+ *==========================================================================*/
+void QCameraParameters::setLowLightCapture()
+{
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.llc", prop, "0");
+    int llcProp = atoi(prop);
+    m_LLCaptureEnabled = (llcProp > 0) ? TRUE : FALSE;
+
+    // With the feature off, force the cached light level back to OFF.
+    if (m_LLCaptureEnabled == FALSE) {
+        m_LowLightLevel = CAM_LOW_LIGHT_OFF;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : fdModeInVideo
+ *
+ * DESCRIPTION: FD in Video change
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : FD Mode in Video
+ *              0 : If FD in Video disabled
+ *              1 : If FD in Video enabled for Detection, focus
+ *              2 : If FD in Video enabled only for focus
+ *==========================================================================*/
+uint8_t QCameraParameters::fdModeInVideo()
+{
+    char value[PROPERTY_VALUE_MAX];
+
+    property_get("persist.camera.fdvideo", value, "0");
+    // Parse the property exactly once (original code called atoi() twice).
+    int mode = atoi(value);
+    uint8_t fdvideo = (mode > 0) ? (uint8_t)mode : 0;
+
+    LOGD("FD mode in Video : %d", fdvideo);
+    return fdvideo;
+}
+
+/*===========================================================================
+ * FUNCTION   : setManualCaptureMode
+ *
+ * DESCRIPTION: Function to set Manual capture modes
+ *
+ * PARAMETERS :
+ *   @mode : Capture mode configured
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setManualCaptureMode(QCameraManualCaptureModes mode)
+{
+    char value[PROPERTY_VALUE_MAX];
+
+    property_get("persist.camera.manual.capture", value, "0");
+
+    if (atoi(value) != 0) {
+        // Property enabled: honour the requested mode, but never drop to OFF;
+        // TYPE_1 is the minimum active manual-capture mode.
+        m_ManualCaptureMode = (mode == CAM_MANUAL_CAPTURE_TYPE_OFF)
+                ? CAM_MANUAL_CAPTURE_TYPE_1 : mode;
+    } else {
+        m_ManualCaptureMode = CAM_MANUAL_CAPTURE_TYPE_OFF;
+    }
+
+    // Offline RAW reprocessing is only required from TYPE_3 upwards; every
+    // lower mode (OFF, TYPE_1, TYPE_2) disables it.
+    setOfflineRAW((m_ManualCaptureMode >= CAM_MANUAL_CAPTURE_TYPE_3) ? TRUE : FALSE);
+    setReprocCount();
+    LOGH("Manual capture mode - %d", m_ManualCaptureMode);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : isReprocScaleEnabled
+ *
+ * DESCRIPTION: Whether reprocess scale is enabled or not
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : TRUE  : Reprocess scale is enabled
+ *              FALSE : Reprocess scale is not enabled
+ *==========================================================================*/
+bool QCameraParameters::isReprocScaleEnabled()
+{
+    // Thin delegate to the embedded QCameraReprocScaleParam helper object.
+    return m_reprocScaleParam.isScaleEnabled();
+}
+
+/*===========================================================================
+ * FUNCTION   : isUnderReprocScaling
+ *
+ * DESCRIPTION: Whether image is under reprocess scaling
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : TRUE  : Image is under reprocess scaling
+ *              FALSE : Image is not under reprocess scaling
+ *==========================================================================*/
+bool QCameraParameters::isUnderReprocScaling()
+{
+    // Thin delegate to the embedded QCameraReprocScaleParam helper object.
+    return m_reprocScaleParam.isUnderScaling();
+}
+
+/*===========================================================================
+ * FUNCTION   : getPicSizeFromAPK
+ *
+ * DESCRIPTION: Get picture size set from application.
+ *
+ * PARAMETERS :
+ *   @width   : width set by application
+ *   @height  : height set by application
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getPicSizeFromAPK(int &width, int &height)
+{
+    // Thin delegate: the reproc-scale helper tracks the APK-requested size.
+    return m_reprocScaleParam.getPicSizeFromAPK(width, height);
+}
+
+
+
+/*===========================================================================
+ * FUNCTION   : setDualLedCalibration
+ *
+ * DESCRIPTION: set dual led calibration
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              BAD_VALUE -- failed to add the entry to the parameter batch
+ *==========================================================================*/
+int32_t QCameraParameters::setDualLedCalibration(
+        __unused const QCameraParameters& params)
+{
+    char value[PROPERTY_VALUE_MAX];
+    int32_t calibration = 0;
+
+    memset(value, 0, sizeof(value));
+    property_get("persist.camera.dual_led_calib", value, "0");
+    calibration = atoi(value);
+    if (calibration != m_dualLedCalibration) {
+        LOGD("updating calibration=%d m_dualLedCalibration=%d",
+                calibration, m_dualLedCalibration);
+
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+                CAM_INTF_PARM_DUAL_LED_CALIBRATION,
+                calibration)) {
+            LOGE("Failed to update dual led calibration param");
+            return BAD_VALUE;
+        }
+        // Cache the new value only after it was added to the batch
+        // successfully; the original assigned first, so a failed add was
+        // never retried on subsequent calls with the same property value.
+        m_dualLedCalibration = calibration;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setInstantAEC
+ *
+ * DESCRIPTION: set instant AEC value to backend
+ *
+ * PARAMETERS :
+ *   @value : instant aec enabled or not.
+ *            0 - disable
+ *            1 - Enable and set aggressive AEC algo to the backend
+ *            2 - Enable and set fast AEC algo to the backend
+ *   @initCommit: if configuration list needs to be initialized and commited
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setInstantAEC(uint8_t value, bool initCommit)
+{
+    if (initCommit) {
+        if (initBatchUpdate(m_pParamBuf) < 0) {
+            LOGE("Failed to initialize group update table");
+            return FAILED_TRANSACTION;
+        }
+    }
+
+    int32_t rc = NO_ERROR;
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf, CAM_INTF_PARM_INSTANT_AEC, value)) {
+        // Fixed garbled log text ("Failed to instant aec value").
+        LOGE("Failed to set instant aec value");
+        return BAD_VALUE;
+    }
+
+    if (initCommit) {
+        rc = commitSetBatch();
+        if (NO_ERROR != rc) {
+            LOGE("Failed to commit instant aec value");
+            return rc;
+        }
+    }
+
+    LOGD(" Instant AEC value set to backend %d", value);
+    // Cache the value only after it was successfully sent to the backend.
+    m_bInstantAEC = value;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAdvancedCaptureMode
+ *
+ * DESCRIPTION: set advanced capture mode
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAdvancedCaptureMode()
+{
+    // Derive the mode from whichever advanced camera features are enabled.
+    uint8_t mode = isAdvCamFeaturesEnabled();
+    LOGD("updating advanced capture mode value to %d", mode);
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(m_pParamBuf,
+            CAM_INTF_PARM_ADV_CAPTURE_MODE, mode)) {
+        LOGE("Failed to set advanced capture mode param");
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getAnalysisInfo
+ *
+ * DESCRIPTION: Get the Analysis information based on
+ *     current mode and feature mask
+ *
+ * PARAMETERS :
+ *   @fdVideoEnabled : Whether fdVideo enabled currently
+ *   @hal3           : Whether hal3 or hal1 (parameter was previously
+ *                     documented as "videoEnabled")
+ *   @featureMask    : Feature mask
+ *   @pAnalysisInfo  : Analysis info to be filled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getAnalysisInfo(
+        bool fdVideoEnabled,
+        bool hal3,
+        cam_feature_mask_t featureMask,
+        cam_analysis_info_t *pAnalysisInfo)
+{
+    // Thin delegate to the shared QCameraCommon helper.
+    return mCommon.getAnalysisInfo(fdVideoEnabled, hal3, featureMask, pAnalysisInfo);
+}
+
+}; // namespace qcamera
diff --git a/msmcobalt/QCamera2/HAL/QCameraParameters.h b/msmcobalt/QCamera2/HAL/QCameraParameters.h
new file mode 100644
index 0000000..e0877d8
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/QCameraParameters.h
@@ -0,0 +1,1229 @@
+/*
+** Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+** Not a Contribution. Apache license notifications and license are
+** retained for attribution purposes only.
+**
+** Copyright 2008, The Android Open Source Project
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_HARDWARE_QCAMERA_PARAMETERS_H
+#define ANDROID_HARDWARE_QCAMERA_PARAMETERS_H
+
+// To remove
+#include <cutils/properties.h>
+
+// System dependencies
+#include <camera/CameraParameters.h>
+#include <utils/Errors.h>
+
+// Camera dependencies
+#include "camera.h"
+#include "QCameraMem.h"
+#include "QCameraParametersIntf.h"
+#include "QCameraThermalAdapter.h"
+#include "QCameraCommon.h"
+
+extern "C" {
+#include "mm_jpeg_interface.h"
+}
+
+using namespace android;
+
+namespace qcamera {
+
+//EXIF globals
+static const char ExifAsciiPrefix[] = { 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0 };          // "ASCII\0\0\0"
+static const char ExifUndefinedPrefix[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 };   // "\0\0\0\0\0\0\0\0"
+
+#define FOCAL_LENGTH_DECIMAL_PRECISION   100
+
+#define CAMERA_MIN_BATCH_COUNT           4
+
+#define QCAMERA_MAX_EXP_TIME_LEVEL1      100
+#define QCAMERA_MAX_EXP_TIME_LEVEL2      500
+#define QCAMERA_MAX_EXP_TIME_LEVEL3      1000
+#define QCAMERA_MAX_EXP_TIME_LEVEL4      10000
+
+class QCameraParameters: private CameraParameters
+{
+
+private:
+
+    class QCameraReprocScaleParam{
+    public:
+
+        QCameraReprocScaleParam();
+        ~QCameraReprocScaleParam();
+
+        void setScaleEnable(bool enabled);
+        int32_t setScaleSizeTbl(size_t scale_cnt,
+                cam_dimension_t *scale_tbl, size_t org_cnt,
+                cam_dimension_t *org_tbl);
+        int32_t setValidatePicSize(int &width, int &height);
+
+        bool isScaleEnabled();
+        bool isUnderScaling();
+
+        size_t getScaleSizeTblCnt();
+        cam_dimension_t *getScaledSizeTbl();
+        size_t getTotalSizeTblCnt();
+        cam_dimension_t *getTotalSizeTbl();
+        int32_t getPicSizeFromAPK(int &width, int &height);
+        int32_t getPicSizeSetted(int &width, int &height);
+
+    private:
+        bool isScalePicSize(int width, int height);
+        bool isValidatePicSize(int width, int height);
+        int32_t setSensorSupportedPicSize();
+        size_t checkScaleSizeTable(size_t scale_cnt, cam_dimension_t *scale_tbl,
+                size_t org_cnt, cam_dimension_t *org_tbl);
+
+        bool mScaleEnabled;
+        bool mIsUnderScaling;   //if in scale status
+
+        // picture size cnt that need scale operation
+        size_t mNeedScaleCnt;
+        cam_dimension_t mNeedScaledSizeTbl[MAX_SCALE_SIZES_CNT];
+
+        // sensor supported size cnt and table
+        size_t mSensorSizeTblCnt;
+        cam_dimension_t *mSensorSizeTbl;
+
+        // Total size cnt (sensor supported + need scale cnt)
+        size_t mTotalSizeTblCnt;
+        cam_dimension_t mTotalSizeTbl[MAX_SIZES_CNT];
+
+        cam_dimension_t mPicSizeFromAPK;   // dimension that APK is expected
+        cam_dimension_t mPicSizeSetted;    // dimension that config vfe
+    };
+
+    // Supported PREVIEW/RECORDING SIZES IN HIGH FRAME RATE recording, sizes in pixels.
+    // Example value: "800x480,432x320". Read only.
+    static const char KEY_QC_SUPPORTED_HFR_SIZES[];
+    // The mode of preview frame rate.
+    // Example value: "frame-rate-auto, frame-rate-fixed".
+    static const char KEY_QC_PREVIEW_FRAME_RATE_MODE[];
+    static const char KEY_QC_SUPPORTED_PREVIEW_FRAME_RATE_MODES[];
+    static const char KEY_QC_PREVIEW_FRAME_RATE_AUTO_MODE[];
+    static const char KEY_QC_PREVIEW_FRAME_RATE_FIXED_MODE[];
+    static const char KEY_QC_SUPPORTED_SKIN_TONE_ENHANCEMENT_MODES[] ;
+
+    // Supported live snapshot sizes
+    static const char KEY_QC_SUPPORTED_LIVESNAPSHOT_SIZES[];
+
+    // Supported Raw formats
+    static const char KEY_QC_SUPPORTED_RAW_FORMATS[];
+    static const char KEY_QC_RAW_FORMAT[];
+
+    //Touch Af/AEC settings.
+    static const char KEY_QC_TOUCH_AF_AEC[];
+    static const char KEY_QC_SUPPORTED_TOUCH_AF_AEC[];
+    //Touch Index for AEC.
+    static const char KEY_QC_TOUCH_INDEX_AEC[];
+    //Touch Index for AF.
+    static const char KEY_QC_TOUCH_INDEX_AF[];
+    // Current auto scene detection mode.
+    // Example value: "off" or "on" constants. Read/write.
+    static const char KEY_QC_SCENE_DETECT[];
+    // Supported auto scene detection settings.
+    // Example value: "off,on". Read only.
+    static const char KEY_QC_SUPPORTED_SCENE_DETECT[];
+    static const char KEY_QC_SELECTABLE_ZONE_AF[];
+
+    static const char KEY_QC_ISO_MODE[];
+    static const char KEY_QC_CONTINUOUS_ISO[];
+    static const char KEY_QC_MIN_ISO[];
+    static const char KEY_QC_MAX_ISO[];
+    static const char KEY_QC_SUPPORTED_ISO_MODES[];
+    static const char KEY_QC_EXPOSURE_TIME[];
+    static const char KEY_QC_MIN_EXPOSURE_TIME[];
+    static const char KEY_QC_MAX_EXPOSURE_TIME[];
+    static const char KEY_QC_LENSSHADE[] ;
+    static const char KEY_QC_SUPPORTED_LENSSHADE_MODES[] ;
+    static const char KEY_QC_AUTO_EXPOSURE[];
+    static const char KEY_QC_SUPPORTED_AUTO_EXPOSURE[];
+
+    static const char KEY_QC_GPS_LATITUDE_REF[];
+    static const char KEY_QC_GPS_LONGITUDE_REF[];
+    static const char KEY_QC_GPS_ALTITUDE_REF[];
+    static const char KEY_QC_GPS_STATUS[];
+    static const char KEY_QC_MEMORY_COLOR_ENHANCEMENT[];
+    static const char KEY_QC_SUPPORTED_MEM_COLOR_ENHANCE_MODES[];
+    static const char KEY_QC_DIS[];
+    static const char KEY_QC_OIS[];
+    static const char KEY_QC_SUPPORTED_DIS_MODES[];
+    static const char KEY_QC_SUPPORTED_OIS_MODES[];
+
+    static const char KEY_QC_ZSL[];
+    static const char KEY_QC_SUPPORTED_ZSL_MODES[];
+    static const char KEY_QC_ZSL_BURST_INTERVAL[];
+    static const char KEY_QC_ZSL_BURST_LOOKBACK[];
+    static const char KEY_QC_ZSL_QUEUE_DEPTH[];
+
+    static const char KEY_QC_CAMERA_MODE[];
+    static const char KEY_QC_ORIENTATION[];
+
+    static const char KEY_QC_VIDEO_HIGH_FRAME_RATE[];
+    static const char KEY_QC_VIDEO_HIGH_SPEED_RECORDING[];
+    static const char KEY_QC_SUPPORTED_VIDEO_HIGH_FRAME_RATE_MODES[];
+    static const char KEY_QC_HIGH_DYNAMIC_RANGE_IMAGING[];
+    static const char KEY_QC_SUPPORTED_HDR_IMAGING_MODES[];
+    static const char KEY_QC_AE_BRACKET_HDR[];
+    static const char KEY_QC_SUPPORTED_AE_BRACKET_MODES[];
+    static const char KEY_QC_CAPTURE_BURST_EXPOSURE[];
+    static const char KEY_QC_NUM_SNAPSHOT_PER_SHUTTER[];
+    static const char KEY_QC_NUM_RETRO_BURST_PER_SHUTTER[];
+    static const char KEY_QC_SNAPSHOT_BURST_LED_ON_PERIOD[];
+    static const char KEY_QC_SNAPSHOT_BURST_NUM[];
+    static const char KEY_QC_NO_DISPLAY_MODE[];
+    static const char KEY_QC_RAW_PICUTRE_SIZE[];
+    static const char KEY_QC_TINTLESS_ENABLE[];
+    static const char KEY_QC_SCENE_SELECTION[];
+    static const char KEY_QC_CDS_MODE[];
+    static const char KEY_QC_VIDEO_CDS_MODE[];
+    static const char KEY_QC_SUPPORTED_CDS_MODES[];
+    static const char KEY_QC_SUPPORTED_VIDEO_CDS_MODES[];
+    static const char KEY_QC_TNR_MODE[];
+    static const char KEY_QC_VIDEO_TNR_MODE[];
+    static const char KEY_QC_SUPPORTED_TNR_MODES[];
+    static const char KEY_QC_SUPPORTED_VIDEO_TNR_MODES[];
+
+    static const char KEY_INTERNAL_PERVIEW_RESTART[];
+    static const char KEY_QC_WB_MANUAL_CCT[];
+    static const char KEY_QC_MIN_WB_CCT[];
+    static const char KEY_QC_MAX_WB_CCT[];
+    static const char KEY_QC_MANUAL_WB_GAINS[];
+    static const char KEY_QC_MIN_WB_GAIN[];
+    static const char KEY_QC_MAX_WB_GAIN[];
+    static const char WHITE_BALANCE_MANUAL[];
+    static const char FOCUS_MODE_MANUAL_POSITION[];
+    static const char KEY_QC_LONG_SHOT[];
+    static const char KEY_QC_INITIAL_EXPOSURE_INDEX[];
+    static const char KEY_QC_INSTANT_AEC[];
+    static const char KEY_QC_INSTANT_CAPTURE[];
+    static const char KEY_QC_INSTANT_AEC_SUPPORTED_MODES[];
+    static const char KEY_QC_INSTANT_CAPTURE_SUPPORTED_MODES[];
+
+    static const char KEY_QC_MANUAL_FOCUS_POSITION[];
+    static const char KEY_QC_MANUAL_FOCUS_POS_TYPE[];
+    static const char KEY_QC_MIN_FOCUS_POS_INDEX[];
+    static const char KEY_QC_MAX_FOCUS_POS_INDEX[];
+    static const char KEY_QC_MIN_FOCUS_POS_DAC[];
+    static const char KEY_QC_MAX_FOCUS_POS_DAC[];
+    static const char KEY_QC_MIN_FOCUS_POS_RATIO[];
+    static const char KEY_QC_MAX_FOCUS_POS_RATIO[];
+    static const char KEY_QC_MIN_FOCUS_POS_DIOPTER[];
+    static const char KEY_QC_MAX_FOCUS_POS_DIOPTER[];
+    static const char KEY_QC_FOCUS_POSITION_SCALE[];
+    static const char KEY_QC_FOCUS_POSITION_DIOPTER[];
+
+    static const char KEY_QC_SUPPORTED_MANUAL_FOCUS_MODES[];
+    static const char KEY_QC_SUPPORTED_MANUAL_EXPOSURE_MODES[];
+    static const char KEY_QC_SUPPORTED_MANUAL_WB_MODES[];
+    static const char KEY_QC_FOCUS_SCALE_MODE[];
+    static const char KEY_QC_FOCUS_DIOPTER_MODE[];
+    static const char KEY_QC_ISO_PRIORITY[];
+    static const char KEY_QC_EXP_TIME_PRIORITY[];
+    static const char KEY_QC_USER_SETTING[];
+    static const char KEY_QC_WB_CCT_MODE[];
+    static const char KEY_QC_WB_GAIN_MODE[];
+    static const char KEY_QC_MANUAL_WB_TYPE[];
+    static const char KEY_QC_MANUAL_WB_VALUE[];
+    static const char KEY_QC_CURRENT_EXPOSURE_TIME[];
+    static const char KEY_QC_CURRENT_ISO[];
+    static const char KEY_QC_CACHE_VIDEO_BUFFERS[];
+
+    // DENOISE
+    static const char KEY_QC_DENOISE[];
+    static const char KEY_QC_SUPPORTED_DENOISE[];
+
+    //Selectable zone AF.
+    static const char KEY_QC_FOCUS_ALGO[];
+    static const char KEY_QC_SUPPORTED_FOCUS_ALGOS[];
+
+    //Face Detection
+    static const char KEY_QC_FACE_DETECTION[];
+    static const char KEY_QC_SUPPORTED_FACE_DETECTION[];
+
+    //Face Recognition
+    static const char KEY_QC_FACE_RECOGNITION[];
+    static const char KEY_QC_SUPPORTED_FACE_RECOGNITION[];
+
+    //Indicates number of faces requested by the application.
+    //This value will be rejected if the requested faces
+    //greater than supported by hardware.
+    //Write only.
+    static const char KEY_QC_MAX_NUM_REQUESTED_FACES[];
+
+    //preview flip
+    static const char KEY_QC_PREVIEW_FLIP[];
+    //video flip
+    static const char KEY_QC_VIDEO_FLIP[];
+    //snapshot picture flip
+    static const char KEY_QC_SNAPSHOT_PICTURE_FLIP[];
+
+    static const char KEY_QC_SUPPORTED_FLIP_MODES[];
+
+    //Face Detection, Facial processing requirement
+    static const char KEY_QC_SNAPSHOT_FD_DATA[];
+
+    //Auto HDR enable
+    static const char KEY_QC_AUTO_HDR_ENABLE[];
+    // video rotation
+    static const char KEY_QC_VIDEO_ROTATION[];
+    static const char KEY_QC_SUPPORTED_VIDEO_ROTATION_VALUES[];
+
+    //Redeye Reduction
+    static const char KEY_QC_REDEYE_REDUCTION[];
+    static const char KEY_QC_SUPPORTED_REDEYE_REDUCTION[];
+    static const char EFFECT_EMBOSS[];
+    static const char EFFECT_SKETCH[];
+    static const char EFFECT_NEON[];
+    static const char EFFECT_BEAUTY[];
+
+    //AF Bracketing
+    static const char KEY_QC_AF_BRACKET[];
+    static const char KEY_QC_SUPPORTED_AF_BRACKET_MODES[];
+
+    //Refocus
+    static const char KEY_QC_RE_FOCUS[];
+    static const char KEY_QC_SUPPORTED_RE_FOCUS_MODES[];
+
+    //Chroma Flash
+    static const char KEY_QC_CHROMA_FLASH[];
+    static const char KEY_QC_SUPPORTED_CHROMA_FLASH_MODES[];
+
+    //Opti Zoom
+    static const char KEY_QC_OPTI_ZOOM[];
+    static const char KEY_QC_SUPPORTED_OPTI_ZOOM_MODES[];
+
+    // Auto HDR supported
+    static const char KEY_QC_AUTO_HDR_SUPPORTED[];
+
+    // HDR modes
+    static const char KEY_QC_HDR_MODE[];
+    static const char KEY_QC_SUPPORTED_KEY_QC_HDR_MODES[];
+
+    //True Portrait
+    static const char KEY_QC_TRUE_PORTRAIT[];
+    static const char KEY_QC_SUPPORTED_TRUE_PORTRAIT_MODES[];
+
+    //See more
+    static const char KEY_QC_SEE_MORE[];
+    static const char KEY_QC_SUPPORTED_SEE_MORE_MODES[];
+
+    //Still more
+    static const char KEY_QC_STILL_MORE[];
+    static const char KEY_QC_SUPPORTED_STILL_MORE_MODES[];
+
+    //Noise reduction mode
+    static const char KEY_QC_NOISE_REDUCTION_MODE[];
+    static const char KEY_QC_NOISE_REDUCTION_MODE_VALUES[];
+
+    //Longshot
+    static const char KEY_QC_LONGSHOT_SUPPORTED[];
+
+    //ZSL+HDR
+    static const char KEY_QC_ZSL_HDR_SUPPORTED[];
+
+    // Values for Touch AF/AEC
+    static const char TOUCH_AF_AEC_OFF[];
+    static const char TOUCH_AF_AEC_ON[];
+
+    // Values for Scene mode
+    static const char SCENE_MODE_ASD[];
+    static const char SCENE_MODE_BACKLIGHT[];
+    static const char SCENE_MODE_FLOWERS[];
+    static const char SCENE_MODE_AR[];
+    static const char SCENE_MODE_HDR[];
+    static const char PIXEL_FORMAT_YUV420SP_ADRENO[]; // ADRENO
+    static const char PIXEL_FORMAT_YV12[]; // NV12
+    static const char PIXEL_FORMAT_NV12[]; //NV12
+    static const char QC_PIXEL_FORMAT_NV12_VENUS[]; //NV12 VENUS
+
+    // Values for raw picture format
+    static const char QC_PIXEL_FORMAT_YUV_RAW_8BIT_YUYV[];
+    static const char QC_PIXEL_FORMAT_YUV_RAW_8BIT_YVYU[];
+    static const char QC_PIXEL_FORMAT_YUV_RAW_8BIT_UYVY[];
+    static const char QC_PIXEL_FORMAT_YUV_RAW_8BIT_VYUY[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_14GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_14GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_14RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_14BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_14GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_14GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_14RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_14BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_14GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_14GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_14RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_14BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_14GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_14GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_14RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_14BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_14GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_14GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_14RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_14BGGR[];
+
+    // ISO values
+    static const char ISO_AUTO[];
+    static const char ISO_HJR[];
+    static const char ISO_100[];
+    static const char ISO_200[];
+    static const char ISO_400[];
+    static const char ISO_800[];
+    static const char ISO_1600[];
+    static const char ISO_3200[];
+    static const char ISO_MANUAL[];
+
+    // Values for auto exposure settings.
+    static const char AUTO_EXPOSURE_FRAME_AVG[];
+    static const char AUTO_EXPOSURE_CENTER_WEIGHTED[];
+    static const char AUTO_EXPOSURE_SPOT_METERING[];
+    static const char AUTO_EXPOSURE_SMART_METERING[];
+    static const char AUTO_EXPOSURE_USER_METERING[];
+    static const char AUTO_EXPOSURE_SPOT_METERING_ADV[];
+    static const char AUTO_EXPOSURE_CENTER_WEIGHTED_ADV[];
+
+    // Values for instant AEC modes
+    static const char KEY_QC_INSTANT_AEC_DISABLE[];
+    static const char KEY_QC_INSTANT_AEC_AGGRESSIVE_AEC[];
+    static const char KEY_QC_INSTANT_AEC_FAST_AEC[];
+
+    // Values for instant capture modes
+    static const char KEY_QC_INSTANT_CAPTURE_DISABLE[];
+    static const char KEY_QC_INSTANT_CAPTURE_AGGRESSIVE_AEC[];
+    static const char KEY_QC_INSTANT_CAPTURE_FAST_AEC[];
+
+    static const char KEY_QC_SHARPNESS[];
+    static const char KEY_QC_MIN_SHARPNESS[];
+    static const char KEY_QC_MAX_SHARPNESS[];
+    static const char KEY_QC_SHARPNESS_STEP[];
+    static const char KEY_QC_CONTRAST[];
+    static const char KEY_QC_MIN_CONTRAST[];
+    static const char KEY_QC_MAX_CONTRAST[];
+    static const char KEY_QC_CONTRAST_STEP[];
+    static const char KEY_QC_SATURATION[];
+    static const char KEY_QC_MIN_SATURATION[];
+    static const char KEY_QC_MAX_SATURATION[];
+    static const char KEY_QC_SATURATION_STEP[];
+    static const char KEY_QC_BRIGHTNESS[];
+    static const char KEY_QC_MIN_BRIGHTNESS[];
+    static const char KEY_QC_MAX_BRIGHTNESS[];
+    static const char KEY_QC_BRIGHTNESS_STEP[];
+    static const char KEY_QC_SCE_FACTOR[];
+    static const char KEY_QC_MIN_SCE_FACTOR[];
+    static const char KEY_QC_MAX_SCE_FACTOR[];
+    static const char KEY_QC_SCE_FACTOR_STEP[];
+
+    static const char KEY_QC_HISTOGRAM[] ;
+    static const char KEY_QC_SUPPORTED_HISTOGRAM_MODES[] ;
+    static const char KEY_QC_SUPPORTED_HDR_NEED_1X[];
+    static const char KEY_QC_HDR_NEED_1X[];
+    static const char KEY_QC_VIDEO_HDR[];
+    static const char KEY_QC_VT_ENABLE[];
+    static const char KEY_QC_SUPPORTED_VIDEO_HDR_MODES[];
+    static const char KEY_QC_SENSOR_HDR[];
+    static const char KEY_QC_SUPPORTED_SENSOR_HDR_MODES[];
+    static const char KEY_QC_RDI_MODE[];
+    static const char KEY_QC_SUPPORTED_RDI_MODES[];
+    static const char KEY_QC_SECURE_MODE[];
+    static const char KEY_QC_SUPPORTED_SECURE_MODES[];
+
+    // Values for SKIN TONE ENHANCEMENT
+    static const char SKIN_TONE_ENHANCEMENT_ENABLE[] ;
+    static const char SKIN_TONE_ENHANCEMENT_DISABLE[] ;
+
+    // Values for Denoise
+    static const char DENOISE_OFF[] ;
+    static const char DENOISE_ON[] ;
+
+    // Values for auto focus algorithm settings.
+    static const char FOCUS_ALGO_AUTO[];
+    static const char FOCUS_ALGO_SPOT_METERING[];
+    static const char FOCUS_ALGO_CENTER_WEIGHTED[];
+    static const char FOCUS_ALGO_FRAME_AVERAGE[];
+
+    // Values for AE Bracketing settings.
+    static const char AE_BRACKET_OFF[];
+    static const char AE_BRACKET[];
+
+    // Values for AF Bracketing settings.
+    static const char AF_BRACKET_OFF[];
+    static const char AF_BRACKET_ON[];
+
+    // Values for Refocus settings.
+    static const char RE_FOCUS_OFF[];
+    static const char RE_FOCUS_ON[];
+
+    // Values for Chroma Flash settings.
+    static const char CHROMA_FLASH_OFF[];
+    static const char CHROMA_FLASH_ON[];
+
+    // Values for Opti Zoom settings.
+    static const char OPTI_ZOOM_OFF[];
+    static const char OPTI_ZOOM_ON[];
+
+    // Values for Still More settings.
+    static const char STILL_MORE_OFF[];
+    static const char STILL_MORE_ON[];
+
+    // Values for HDR mode settings.
+    static const char HDR_MODE_SENSOR[];
+    static const char HDR_MODE_MULTI_FRAME[];
+
+    // Values for True Portrait settings.
+    static const char TRUE_PORTRAIT_OFF[];
+    static const char TRUE_PORTRAIT_ON[];
+
+    // Values for HFR settings.
+    static const char VIDEO_HFR_OFF[];
+    static const char VIDEO_HFR_2X[];
+    static const char VIDEO_HFR_3X[];
+    static const char VIDEO_HFR_4X[];
+    static const char VIDEO_HFR_5X[];
+    static const char VIDEO_HFR_6X[];
+    static const char VIDEO_HFR_7X[];
+    static const char VIDEO_HFR_8X[];
+    static const char VIDEO_HFR_9X[];
+
+    // Values for feature on/off settings.
+    static const char VALUE_OFF[];
+    static const char VALUE_ON[];
+
+    // Values for feature enable/disable settings.
+    static const char VALUE_ENABLE[];
+    static const char VALUE_DISABLE[];
+
+    // Values for feature true/false settings.
+    static const char VALUE_FALSE[];
+    static const char VALUE_TRUE[];
+
+    //Values for flip settings
+    static const char FLIP_MODE_OFF[];
+    static const char FLIP_MODE_V[];
+    static const char FLIP_MODE_H[];
+    static const char FLIP_MODE_VH[];
+
+    //Values for CDS Mode
+    static const char CDS_MODE_OFF[];
+    static const char CDS_MODE_ON[];
+    static const char CDS_MODE_AUTO[];
+
+    static const char VALUE_FAST[];
+    static const char VALUE_HIGH_QUALITY[];
+
+    static const char KEY_SELECTED_AUTO_SCENE[];
+
+    // Values for Video rotation
+    static const char VIDEO_ROTATION_0[];
+    static const char VIDEO_ROTATION_90[];
+    static const char VIDEO_ROTATION_180[];
+    static const char VIDEO_ROTATION_270[];
+
+#ifdef TARGET_TS_MAKEUP
+    static const char KEY_TS_MAKEUP[];
+    static const char KEY_TS_MAKEUP_WHITEN[];
+    static const char KEY_TS_MAKEUP_CLEAN[];
+#endif
+    //param key for HFR batch size
+    static const char KEY_QC_VIDEO_BATCH_SIZE[];
+    enum {
+        CAMERA_ORIENTATION_UNKNOWN = 0,
+        CAMERA_ORIENTATION_PORTRAIT = 1,
+        CAMERA_ORIENTATION_LANDSCAPE = 2,
+    };
+
+    template <typename valueType> struct QCameraMap {
+        const char *const desc;
+        valueType val;
+    };
+
+public:
+    QCameraParameters();
+    QCameraParameters(const String8 &params);
+    ~QCameraParameters();
+
+    int32_t allocate();
+    int32_t init(cam_capability_t *,
+                 mm_camera_vtbl_t *,
+                 QCameraAdjustFPS *);
+    void deinit();
+    int32_t initDefaultParameters();
+    int32_t updateParameters(const String8& params, bool &needRestart);
+    int32_t commitParameters();
+
+    char* getParameters();
+    void getPreviewFpsRange(int *min_fps, int *max_fps) const {
+            CameraParameters::getPreviewFpsRange(min_fps, max_fps);
+    }
+#ifdef TARGET_TS_MAKEUP
+    bool getTsMakeupInfo(int &whiteLevel, int &cleanLevel) const;
+#endif
+
+    int getPreviewHalPixelFormat();
+    int32_t getStreamRotation(cam_stream_type_t streamType,
+                               cam_pp_feature_config_t &featureConfig,
+                               cam_dimension_t &dim);
+    int32_t getStreamFormat(cam_stream_type_t streamType,
+                             cam_format_t &format);
+    int32_t getStreamDimension(cam_stream_type_t streamType,
+            cam_dimension_t &dim);
+    void getThumbnailSize(int *width, int *height) const;
+
+
+    uint8_t getZSLBurstInterval();
+    uint8_t getZSLQueueDepth();
+    uint8_t getZSLBackLookCount();
+    uint8_t getMaxUnmatchedFramesInQueue();
+    bool isZSLMode() {return m_bZslMode;};
+    bool isRdiMode() {return m_bRdiMode;};
+    bool isSecureMode() {return m_bSecureMode;};
+    bool isNoDisplayMode() {return m_bNoDisplayMode;};
+    bool isWNREnabled() {return m_bWNROn;};
+    bool isTNRSnapshotEnabled() {return m_bTNRSnapshotOn;};
+    int32_t getCDSMode() {return mCds_mode;};
+    bool isLTMForSeeMoreEnabled() {return m_bLtmForSeeMoreEnabled;};
+    bool isHfrMode() {return m_bHfrMode;};
+    void getHfrFps(cam_fps_range_t &pFpsRange) { pFpsRange = m_hfrFpsRange;};
+    uint8_t getNumOfSnapshots();
+    uint8_t getNumOfRetroSnapshots();
+    uint8_t getNumOfExtraHDRInBufsIfNeeded();
+    uint8_t getNumOfExtraHDROutBufsIfNeeded();
+
+    bool getRecordingHintValue() {return m_bRecordingHint;}; // return local copy of video hint
+    uint32_t getJpegQuality();
+    uint32_t getRotation();
+    uint32_t getDeviceRotation();
+    uint32_t getJpegExifRotation();
+    bool useJpegExifRotation();
+    int32_t getEffectValue();
+    bool isInstantAECEnabled() {return m_bInstantAEC;};
+    bool isInstantCaptureEnabled() {return m_bInstantCapture;};
+    uint8_t getAecFrameBoundValue() {return mAecFrameBound;};
+    uint8_t getAecSkipDisplayFrameBound() {return mAecSkipDisplayFrameBound;};
+
+    int32_t getExifDateTime(String8 &dateTime, String8 &subsecTime);
+    int32_t getExifFocalLength(rat_t *focalLenght);
+    uint16_t getExifIsoSpeed();
+    int32_t getExifGpsProcessingMethod(char *gpsProcessingMethod, uint32_t &count);
+    int32_t getExifLatitude(rat_t *latitude, char *latRef);
+    int32_t getExifLongitude(rat_t *longitude, char *lonRef);
+    int32_t getExifAltitude(rat_t *altitude, char *altRef);
+    int32_t getExifGpsDateTimeStamp(char *gpsDateStamp, uint32_t bufLen, rat_t *gpsTimeStamp);
+    bool isVideoBuffersCached();
+    int32_t updateFocusDistances(cam_focus_distances_info_t *focusDistances);
+
+    bool isAEBracketEnabled();
+    int32_t setAEBracketing();
+    bool isFpsDebugEnabled() {return m_bDebugFps;};
+    bool isHistogramEnabled() {return m_bHistogramEnabled;};
+    bool isSceneSelectionEnabled() {return m_bSceneSelection;};
+    int32_t setSelectedScene(cam_scene_mode_type scene);
+    cam_scene_mode_type getSelectedScene();
+    bool isFaceDetectionEnabled() {return ((m_nFaceProcMask &
+            (CAM_FACE_PROCESS_MASK_DETECTION | CAM_FACE_PROCESS_MASK_FOCUS)) != 0);};
+    int32_t setFaceDetectionOption(bool enabled);
+    int32_t setHistogram(bool enabled);
+    int32_t setFaceDetection(bool enabled, bool initCommit);
+    int32_t setFrameSkip(enum msm_vfe_frame_skip_pattern pattern);
+    qcamera_thermal_mode getThermalMode() {return m_ThermalMode;};
+    int32_t updateRecordingHintValue(int32_t value);
+    int32_t setHDRAEBracket(cam_exp_bracketing_t hdrBracket);
+    bool isHDREnabled();
+    bool isAutoHDREnabled();
+    int32_t stopAEBracket();
+    int32_t updateRAW(cam_dimension_t max_dim);
+    bool isDISEnabled();
+    cam_is_type_t getISType();
+    cam_is_type_t getPreviewISType();
+    uint8_t getMobicatMask();
+
+    cam_focus_mode_type getFocusMode() const {return mFocusMode;};
+    int32_t setNumOfSnapshot();
+    int32_t adjustPreviewFpsRange(cam_fps_range_t *fpsRange);
+    bool isJpegPictureFormat() {return (mPictureFormat == CAM_FORMAT_JPEG);};
+    bool isNV16PictureFormat() {return (mPictureFormat == CAM_FORMAT_YUV_422_NV16);};
+    bool isNV21PictureFormat() {return (mPictureFormat == CAM_FORMAT_YUV_420_NV21);};
+    cam_denoise_process_type_t getDenoiseProcessPlate(cam_intf_parm_type_t type);
+    int32_t getMaxPicSize(cam_dimension_t &dim) { dim = m_maxPicSize; return NO_ERROR; };
+    int getFlipMode(cam_stream_type_t streamType);
+    bool isSnapshotFDNeeded();
+
+    bool isHDR1xFrameEnabled() {return m_bHDR1xFrameEnabled;}
+    bool isSupportedSensorHdrSize(const QCameraParameters& params);
+    bool isYUVFrameInfoNeeded();
+    const char*getFrameFmtString(cam_format_t fmt);
+    bool isHDR1xExtraBufferNeeded() {return m_bHDR1xExtraBufferNeeded;}
+    bool isHDROutputCropEnabled() {return m_bHDROutputCropEnabled;}
+
+    bool isPreviewFlipChanged() { return m_bPreviewFlipChanged; };
+    bool isVideoFlipChanged() { return m_bVideoFlipChanged; };
+    bool isSnapshotFlipChanged() { return m_bSnapshotFlipChanged; };
+    void setHDRSceneEnable(bool bflag);
+    int32_t updateAWBParams(cam_awb_params_t &awb_params);
+
+    const char *getASDStateString(cam_auto_scene_t scene);
+    bool isHDRThumbnailProcessNeeded() { return m_bHDRThumbnailProcessNeeded; };
+    void setMinPpMask(cam_feature_mask_t min_pp_mask) { m_nMinRequiredPpMask = min_pp_mask; };
+    bool setStreamConfigure(bool isCapture, bool previewAsPostview, bool resetConfig);
+    int32_t addOnlineRotation(uint32_t rotation, uint32_t streamId, int32_t device_rotation);
+    uint8_t getNumOfExtraBuffersForImageProc();
+    uint8_t getNumOfExtraBuffersForVideo();
+    uint8_t getNumOfExtraBuffersForPreview();
+    uint32_t getExifBufIndex(uint32_t captureIndex);
+    bool needThumbnailReprocess(cam_feature_mask_t *pFeatureMask);
+    inline bool isUbiFocusEnabled() {return m_bAFBracketingOn && !m_bReFocusOn;};
+    inline bool isChromaFlashEnabled() {return m_bChromaFlashOn;};
+    inline bool isHighQualityNoiseReductionMode() {return m_bHighQualityNoiseReductionMode;};
+    inline bool isTruePortraitEnabled() {return m_bTruePortraitOn;};
+    inline size_t getTPMaxMetaSize() {
+        return m_pCapability->true_portrait_settings_need.meta_max_size;};
+    inline bool isSeeMoreEnabled() {return m_bSeeMoreOn;};
+    inline bool isStillMoreEnabled() {return m_bStillMoreOn;};
+    bool isOptiZoomEnabled();
+
+    int32_t commitAFBracket(cam_af_bracketing_t afBracket);
+    int32_t set3ALock(bool lock3A);
+    int32_t setAndCommitZoom(int zoom_level);
+    uint8_t getBurstCountForAdvancedCapture();
+    uint32_t getNumberInBufsForSingleShot();
+    uint32_t getNumberOutBufsForSingleShot();
+    int32_t setLongshotEnable(bool enable);
+    String8 dump();
+    inline bool isUbiRefocus() {return m_bReFocusOn &&
+            (m_pCapability->refocus_af_bracketing_need.output_count > 1);};
+    inline uint32_t getRefocusMaxMetaSize() {
+            return m_pCapability->refocus_af_bracketing_need.meta_max_size;};
+    inline uint8_t getRefocusOutputCount() {
+            return m_pCapability->refocus_af_bracketing_need.output_count;};
+    inline bool generateThumbFromMain() {return isUbiFocusEnabled() ||
+            isChromaFlashEnabled() || isOptiZoomEnabled() || isUbiRefocus()
+            || isHDREnabled() || isStillMoreEnabled() || isTruePortraitEnabled(); }
+    void updateCurrentFocusPosition(cam_focus_pos_info_t &cur_pos_info);
+    void updateAEInfo(cam_3a_params_t &ae_params);
+    bool isAdvCamFeaturesEnabled() {return isUbiFocusEnabled() ||
+            isChromaFlashEnabled() || m_bOptiZoomOn || isHDREnabled() ||
+            isAEBracketEnabled() || isStillMoreEnabled() || isUbiRefocus();}
+    int32_t setAecLock(const char *aecStr);
+    int32_t updateDebugLevel();
+    bool is4k2kVideoResolution();
+    bool isUBWCEnabled();
+
+    int getBrightness();
+    int32_t updateOisValue(bool oisValue);
+    int32_t setIntEvent(cam_int_evt_params_t params);
+    bool getofflineRAW() {return mOfflineRAW;}
+    int32_t updatePpFeatureMask(cam_stream_type_t stream_type);
+    int32_t getStreamPpMask(cam_stream_type_t stream_type, cam_feature_mask_t &pp_mask);
+    int32_t getSharpness() {return m_nSharpness;};
+    int32_t getEffect() {return mParmEffect;};
+    int32_t updateFlashMode(cam_flash_mode_t flash_mode);
+    int32_t configureAEBracketing(cam_capture_frame_config_t &frame_config);
+    int32_t configureHDRBracketing(cam_capture_frame_config_t &frame_config);
+    int32_t configFrameCapture(bool commitSettings);
+    int32_t resetFrameCapture(bool commitSettings);
+    cam_still_more_t getStillMoreSettings() {return m_stillmore_config;};
+    void setStillMoreSettings(cam_still_more_t stillmore_config)
+            {m_stillmore_config = stillmore_config;};
+    cam_still_more_t getStillMoreCapability()
+            {return m_pCapability->stillmore_settings_need;};
+    cam_dyn_img_data_t getDynamicImgData() { return m_DynamicImgData; }
+    void setDynamicImgData(cam_dyn_img_data_t d) { m_DynamicImgData = d; }
+
+    int32_t getParmZoomLevel(){return mParmZoomLevel;};
+    int8_t  getReprocCount(){return mTotalPPCount;};
+    bool isMultiPassReprocessing();
+    int8_t  getCurPPCount(){return mCurPPCount;};
+    void    setReprocCount();
+    bool    isPostProcScaling();
+    bool    isLLNoiseEnabled();
+    void    setCurPPCount(int8_t count) {mCurPPCount = count;};
+    int32_t setToneMapMode(uint32_t value, bool initCommit);
+    void setTintless(bool enable);
+    uint8_t getLongshotStages();
+    int8_t  getBufBatchCount() {return mBufBatchCnt;};
+    int8_t  getVideoBatchSize() {return mVideoBatchSize;};
+
+    int32_t setManualCaptureMode(
+            QCameraManualCaptureModes value = CAM_MANUAL_CAPTURE_TYPE_OFF);
+    QCameraManualCaptureModes getManualCaptureMode()
+            {return m_ManualCaptureMode;};
+    int64_t getExposureTime() {return m_expTime;};
+
+    cam_capture_frame_config_t getCaptureFrameConfig()
+            { return m_captureFrameConfig; };
+    void setJpegRotation(int rotation);
+    uint32_t getJpegRotation() { return mJpegRotation;};
+
+    void setLowLightLevel(cam_low_light_mode_t value)
+            { m_LowLightLevel = value; };
+    cam_low_light_mode_t getLowLightLevel() {return m_LowLightLevel;};
+    bool getLowLightCapture() { return m_LLCaptureEnabled; };
+
+    /* Dual camera specific */
+    bool getDcrf() { return m_bDcrfEnabled; }
+    int32_t setRelatedCamSyncInfo(
+            cam_sync_related_sensors_event_info_t* info);
+    const cam_sync_related_sensors_event_info_t*
+            getRelatedCamSyncInfo(void);
+    int32_t setFrameSyncEnabled(bool enable);
+    bool isFrameSyncEnabled(void);
+    int32_t getRelatedCamCalibration(
+            cam_related_system_calibration_data_t* calib);
+    int32_t bundleRelatedCameras(bool sync, uint32_t sessionid);
+    uint8_t fdModeInVideo();
+    bool isOEMFeatEnabled() { return m_bOEMFeatEnabled; }
+
+    int32_t setZslMode(bool value);
+    int32_t updateZSLModeValue(bool value);
+
+    bool isReprocScaleEnabled();
+    bool isUnderReprocScaling();
+    int32_t getPicSizeFromAPK(int &width, int &height);
+
+    int32_t checkFeatureConcurrency();
+    int32_t setInstantAEC(uint8_t enable, bool initCommit);
+
+    int32_t getAnalysisInfo(
+        bool fdVideoEnabled,
+        bool hal3,
+        cam_feature_mask_t featureMask,
+        cam_analysis_info_t *pAnalysisInfo);
+private:
+    int32_t setPreviewSize(const QCameraParameters& );
+    int32_t setVideoSize(const QCameraParameters& );
+    int32_t setPictureSize(const QCameraParameters& );
+    int32_t setLiveSnapshotSize(const QCameraParameters& );
+    int32_t setPreviewFormat(const QCameraParameters& );
+    int32_t setPictureFormat(const QCameraParameters& );
+    int32_t setOrientation(const QCameraParameters& );
+    int32_t setJpegThumbnailSize(const QCameraParameters& );
+    int32_t setJpegQuality(const QCameraParameters& );
+    int32_t setPreviewFpsRange(const QCameraParameters& );
+    int32_t setPreviewFrameRate(const QCameraParameters& );
+    int32_t setAutoExposure(const QCameraParameters& );
+    int32_t setEffect(const QCameraParameters& );
+    int32_t setBrightness(const QCameraParameters& );
+    int32_t setFocusMode(const QCameraParameters& );
+    int32_t setFocusPosition(const QCameraParameters& );
+    int32_t setSharpness(const QCameraParameters& );
+    int32_t setSaturation(const QCameraParameters& );
+    int32_t setContrast(const QCameraParameters& );
+    int32_t setSkinToneEnhancement(const QCameraParameters& );
+    int32_t setSceneDetect(const QCameraParameters& );
+    int32_t setVideoHDR(const QCameraParameters& );
+    int32_t setVtEnable(const QCameraParameters& );
+    int32_t setZoom(const QCameraParameters& );
+    int32_t setISOValue(const QCameraParameters& );
+    int32_t setContinuousISO(const QCameraParameters& );
+    int32_t setExposureTime(const QCameraParameters& );
+    int32_t setRotation(const QCameraParameters& );
+    int32_t setVideoRotation(const QCameraParameters& );
+    int32_t setFlash(const QCameraParameters& );
+    int32_t setAecLock(const QCameraParameters& );
+    int32_t setAwbLock(const QCameraParameters& );
+    int32_t setMCEValue(const QCameraParameters& );
+    int32_t setDISValue(const QCameraParameters& params);
+    int32_t setLensShadeValue(const QCameraParameters& );
+    int32_t setExposureCompensation(const QCameraParameters& );
+    int32_t setWhiteBalance(const QCameraParameters& );
+    int32_t setManualWhiteBalance(const QCameraParameters& );
+    int32_t setAntibanding(const QCameraParameters& );
+    int32_t setFocusAreas(const QCameraParameters& );
+    int32_t setMeteringAreas(const QCameraParameters& );
+    int32_t setSceneMode(const QCameraParameters& );
+    int32_t setSelectableZoneAf(const QCameraParameters& );
+    int32_t setAEBracket(const QCameraParameters& );
+    int32_t setAFBracket(const QCameraParameters& );
+    int32_t setReFocus(const QCameraParameters& );
+    int32_t setChromaFlash(const QCameraParameters& );
+    int32_t setOptiZoom(const QCameraParameters& );
+    int32_t setHDRMode(const QCameraParameters& );
+    int32_t setHDRNeed1x(const QCameraParameters& );
+    int32_t setTruePortrait(const QCameraParameters& );
+    int32_t setSeeMore(const QCameraParameters& );
+    int32_t setStillMore(const QCameraParameters& );
+    int32_t setNoiseReductionMode(const QCameraParameters& );
+    int32_t setRedeyeReduction(const QCameraParameters& );
+    int32_t setGpsLocation(const QCameraParameters& );
+    int32_t setRecordingHint(const QCameraParameters& );
+    int32_t setNoDisplayMode(const QCameraParameters& );
+    int32_t setWaveletDenoise(const QCameraParameters& );
+    int32_t setTemporalDenoise(const QCameraParameters&);
+    int32_t setZslMode(const QCameraParameters& );
+    int32_t setZslAttributes(const QCameraParameters& );
+    int32_t setAutoHDR(const QCameraParameters& params);
+    int32_t setCameraMode(const QCameraParameters& );
+    int32_t setSceneSelectionMode(const QCameraParameters& params);
+    int32_t setFaceRecognition(const QCameraParameters& );
+    int32_t setFlip(const QCameraParameters& );
+    int32_t setRetroActiveBurstNum(const QCameraParameters& params);
+    int32_t setBurstLEDOnPeriod(const QCameraParameters& params);
+    int32_t setSnapshotFDReq(const QCameraParameters& );
+    int32_t setStatsDebugMask();
+    int32_t setPAAF();
+    int32_t setTintlessValue(const QCameraParameters& params);
+    int32_t setCDSMode(const QCameraParameters& params);
+    int32_t setInitialExposureIndex(const QCameraParameters& params);
+    int32_t setInstantCapture(const QCameraParameters& params);
+    int32_t setInstantAEC(const QCameraParameters& params);
+    int32_t setMobicat(const QCameraParameters& params);
+    int32_t setRdiMode(const QCameraParameters& );
+    int32_t setSecureMode(const QCameraParameters& );
+    int32_t setCacheVideoBuffers(const QCameraParameters& params);
+    int32_t setCustomParams(const QCameraParameters& params);
+    int32_t setAutoExposure(const char *autoExp);
+    int32_t setPreviewFpsRange(int min_fps,int max_fps,
+            int vid_min_fps,int vid_max_fps);
+    int32_t setEffect(const char *effect);
+    int32_t setBrightness(int brightness);
+    int32_t setFocusMode(const char *focusMode);
+    int32_t setFocusPosition(const char *typeStr, const char *posStr);
+    int32_t setSharpness(int sharpness);
+    int32_t setSaturation(int saturation);
+    int32_t setContrast(int contrast);
+    int32_t setSkinToneEnhancement(int sceFactor);
+    int32_t setSceneDetect(const char *scendDetect);
+    int32_t setVideoHDR(const char *videoHDR);
+    int32_t setSensorSnapshotHDR(const char *snapshotHDR);
+    int32_t setVtEnable(const char *vtEnable);
+    int32_t setZoom(int zoom_level);
+    int32_t setISOValue(const char *isoValue);
+    int32_t setContinuousISO(const char *isoValue);
+    int32_t setExposureTime(const char *expTimeStr);
+    int32_t setFlash(const char *flashStr);
+    int32_t setAwbLock(const char *awbStr);
+    int32_t setMCEValue(const char *mceStr);
+    int32_t setDISValue(const char *disStr);
+    int32_t setHighFrameRate(const int32_t hfrMode);
+    int32_t setLensShadeValue(const char *lensShadeStr);
+    int32_t setExposureCompensation(int expComp);
+    int32_t setWhiteBalance(const char *wbStr);
+    int32_t setWBManualCCT(const char *cctStr);
+    int32_t setManualWBGains(const char *gainStr);
+    int32_t setAntibanding(const char *antiBandingStr);
+    int32_t setFocusAreas(const char *focusAreasStr);
+    int32_t setMeteringAreas(const char *meteringAreasStr);
+    int32_t setSceneMode(const char *sceneModeStr);
+    int32_t setSelectableZoneAf(const char *selZoneAFStr);
+    int32_t setAEBracket(const char *aecBracketStr);
+    int32_t setAFBracket(const char *afBracketStr);
+    int32_t setReFocus(const char *reFocusStr);
+    int32_t setChromaFlash(const char *chromaFlashStr);
+    int32_t setOptiZoom(const char *optiZoomStr);
+    int32_t setHDRMode(const char *optiZoomStr);
+    int32_t setHDRNeed1x(const char *optiZoomStr);
+    int32_t setTruePortrait(const char *truePortraitStr);
+    int32_t setSeeMore(const char *SeeMoreStr);
+    int32_t setStillMore(const char *StillMoreStr);
+    int32_t setNoiseReductionMode(const char *noiseReductionModeStr);
+    int32_t setRedeyeReduction(const char *redeyeStr);
+    int32_t setWaveletDenoise(const char *wnrStr);
+    int32_t setFaceRecognition(const char *faceRecog, uint32_t maxFaces);
+    int32_t setTintlessValue(const char *tintStr);
+    bool UpdateHFRFrameRate(const QCameraParameters& params);
+    int32_t setRdiMode(const char *str);
+    int32_t setSecureMode(const char *str);
+    int32_t setLongshotParam(const QCameraParameters& params);
+    int32_t parseGains(const char *gainStr, double &r_gain,
+            double &g_gain, double &b_gain);
+    int32_t setCacheVideoBuffers(const char *cacheVideoBufStr);
+    int32_t setCDSMode(int32_t cds_mode, bool initCommit);
+    int32_t setEztune();
+    void setLowLightCapture();
+    int setRecordingHintValue(int32_t value); // set local copy of video hint and send to server
+                                              // no change in parameters value
+    int32_t updateFlash(bool commitSettings);
+    int32_t setRawSize(cam_dimension_t &dim);
+    int32_t setMaxPicSize(cam_dimension_t &dim) { m_maxPicSize = dim; return NO_ERROR; };
+    void setBufBatchCount(int8_t buf_cnt);
+    void setVideoBatchSize();
+    void setDcrf();
+    int32_t setStreamPpMask(cam_stream_type_t stream_type, cam_feature_mask_t pp_mask);
+    void setOfflineRAW(bool value = 0);
+    int32_t configureFlash(cam_capture_frame_config_t &frame_config);
+    int32_t configureLowLight(cam_capture_frame_config_t &frame_config);
+    int32_t configureManualCapture(cam_capture_frame_config_t &frame_config);
+
+    bool isTNRPreviewEnabled() {return m_bTNRPreviewOn;};
+    bool isTNRVideoEnabled() {return m_bTNRVideoOn;};
+    bool getFaceDetectionOption() { return  m_bFaceDetectionOn;}
+    bool isAVTimerEnabled();
+    void getLiveSnapshotSize(cam_dimension_t &dim);
+    int32_t getRawSize(cam_dimension_t &dim) {dim = m_rawSize; return NO_ERROR;};
+    int getAutoFlickerMode();
+    bool sendStreamConfigInfo(cam_stream_size_info_t &stream_config_info);
+    inline bool isLowMemoryDevice() {return m_bIsLowMemoryDevice;};
+    bool isPreviewSeeMoreRequired();
+    bool isEztuneEnabled() { return m_bEztuneEnabled; };
+    int32_t getZoomLevel(){return mZoomLevel;};
+    int32_t parse_pair(const char *str, int *first, int *second,
+                       char delim, char **endptr);
+    void parseSizesList(const char *sizesStr, Vector<Size> &sizes);
+    int32_t parseNDimVector(const char *str, int *num, int N, char delim);
+    int32_t parseCameraAreaString(const char *str, int max_num_areas,
+                                  cam_area_t *pAreas, int& num_areas_found);
+    bool validateCameraAreas(cam_area_t *areas, int num_areas);
+    int parseGPSCoordinate(const char *coord_str, rat_t *coord);
+    int32_t getRational(rat_t *rat, int num, int denom);
+    String8 createSizesString(const cam_dimension_t *sizes, size_t len);
+    String8 createHfrValuesString(const cam_hfr_info_t *values, size_t len,
+            const QCameraMap<cam_hfr_mode_t> *map, size_t map_len);
+    String8 createHfrSizesString(const cam_hfr_info_t *values, size_t len);
+    String8 createFpsRangeString(const cam_fps_range_t *fps,
+            size_t len, int &default_fps_index);
+    String8 createFpsString(cam_fps_range_t &fps);
+    String8 createZoomRatioValuesString(uint32_t *zoomRatios, size_t length);
+    int32_t setDualLedCalibration(const QCameraParameters& params);
+    int32_t setAdvancedCaptureMode();
+
+    // ops for batch set/get params with server
+    int32_t initBatchUpdate(parm_buffer_t *p_table);
+    int32_t commitSetBatch();
+    int32_t commitGetBatch();
+
+    // ops to tempororily update parameter entries and commit
+    int32_t updateParamEntry(const char *key, const char *value);
+    int32_t commitParamChanges();
+    void updateViewAngles();
+
+    // Map from strings to values
+    static const cam_dimension_t THUMBNAIL_SIZES_MAP[];
+    static const QCameraMap<cam_auto_exposure_mode_type> AUTO_EXPOSURE_MAP[];
+    static const QCameraMap<cam_aec_convergence_type> INSTANT_CAPTURE_MODES_MAP[];
+    static const QCameraMap<cam_aec_convergence_type> INSTANT_AEC_MODES_MAP[];
+    static const QCameraMap<cam_format_t> PREVIEW_FORMATS_MAP[];
+    static const QCameraMap<cam_format_t> PICTURE_TYPES_MAP[];
+    static const QCameraMap<cam_focus_mode_type> FOCUS_MODES_MAP[];
+    static const QCameraMap<cam_effect_mode_type> EFFECT_MODES_MAP[];
+    static const QCameraMap<cam_scene_mode_type> SCENE_MODES_MAP[];
+    static const QCameraMap<cam_flash_mode_t> FLASH_MODES_MAP[];
+    static const QCameraMap<cam_focus_algorithm_type> FOCUS_ALGO_MAP[];
+    static const QCameraMap<cam_wb_mode_type> WHITE_BALANCE_MODES_MAP[];
+    static const QCameraMap<cam_antibanding_mode_type> ANTIBANDING_MODES_MAP[];
+    static const QCameraMap<cam_iso_mode_type> ISO_MODES_MAP[];
+    static const QCameraMap<cam_hfr_mode_t> HFR_MODES_MAP[];
+    static const QCameraMap<cam_bracket_mode> BRACKETING_MODES_MAP[];
+    static const QCameraMap<int> ON_OFF_MODES_MAP[];
+    static const QCameraMap<int> ENABLE_DISABLE_MODES_MAP[];
+    static const QCameraMap<int> DENOISE_ON_OFF_MODES_MAP[];
+    static const QCameraMap<int> TRUE_FALSE_MODES_MAP[];
+    static const QCameraMap<int> TOUCH_AF_AEC_MODES_MAP[];
+    static const QCameraMap<cam_flip_t> FLIP_MODES_MAP[];
+    static const QCameraMap<int> AF_BRACKETING_MODES_MAP[];
+    static const QCameraMap<int> RE_FOCUS_MODES_MAP[];
+    static const QCameraMap<int> CHROMA_FLASH_MODES_MAP[];
+    static const QCameraMap<int> OPTI_ZOOM_MODES_MAP[];
+    static const QCameraMap<int> TRUE_PORTRAIT_MODES_MAP[];
+    static const QCameraMap<cam_cds_mode_type_t> CDS_MODES_MAP[];
+    static const QCameraMap<int> HDR_MODES_MAP[];
+    static const QCameraMap<int> VIDEO_ROTATION_MODES_MAP[];
+    static const QCameraMap<int> SEE_MORE_MODES_MAP[];
+    static const QCameraMap<int> STILL_MORE_MODES_MAP[];
+    static const QCameraMap<int> NOISE_REDUCTION_MODES_MAP[];
+
+    QCameraReprocScaleParam m_reprocScaleParam;
+    QCameraCommon           mCommon;
+
+    cam_capability_t *m_pCapability;
+    mm_camera_vtbl_t *m_pCamOpsTbl;
+    QCameraHeapMemory *m_pParamHeap;
+    parm_buffer_t     *m_pParamBuf;  // ptr to param buf in m_pParamHeap
+    /* heap for mapping dual cam event info */
+    QCameraHeapMemory *m_pRelCamSyncHeap;
+    /* ptr to sync buffer in m_pRelCamSyncHeap */
+    cam_sync_related_sensors_event_info_t *m_pRelCamSyncBuf;
+    cam_sync_related_sensors_event_info_t m_relCamSyncInfo;
+    bool m_bFrameSyncEnabled;
+    cam_is_type_t mIsType;
+    cam_is_type_t mIsTypePreview;
+
+    bool m_bZslMode;                // if ZSL is enabled
+    bool m_bZslMode_new;
+    bool m_bForceZslMode;
+    bool m_bRecordingHint;          // local copy of recording hint
+    bool m_bRecordingHint_new;
+    bool m_bHistogramEnabled;       // if histogram is enabled
+    bool m_bLongshotEnabled;       // if longshot is enabled
+    uint32_t m_nFaceProcMask;       // face process mask
+    bool m_bFaceDetectionOn;        //  if face Detection turned on by user
+    bool m_bDebugFps;               // if FPS need to be logged
+    cam_focus_mode_type mFocusMode;
+    cam_format_t mPreviewFormat;
+    cam_format_t mAppPreviewFormat;
+    int32_t mPictureFormat;         // could be CAMERA_PICTURE_TYPE_JPEG or cam_format_t
+    bool m_bNeedRestart;            // if preview needs restart after parameters updated
+    bool m_bNoDisplayMode;
+    bool m_bWNROn;
+    bool m_bTNRPreviewOn;
+    bool m_bTNRVideoOn;
+    bool m_bTNRSnapshotOn;
+    bool m_bInited;
+    int m_nRetroBurstNum;
+    int m_nBurstLEDOnPeriod;
+    cam_exp_bracketing_t m_AEBracketingClient;
+    bool m_bUpdateEffects;          // Cause reapplying of effects
+    bool m_bSceneTransitionAuto;    // Indicate that scene has changed to Auto
+    bool m_bPreviewFlipChanged;        // if flip setting for preview changed
+    bool m_bVideoFlipChanged;          // if flip setting for video changed
+    bool m_bSnapshotFlipChanged;       // if flip setting for snapshot changed
+    bool m_bFixedFrameRateSet;      // Indicates that a fixed frame rate is set
+    qcamera_thermal_mode m_ThermalMode; // adjust fps vs adjust frameskip
+    cam_dimension_t m_LiveSnapshotSize; // live snapshot size
+    cam_dimension_t m_rawSize; // live snapshot size
+    cam_dimension_t m_maxPicSize;
+    bool m_bHDREnabled;             // if HDR is enabled
+    bool m_bLocalHDREnabled;   // This flag tells whether HDR enabled or not regarless of APP mode
+    bool m_bAVTimerEnabled;    //if AVTimer is enabled
+    bool m_bDISEnabled;
+    bool m_bOISEnabled;
+    cam_still_more_t m_stillmore_config;
+
+    uint8_t m_MobiMask;
+    QCameraAdjustFPS *m_AdjustFPS;
+    bool m_bHDR1xFrameEnabled;          // if frame with exposure compensation 0 during HDR is enabled
+    bool m_HDRSceneEnabled; // Auto HDR indication
+    bool m_bHDRThumbnailProcessNeeded;        // if thumbnail need to be processed for HDR
+    bool m_bHDR1xExtraBufferNeeded;     // if extra frame with exposure compensation 0 during HDR is needed
+    bool m_bHDROutputCropEnabled;     // if HDR output frame need to be scaled to user resolution
+    DefaultKeyedVector<String8,String8> m_tempMap; // map for temororily store parameters to be set
+    cam_fps_range_t m_default_fps_range;
+    bool m_bAFBracketingOn;
+    bool m_bReFocusOn;
+    bool m_bChromaFlashOn;
+    bool m_bOptiZoomOn;
+    bool m_bSceneSelection;
+    Mutex m_SceneSelectLock;
+    cam_scene_mode_type m_SelectedScene;
+    bool m_bSeeMoreOn;
+    bool m_bStillMoreOn;
+    bool m_bHighQualityNoiseReductionMode;
+    cam_fps_range_t m_hfrFpsRange;
+    bool m_bHfrMode;
+    bool m_bSensorHDREnabled;             // if HDR is enabled
+    bool m_bRdiMode;                // if RDI mode
+    bool m_bSecureMode;
+    bool m_bAeBracketingEnabled;
+    int32_t mFlashValue;
+    int32_t mFlashDaemonValue;
+    int32_t mHfrMode;
+    bool m_bHDRModeSensor;
+    bool mOfflineRAW;
+    bool m_bTruePortraitOn;
+    cam_feature_mask_t m_nMinRequiredPpMask;
+    cam_feature_mask_t mStreamPpMask[CAM_STREAM_TYPE_MAX];
+    int32_t m_nSharpness;
+    int8_t mTotalPPCount;
+    int8_t mCurPPCount;
+    int32_t mZoomLevel;
+    int32_t mParmZoomLevel;
+    bool m_bIsLowMemoryDevice;
+    int32_t mCds_mode;
+    int32_t mParmEffect;
+    cam_capture_frame_config_t m_captureFrameConfig;
+    int8_t mBufBatchCnt;
+    bool m_bEztuneEnabled;
+    bool m_bDcrfEnabled;
+    uint32_t mRotation;
+    uint32_t mJpegRotation;
+    int8_t mVideoBatchSize;
+    bool m_LLCaptureEnabled;
+    cam_low_light_mode_t m_LowLightLevel;
+    bool m_bLtmForSeeMoreEnabled;
+    int64_t m_expTime;
+    bool m_bOEMFeatEnabled;
+    int32_t m_isoValue;
+    QCameraManualCaptureModes m_ManualCaptureMode;
+    cam_dyn_img_data_t m_DynamicImgData;
+    int32_t m_dualLedCalibration;
+    // Param to trigger instant AEC.
+    bool m_bInstantAEC;
+    // Param to trigger instant capture.
+    bool m_bInstantCapture;
+    // Number of frames, camera interface will wait for getting the instant capture frame.
+    uint8_t mAecFrameBound;
+    // Number of preview frames, that HAL will hold without displaying, for instant AEC mode.
+    uint8_t mAecSkipDisplayFrameBound;
+};
+
+}; // namespace qcamera
+
+#endif
diff --git a/msmcobalt/QCamera2/HAL/QCameraParametersIntf.cpp b/msmcobalt/QCamera2/HAL/QCameraParametersIntf.cpp
new file mode 100644
index 0000000..926d5c4
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/QCameraParametersIntf.cpp
@@ -0,0 +1,1407 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#define LOG_TAG "QCameraParametersIntf"
+
+// System dependencies
+#include <new>
+#include <utils/Mutex.h>
+
+// Camera dependencies
+#include "QCameraParameters.h"
+#include "QCameraParametersIntf.h"
+#include "QCameraTrace.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+namespace qcamera {
+
+#define CHECK_PARAM_INTF(impl) LOG_ALWAYS_FATAL_IF(((impl) == NULL), "impl is NULL!")
+
+// Default-construct the interface with no backing implementation.
+// allocate() must be called (and succeed) before any other method;
+// until then every call will trip CHECK_PARAM_INTF.
+QCameraParametersIntf::QCameraParametersIntf() :
+        mImpl(NULL)
+{
+}
+
+// Tear down the backing implementation, if any. The delete is done
+// under mLock so a concurrent forwarding call cannot observe a
+// half-destroyed mImpl; the pointer is nulled afterwards so a stray
+// late call fails the CHECK_PARAM_INTF assert instead of using freed
+// memory.
+QCameraParametersIntf::~QCameraParametersIntf()
+{
+    {
+        Mutex::Autolock lock(mLock);
+        if (mImpl) {
+            delete mImpl;
+            mImpl = NULL;
+        }
+    }
+}
+
+
+// Create the backing QCameraParameters implementation and let it
+// allocate its own resources.
+// Returns NO_MEMORY on allocation failure, otherwise the result of
+// QCameraParameters::allocate().
+int32_t QCameraParametersIntf::allocate()
+{
+    Mutex::Autolock lock(mLock);
+    // Use nothrow new: a plain `new` either throws std::bad_alloc or
+    // (with -fno-exceptions) aborts, so it never returns NULL and the
+    // OOM check below was dead code. std::nothrow makes the NULL
+    // check meaningful.
+    mImpl = new (std::nothrow) QCameraParameters();
+    if (!mImpl) {
+        LOGE("Out of memory");
+        return NO_MEMORY;
+    }
+
+    return mImpl->allocate();
+}
+
+// Initialize the implementation with the camera capabilities, the
+// mm-camera ops table and the FPS-adjust callback. Requires a prior
+// successful allocate().
+int32_t QCameraParametersIntf::init(cam_capability_t *capabilities,
+                                mm_camera_vtbl_t *mmOps,
+                                QCameraAdjustFPS *adjustFPS)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->init(capabilities, mmOps, adjustFPS);
+}
+
+// De-initialize the implementation. Note: mImpl itself is only freed
+// in the destructor, so the object may be re-init()'ed afterwards.
+void QCameraParametersIntf::deinit()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    mImpl->deinit();
+}
+
+// Parse and stage a new flattened parameter string. needRestart is
+// set by the implementation when applying the change requires a
+// preview restart.
+int32_t QCameraParametersIntf::updateParameters(const String8& params, bool &needRestart)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->updateParameters(params, needRestart);
+}
+
+// Commit previously staged parameter changes to the backend.
+int32_t QCameraParametersIntf::commitParameters()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->commitParameters();
+}
+
+// Return the current flattened parameter string (ownership per the
+// implementation's contract).
+// Fixed: the definition carried a duplicated class qualifier
+// ("QCameraParametersIntf::QCameraParametersIntf::"), which is
+// redundant injected-class-name qualification and draws compiler
+// warnings.
+char* QCameraParametersIntf::getParameters()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getParameters();
+}
+
+// ---------------------------------------------------------------------
+// The methods below are thin thread-safe pass-throughs: each takes
+// mLock, asserts via CHECK_PARAM_INTF that allocate() created mImpl,
+// and forwards to the identically named QCameraParameters method.
+// See QCameraParameters for the actual semantics of each call.
+// ---------------------------------------------------------------------
+void QCameraParametersIntf::getPreviewFpsRange(int *min_fps, int *max_fps) const
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    mImpl->getPreviewFpsRange(min_fps, max_fps);
+}
+
+#ifdef TARGET_TS_MAKEUP
+bool QCameraParametersIntf::getTsMakeupInfo(int &whiteLevel, int &cleanLevel) const
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getTsMakeupInfo(whiteLevel, cleanLevel);
+}
+#endif
+
+int QCameraParametersIntf::getPreviewHalPixelFormat()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getPreviewHalPixelFormat();
+}
+
+int32_t QCameraParametersIntf::getStreamRotation(cam_stream_type_t streamType,
+                                            cam_pp_feature_config_t &featureConfig,
+                                            cam_dimension_t &dim)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getStreamRotation(streamType, featureConfig, dim);
+
+}
+
+int32_t QCameraParametersIntf::getStreamFormat(cam_stream_type_t streamType,
+                                            cam_format_t &format)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getStreamFormat(streamType, format);
+}
+
+int32_t QCameraParametersIntf::getStreamDimension(cam_stream_type_t streamType,
+                                               cam_dimension_t &dim)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getStreamDimension(streamType, dim);
+}
+
+void QCameraParametersIntf::getThumbnailSize(int *width, int *height) const
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    mImpl->getThumbnailSize(width, height);
+}
+
+uint8_t QCameraParametersIntf::getZSLBurstInterval()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getZSLBurstInterval();
+}
+
+uint8_t QCameraParametersIntf::getZSLQueueDepth()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getZSLQueueDepth();
+}
+
+uint8_t QCameraParametersIntf::getZSLBackLookCount()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getZSLBackLookCount();
+}
+
+uint8_t QCameraParametersIntf::getMaxUnmatchedFramesInQueue()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getMaxUnmatchedFramesInQueue();
+}
+
+bool QCameraParametersIntf::isZSLMode()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isZSLMode();
+}
+
+bool QCameraParametersIntf::isRdiMode()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isRdiMode();
+}
+
+bool QCameraParametersIntf::isSecureMode()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isSecureMode();
+}
+
+bool QCameraParametersIntf::isNoDisplayMode()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isNoDisplayMode();
+}
+
+bool QCameraParametersIntf::isWNREnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isWNREnabled();
+}
+
+bool QCameraParametersIntf::isTNRSnapshotEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isTNRSnapshotEnabled();
+}
+
+int32_t QCameraParametersIntf::getCDSMode()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getCDSMode();
+}
+
+bool QCameraParametersIntf::isLTMForSeeMoreEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isLTMForSeeMoreEnabled();
+}
+
+bool QCameraParametersIntf::isHfrMode()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isHfrMode();
+}
+
+void QCameraParametersIntf::getHfrFps(cam_fps_range_t &pFpsRange)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    mImpl->getHfrFps(pFpsRange);
+}
+
+// Locked pass-throughs continued: snapshot counts, rotation/JPEG
+// settings, instant-AEC controls and EXIF accessors all forward to
+// QCameraParameters under mLock.
+uint8_t QCameraParametersIntf::getNumOfSnapshots()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getNumOfSnapshots();
+}
+
+uint8_t QCameraParametersIntf::getNumOfRetroSnapshots()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getNumOfRetroSnapshots();
+}
+
+uint8_t QCameraParametersIntf::getNumOfExtraHDRInBufsIfNeeded()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getNumOfExtraHDRInBufsIfNeeded();
+}
+
+uint8_t QCameraParametersIntf::getNumOfExtraHDROutBufsIfNeeded()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getNumOfExtraHDROutBufsIfNeeded();
+}
+
+bool QCameraParametersIntf::getRecordingHintValue()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getRecordingHintValue();
+}
+
+uint32_t QCameraParametersIntf::getJpegQuality()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getJpegQuality();
+}
+
+uint32_t QCameraParametersIntf::getRotation()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getRotation();
+}
+
+uint32_t QCameraParametersIntf::getDeviceRotation()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getDeviceRotation();
+}
+
+uint32_t QCameraParametersIntf::getJpegExifRotation()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getJpegExifRotation();
+}
+
+bool QCameraParametersIntf::useJpegExifRotation()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->useJpegExifRotation();
+}
+
+int32_t QCameraParametersIntf::getEffectValue()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getEffectValue();
+}
+
+bool QCameraParametersIntf::isInstantAECEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isInstantAECEnabled();
+}
+
+bool QCameraParametersIntf::isInstantCaptureEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isInstantCaptureEnabled();
+}
+
+uint8_t QCameraParametersIntf::getAecFrameBoundValue()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getAecFrameBoundValue();
+}
+
+uint8_t QCameraParametersIntf::getAecSkipDisplayFrameBound()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getAecSkipDisplayFrameBound();
+}
+
+int32_t QCameraParametersIntf::getExifDateTime(
+        String8 &dateTime, String8 &subsecTime)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getExifDateTime(dateTime, subsecTime);
+}
+
+int32_t QCameraParametersIntf::getExifFocalLength(rat_t *focalLength)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getExifFocalLength(focalLength);
+}
+
+uint16_t QCameraParametersIntf::getExifIsoSpeed()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getExifIsoSpeed();
+}
+
+int32_t QCameraParametersIntf::getExifGpsProcessingMethod(char *gpsProcessingMethod, uint32_t &count)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getExifGpsProcessingMethod(gpsProcessingMethod, count);
+}
+
+int32_t QCameraParametersIntf::getExifLatitude(rat_t *latitude, char *latRef)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getExifLatitude(latitude, latRef);
+}
+
+int32_t QCameraParametersIntf::getExifLongitude(rat_t *longitude, char *lonRef)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getExifLongitude(longitude, lonRef);
+}
+
+int32_t QCameraParametersIntf::getExifAltitude(rat_t *altitude, char *altRef)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getExifAltitude(altitude, altRef);
+}
+
+int32_t QCameraParametersIntf::getExifGpsDateTimeStamp(char *gpsDateStamp, uint32_t bufLen, rat_t *gpsTimeStamp)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getExifGpsDateTimeStamp(gpsDateStamp, bufLen, gpsTimeStamp);
+}
+
+// Locked pass-throughs continued: focus/bracketing, histogram, scene
+// selection, face detection and HDR controls.
+bool QCameraParametersIntf::isVideoBuffersCached()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isVideoBuffersCached();
+}
+
+int32_t QCameraParametersIntf::updateFocusDistances(cam_focus_distances_info_t *focusDistances)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->updateFocusDistances(focusDistances);
+}
+
+bool QCameraParametersIntf::isAEBracketEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isAEBracketEnabled();
+}
+
+int32_t QCameraParametersIntf::setAEBracketing()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->setAEBracketing();
+}
+
+bool QCameraParametersIntf::isFpsDebugEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isFpsDebugEnabled();
+}
+
+bool QCameraParametersIntf::isHistogramEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isHistogramEnabled();
+}
+
+bool QCameraParametersIntf::isSceneSelectionEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isSceneSelectionEnabled();
+}
+
+int32_t QCameraParametersIntf::setSelectedScene(cam_scene_mode_type scene)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->setSelectedScene(scene);
+}
+
+cam_scene_mode_type QCameraParametersIntf::getSelectedScene()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getSelectedScene();
+}
+
+bool QCameraParametersIntf::isFaceDetectionEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isFaceDetectionEnabled();
+}
+
+int32_t QCameraParametersIntf::setFaceDetectionOption(bool enabled)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->setFaceDetectionOption(enabled);
+}
+
+int32_t QCameraParametersIntf::setHistogram(bool enabled)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->setHistogram(enabled);
+}
+
+int32_t QCameraParametersIntf::setFaceDetection(bool enabled, bool initCommit)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->setFaceDetection(enabled, initCommit);
+}
+
+int32_t QCameraParametersIntf::setFrameSkip(enum msm_vfe_frame_skip_pattern pattern)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->setFrameSkip(pattern);
+}
+
+qcamera_thermal_mode QCameraParametersIntf::getThermalMode()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getThermalMode();
+}
+
+int32_t QCameraParametersIntf::updateRecordingHintValue(int32_t value)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->updateRecordingHintValue(value);
+}
+
+int32_t QCameraParametersIntf::setHDRAEBracket(cam_exp_bracketing_t hdrBracket)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->setHDRAEBracket(hdrBracket);
+}
+
+bool QCameraParametersIntf::isHDREnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isHDREnabled();
+}
+
+bool QCameraParametersIntf::isAutoHDREnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isAutoHDREnabled();
+}
+
+int32_t QCameraParametersIntf::stopAEBracket()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->stopAEBracket();
+}
+
+int32_t QCameraParametersIntf::updateRAW(cam_dimension_t max_dim)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->updateRAW(max_dim);
+}
+
+bool QCameraParametersIntf::isDISEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isDISEnabled();
+}
+
+// Locked pass-throughs continued: IS type, focus mode, picture-format
+// predicates, flip/HDR state queries and AWB/ASD helpers.
+cam_is_type_t QCameraParametersIntf::getISType()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getISType();
+}
+
+cam_is_type_t QCameraParametersIntf::getPreviewISType()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getPreviewISType();
+}
+
+uint8_t QCameraParametersIntf::getMobicatMask()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getMobicatMask();
+}
+
+cam_focus_mode_type QCameraParametersIntf::getFocusMode() const
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getFocusMode();
+}
+
+int32_t QCameraParametersIntf::setNumOfSnapshot()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->setNumOfSnapshot();
+}
+
+int32_t QCameraParametersIntf::adjustPreviewFpsRange(cam_fps_range_t *fpsRange)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->adjustPreviewFpsRange(fpsRange);
+}
+
+bool QCameraParametersIntf::isJpegPictureFormat()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isJpegPictureFormat();
+}
+
+bool QCameraParametersIntf::isNV16PictureFormat()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isNV16PictureFormat();
+}
+
+bool QCameraParametersIntf::isNV21PictureFormat()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isNV21PictureFormat();
+}
+
+cam_denoise_process_type_t QCameraParametersIntf::getDenoiseProcessPlate(
+        cam_intf_parm_type_t type)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getDenoiseProcessPlate(type);
+}
+
+int32_t QCameraParametersIntf::getMaxPicSize(cam_dimension_t &dim)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getMaxPicSize(dim);
+}
+
+int QCameraParametersIntf::getFlipMode(cam_stream_type_t streamType)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getFlipMode(streamType);
+}
+
+bool QCameraParametersIntf::isSnapshotFDNeeded()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isSnapshotFDNeeded();
+}
+
+bool QCameraParametersIntf::isHDR1xFrameEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isHDR1xFrameEnabled();
+}
+
+bool QCameraParametersIntf::isYUVFrameInfoNeeded()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isYUVFrameInfoNeeded();
+}
+
+const char* QCameraParametersIntf::getFrameFmtString(cam_format_t fmt)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getFrameFmtString(fmt);
+}
+
+bool QCameraParametersIntf::isHDR1xExtraBufferNeeded()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isHDR1xExtraBufferNeeded();
+}
+
+bool QCameraParametersIntf::isHDROutputCropEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isHDROutputCropEnabled();
+}
+
+bool QCameraParametersIntf::isPreviewFlipChanged()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isPreviewFlipChanged();
+}
+
+bool QCameraParametersIntf::isVideoFlipChanged()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isVideoFlipChanged();
+}
+
+bool QCameraParametersIntf::isSnapshotFlipChanged()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isSnapshotFlipChanged();
+}
+
+void QCameraParametersIntf::setHDRSceneEnable(bool bflag)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    mImpl->setHDRSceneEnable(bflag);
+}
+
+int32_t QCameraParametersIntf::updateAWBParams(cam_awb_params_t &awb_params)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->updateAWBParams(awb_params);
+}
+
+const char * QCameraParametersIntf::getASDStateString(cam_auto_scene_t scene)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getASDStateString(scene);
+}
+
+bool QCameraParametersIntf::isHDRThumbnailProcessNeeded()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isHDRThumbnailProcessNeeded();
+}
+
+// Locked pass-throughs continued: post-processing masks, stream
+// configuration, extra-buffer counts, advanced-capture feature queries
+// and zoom/3A controls.
+void QCameraParametersIntf::setMinPpMask(cam_feature_mask_t min_pp_mask)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    mImpl->setMinPpMask(min_pp_mask);
+}
+
+bool QCameraParametersIntf::setStreamConfigure(bool isCapture,
+        bool previewAsPostview, bool resetConfig)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->setStreamConfigure(isCapture,
+            previewAsPostview, resetConfig);
+}
+
+int32_t QCameraParametersIntf::addOnlineRotation(uint32_t rotation,
+        uint32_t streamId, int32_t device_rotation)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->addOnlineRotation(rotation, streamId, device_rotation);
+}
+
+uint8_t QCameraParametersIntf::getNumOfExtraBuffersForImageProc()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getNumOfExtraBuffersForImageProc();
+}
+
+uint8_t QCameraParametersIntf::getNumOfExtraBuffersForVideo()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getNumOfExtraBuffersForVideo();
+}
+
+uint8_t QCameraParametersIntf::getNumOfExtraBuffersForPreview()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getNumOfExtraBuffersForPreview();
+}
+
+uint32_t QCameraParametersIntf::getExifBufIndex(uint32_t captureIndex)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getExifBufIndex(captureIndex);
+}
+
+bool QCameraParametersIntf::needThumbnailReprocess(cam_feature_mask_t *pFeatureMask)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->needThumbnailReprocess(pFeatureMask);
+}
+
+bool QCameraParametersIntf::isUbiFocusEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isUbiFocusEnabled();
+}
+
+bool QCameraParametersIntf::isChromaFlashEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isChromaFlashEnabled();
+}
+
+bool QCameraParametersIntf::isHighQualityNoiseReductionMode()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isHighQualityNoiseReductionMode();
+}
+
+bool QCameraParametersIntf::isTruePortraitEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isTruePortraitEnabled();
+}
+
+size_t QCameraParametersIntf::getTPMaxMetaSize()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getTPMaxMetaSize();
+}
+
+bool QCameraParametersIntf::isSeeMoreEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isSeeMoreEnabled();
+}
+
+bool QCameraParametersIntf::isStillMoreEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isStillMoreEnabled();
+}
+
+bool QCameraParametersIntf::isOptiZoomEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isOptiZoomEnabled();
+}
+
+int32_t QCameraParametersIntf::commitAFBracket(cam_af_bracketing_t afBracket)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->commitAFBracket(afBracket);
+}
+
+
+int32_t QCameraParametersIntf::set3ALock(bool lock3A)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->set3ALock(lock3A);
+}
+
+int32_t QCameraParametersIntf::setAndCommitZoom(int zoom_level)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->setAndCommitZoom(zoom_level);
+}
+uint8_t QCameraParametersIntf::getBurstCountForAdvancedCapture()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getBurstCountForAdvancedCapture();
+}
+uint32_t QCameraParametersIntf::getNumberInBufsForSingleShot()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getNumberInBufsForSingleShot();
+}
+uint32_t QCameraParametersIntf::getNumberOutBufsForSingleShot()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getNumberOutBufsForSingleShot();
+}
+int32_t QCameraParametersIntf::setLongshotEnable(bool enable)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->setLongshotEnable(enable);
+}
+String8 QCameraParametersIntf::dump()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->dump();
+}
+bool QCameraParametersIntf::isUbiRefocus()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isUbiRefocus();
+}
+uint32_t QCameraParametersIntf::getRefocusMaxMetaSize()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getRefocusMaxMetaSize();
+}
+uint8_t QCameraParametersIntf::getRefocusOutputCount()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getRefocusOutputCount();
+}
+
+bool QCameraParametersIntf::generateThumbFromMain()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->generateThumbFromMain();
+}
+
+void QCameraParametersIntf::updateCurrentFocusPosition(cam_focus_pos_info_t &cur_pos_info)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    mImpl->updateCurrentFocusPosition(cur_pos_info);
+}
+
+void QCameraParametersIntf::updateAEInfo(cam_3a_params_t &ae_params)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    mImpl->updateAEInfo(ae_params);
+}
+
+bool QCameraParametersIntf::isAdvCamFeaturesEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isAdvCamFeaturesEnabled();
+}
+
+int32_t QCameraParametersIntf::setAecLock(const char *aecStr)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->setAecLock(aecStr);
+}
+
+int32_t QCameraParametersIntf::updateDebugLevel()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->updateDebugLevel();
+}
+
+bool QCameraParametersIntf::is4k2kVideoResolution()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->is4k2kVideoResolution();
+}
+
+bool QCameraParametersIntf::isUBWCEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isUBWCEnabled();
+}
+int QCameraParametersIntf::getBrightness()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getBrightness();
+}
+
+int32_t QCameraParametersIntf::updateOisValue(bool oisValue)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->updateOisValue(oisValue);
+}
+
+int32_t QCameraParametersIntf::setIntEvent(cam_int_evt_params_t params)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->setIntEvent(params);
+}
+
+bool QCameraParametersIntf::getofflineRAW()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getofflineRAW();
+}
+
+int32_t QCameraParametersIntf::updatePpFeatureMask(cam_stream_type_t stream_type)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->updatePpFeatureMask(stream_type);
+}
+
+int32_t QCameraParametersIntf::getStreamPpMask(cam_stream_type_t stream_type,
+        cam_feature_mask_t &pp_mask)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getStreamPpMask(stream_type, pp_mask);
+}
+
+int32_t QCameraParametersIntf::getSharpness()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getSharpness();
+}
+
+int32_t QCameraParametersIntf::getEffect()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getEffect();
+}
+
+int32_t QCameraParametersIntf::updateFlashMode(cam_flash_mode_t flash_mode)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->updateFlashMode(flash_mode);
+}
+
+int32_t QCameraParametersIntf::configureAEBracketing(cam_capture_frame_config_t &frame_config)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->configureAEBracketing(frame_config);
+}
+
+int32_t QCameraParametersIntf::configureHDRBracketing(cam_capture_frame_config_t &frame_config)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->configureHDRBracketing(frame_config);
+}
+
+int32_t QCameraParametersIntf::configFrameCapture(bool commitSettings)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->configFrameCapture(commitSettings);
+}
+
+int32_t QCameraParametersIntf::resetFrameCapture(bool commitSettings)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->resetFrameCapture(commitSettings);
+}
+
+cam_still_more_t QCameraParametersIntf::getStillMoreSettings()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getStillMoreSettings();
+}
+
+void QCameraParametersIntf::setStillMoreSettings(cam_still_more_t stillmore_config)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    mImpl->setStillMoreSettings(stillmore_config);
+}
+
+cam_still_more_t QCameraParametersIntf::getStillMoreCapability()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getStillMoreCapability();
+}
+
+cam_dyn_img_data_t QCameraParametersIntf::getDynamicImgData()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getDynamicImgData();
+}
+
+void QCameraParametersIntf::setDynamicImgData(cam_dyn_img_data_t d)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    mImpl->setDynamicImgData(d);
+}
+
+int32_t QCameraParametersIntf::getParmZoomLevel()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getParmZoomLevel();
+}
+
+
+int8_t QCameraParametersIntf::getReprocCount()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getReprocCount();
+}
+
+
+int8_t QCameraParametersIntf::getCurPPCount()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getCurPPCount();
+}
+
+
+void QCameraParametersIntf::setReprocCount()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    mImpl->setReprocCount();
+}
+
+
+bool QCameraParametersIntf::isPostProcScaling()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isPostProcScaling();
+}
+
+
+bool QCameraParametersIntf::isLLNoiseEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isLLNoiseEnabled();
+}
+
+
+void QCameraParametersIntf::setCurPPCount(int8_t count)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    mImpl->setCurPPCount(count);
+}
+
+int32_t QCameraParametersIntf::setToneMapMode(uint32_t value, bool initCommit)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->setToneMapMode(value, initCommit);
+}
+
+void QCameraParametersIntf::setTintless(bool enable)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    mImpl->setTintless(enable);
+}
+
+uint8_t QCameraParametersIntf::getLongshotStages()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getLongshotStages();
+}
+
+int8_t  QCameraParametersIntf::getBufBatchCount()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getBufBatchCount();
+}
+
+int8_t  QCameraParametersIntf::getVideoBatchSize()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getVideoBatchSize();
+}
+
+int32_t QCameraParametersIntf::setManualCaptureMode(
+        QCameraManualCaptureModes value)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->setManualCaptureMode(value);
+}
+
+QCameraManualCaptureModes QCameraParametersIntf::getManualCaptureMode()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getManualCaptureMode();
+}
+
+int64_t QCameraParametersIntf::getExposureTime()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getExposureTime();
+}
+
+cam_capture_frame_config_t QCameraParametersIntf::getCaptureFrameConfig()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getCaptureFrameConfig();
+}
+
+void QCameraParametersIntf::setJpegRotation(int rotation)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    mImpl->setJpegRotation(rotation);
+}
+
+uint32_t QCameraParametersIntf::getJpegRotation()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getJpegRotation();
+}
+
+void QCameraParametersIntf::setLowLightLevel(cam_low_light_mode_t value)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    mImpl->setLowLightLevel(value);
+}
+
+cam_low_light_mode_t QCameraParametersIntf::getLowLightLevel()
+{
+    // Fix: this was the only wrapper in the facade that did not take mLock
+    // before touching mImpl, racing against concurrent facade calls.
+    // (NOTE: +3 lines here — the enclosing @@ hunk count must be regenerated.)
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getLowLightLevel();
+}
+
+bool QCameraParametersIntf::getLowLightCapture()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getLowLightCapture();
+}
+
+bool QCameraParametersIntf::getDcrf()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getDcrf();
+}
+
+int32_t QCameraParametersIntf::setRelatedCamSyncInfo(
+        cam_sync_related_sensors_event_info_t* info)
+{
+    Mutex::Autolock lock(mLock); // serialize with all other facade calls
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->setRelatedCamSyncInfo(info); // thin pass-through to impl
+}
+
+const cam_sync_related_sensors_event_info_t*
+        QCameraParametersIntf::getRelatedCamSyncInfo(void)
+{
+    Mutex::Autolock lock(mLock); // serialize with all other facade calls
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getRelatedCamSyncInfo(); // thin pass-through to impl
+}
+
+int32_t QCameraParametersIntf::setFrameSyncEnabled(
+        bool enable)
+{
+    Mutex::Autolock lock(mLock); // serialize with all other facade calls
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->setFrameSyncEnabled(enable); // thin pass-through to impl
+}
+
+bool QCameraParametersIntf::isFrameSyncEnabled(void)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isFrameSyncEnabled();
+}
+
+int32_t QCameraParametersIntf::getRelatedCamCalibration(
+        cam_related_system_calibration_data_t* calib)
+{
+    Mutex::Autolock lock(mLock); // serialize with all other facade calls
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getRelatedCamCalibration(calib); // thin pass-through to impl
+}
+
+int32_t QCameraParametersIntf::bundleRelatedCameras(bool sync, uint32_t sessionid)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->bundleRelatedCameras(sync, sessionid);
+}
+
+uint8_t QCameraParametersIntf::fdModeInVideo()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->fdModeInVideo();
+}
+
+bool QCameraParametersIntf::isOEMFeatEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isOEMFeatEnabled();
+}
+
+int32_t QCameraParametersIntf::setZslMode(bool value)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->setZslMode(value);
+}
+
+int32_t QCameraParametersIntf::updateZSLModeValue(bool value)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->updateZSLModeValue(value);
+}
+
+bool QCameraParametersIntf::isReprocScaleEnabled()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isReprocScaleEnabled();
+}
+
+bool QCameraParametersIntf::isUnderReprocScaling()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->isUnderReprocScaling();
+}
+
+int32_t QCameraParametersIntf::getPicSizeFromAPK(int &width, int &height)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getPicSizeFromAPK(width, height);
+}
+
+int32_t QCameraParametersIntf::checkFeatureConcurrency()
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->checkFeatureConcurrency();
+}
+
+int32_t QCameraParametersIntf::setInstantAEC(uint8_t enable, bool initCommit)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->setInstantAEC(enable, initCommit);
+}
+
+int32_t QCameraParametersIntf::getAnalysisInfo(
+        bool fdVideoEnabled,
+        bool hal3,
+        cam_feature_mask_t featureMask,
+        cam_analysis_info_t *pAnalysisInfo)
+{
+    Mutex::Autolock lock(mLock);
+    CHECK_PARAM_INTF(mImpl);
+    return mImpl->getAnalysisInfo(fdVideoEnabled, hal3, featureMask, pAnalysisInfo);
+}
+
+}; // namespace qcamera
diff --git a/msmcobalt/QCamera2/HAL/QCameraParametersIntf.h b/msmcobalt/QCamera2/HAL/QCameraParametersIntf.h
new file mode 100644
index 0000000..fb09ce8
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/QCameraParametersIntf.h
@@ -0,0 +1,308 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef ANDROID_HARDWARE_QCAMERA_PARAMETERS_INTF_H
+#define ANDROID_HARDWARE_QCAMERA_PARAMETERS_INTF_H
+
+#include <utils/String8.h>
+#include <utils/Mutex.h>
+#include "cam_intf.h"
+#include "cam_types.h"
+#include "QCameraThermalAdapter.h"
+
+extern "C" {
+#include <mm_camera_interface.h>
+#include <mm_jpeg_interface.h>
+}
+
+using namespace android;
+
+namespace qcamera {
+
+typedef cam_manual_capture_type QCameraManualCaptureModes;
+
+class QCameraAdjustFPS // abstract hook: owner recalculates an FPS range on request
+{
+public: // NOTE(review): parameter-name typo fixed (FPs -> FPS); declaration-only, no ABI impact
+    virtual int recalcFPSRange(int &minFPS, int &maxFPS, // in/out preview fps bounds
+            const float &minVideoFPS, const float &maxVideoFPS, // requested video fps range
+            cam_fps_range_t &adjustedRange) = 0; // result written here by the implementor
+    virtual ~QCameraAdjustFPS() {}
+};
+
+class QCameraParameters;
+
+class QCameraParametersIntf
+{
+public:
+
+    // member variables
+    QCameraParametersIntf();
+    ~QCameraParametersIntf();
+
+    int32_t allocate();
+    int32_t init(cam_capability_t *capabilities,
+                 mm_camera_vtbl_t *mmOps,
+                 QCameraAdjustFPS *adjustFPS);
+
+    void deinit();
+    int32_t updateParameters(const String8& params, bool &needRestart);
+    int32_t commitParameters();
+
+    char* getParameters();
+    void getPreviewFpsRange(int *min_fps, int *max_fps) const;
+#ifdef TARGET_TS_MAKEUP
+    bool getTsMakeupInfo(int &whiteLevel, int &cleanLevel) const;
+#endif
+
+    int getPreviewHalPixelFormat();
+    int32_t getStreamRotation(cam_stream_type_t streamType,
+            cam_pp_feature_config_t &featureConfig,
+            cam_dimension_t &dim);
+    int32_t getStreamFormat(cam_stream_type_t streamType,
+            cam_format_t &format);
+    int32_t getStreamDimension(cam_stream_type_t streamType,
+            cam_dimension_t &dim);
+
+    void getThumbnailSize(int *width, int *height) const;
+    uint8_t getZSLBurstInterval();
+    uint8_t getZSLQueueDepth();
+    uint8_t getZSLBackLookCount();
+    uint8_t getMaxUnmatchedFramesInQueue();
+    bool isZSLMode();
+    bool isRdiMode();
+    bool isSecureMode();
+    bool isNoDisplayMode();
+    bool isWNREnabled();
+    bool isTNRSnapshotEnabled();
+    int32_t getCDSMode();
+    bool isLTMForSeeMoreEnabled();
+    bool isHfrMode();
+    void getHfrFps(cam_fps_range_t &pFpsRange);
+    uint8_t getNumOfSnapshots();
+    uint8_t getNumOfRetroSnapshots();
+    uint8_t getNumOfExtraHDRInBufsIfNeeded();
+    uint8_t getNumOfExtraHDROutBufsIfNeeded();
+
+    bool getRecordingHintValue();
+    uint32_t getJpegQuality();
+    uint32_t getRotation();
+    uint32_t getDeviceRotation();
+    uint32_t getJpegExifRotation();
+    bool useJpegExifRotation();
+    int32_t getEffectValue();
+    bool isInstantAECEnabled();
+    bool isInstantCaptureEnabled();
+    uint8_t getAecFrameBoundValue();
+    uint8_t getAecSkipDisplayFrameBound();
+
+    int32_t getExifDateTime(String8 &dateTime, String8 &subsecTime);
+    int32_t getExifFocalLength(rat_t *focalLenght);
+    uint16_t getExifIsoSpeed();
+    int32_t getExifGpsProcessingMethod(char *gpsProcessingMethod,
+            uint32_t &count);
+    int32_t getExifLatitude(rat_t *latitude, char *latRef);
+    int32_t getExifLongitude(rat_t *longitude, char *lonRef);
+    int32_t getExifAltitude(rat_t *altitude, char *altRef);
+    int32_t getExifGpsDateTimeStamp(char *gpsDateStamp,
+            uint32_t bufLen, rat_t *gpsTimeStamp);
+    bool isVideoBuffersCached();
+    int32_t updateFocusDistances(cam_focus_distances_info_t *focusDistances);
+
+    bool isAEBracketEnabled();
+    int32_t setAEBracketing();
+    bool isFpsDebugEnabled();
+    bool isHistogramEnabled();
+    bool isSceneSelectionEnabled();
+    int32_t setSelectedScene(cam_scene_mode_type scene);
+    cam_scene_mode_type getSelectedScene();
+    bool isFaceDetectionEnabled();
+    int32_t setFaceDetectionOption(bool enabled);
+    int32_t setHistogram(bool enabled);
+    int32_t setFaceDetection(bool enabled, bool initCommit);
+    int32_t setFrameSkip(enum msm_vfe_frame_skip_pattern pattern);
+    qcamera_thermal_mode getThermalMode();
+    int32_t updateRecordingHintValue(int32_t value);
+    int32_t setHDRAEBracket(cam_exp_bracketing_t hdrBracket);
+    bool isHDREnabled();
+    bool isAutoHDREnabled();
+    int32_t stopAEBracket();
+    int32_t updateRAW(cam_dimension_t max_dim);
+    bool isDISEnabled();
+    cam_is_type_t getISType();
+    cam_is_type_t getPreviewISType();
+    uint8_t getMobicatMask();
+
+    cam_focus_mode_type getFocusMode() const;
+    int32_t setNumOfSnapshot();
+    int32_t adjustPreviewFpsRange(cam_fps_range_t *fpsRange);
+    bool isJpegPictureFormat();
+    bool isNV16PictureFormat();
+    bool isNV21PictureFormat();
+    cam_denoise_process_type_t getDenoiseProcessPlate(cam_intf_parm_type_t type);
+    int32_t getMaxPicSize(cam_dimension_t &dim);
+    int getFlipMode(cam_stream_type_t streamType);
+    bool isSnapshotFDNeeded();
+
+    bool isHDR1xFrameEnabled();
+    bool isYUVFrameInfoNeeded();
+    const char*getFrameFmtString(cam_format_t fmt);
+    bool isHDR1xExtraBufferNeeded();
+    bool isHDROutputCropEnabled();
+
+    bool isPreviewFlipChanged();
+    bool isVideoFlipChanged();
+    bool isSnapshotFlipChanged();
+    void setHDRSceneEnable(bool bflag);
+    int32_t updateAWBParams(cam_awb_params_t &awb_params);
+
+    const char *getASDStateString(cam_auto_scene_t scene);
+    bool isHDRThumbnailProcessNeeded();
+    void setMinPpMask(cam_feature_mask_t min_pp_mask);
+    bool setStreamConfigure(bool isCapture,
+            bool previewAsPostview, bool resetConfig);
+    int32_t addOnlineRotation(uint32_t rotation, uint32_t streamId,
+            int32_t device_rotation);
+    uint8_t getNumOfExtraBuffersForImageProc();
+    uint8_t getNumOfExtraBuffersForVideo();
+    uint8_t getNumOfExtraBuffersForPreview();
+    uint32_t getExifBufIndex(uint32_t captureIndex);
+    bool needThumbnailReprocess(cam_feature_mask_t *pFeatureMask);
+    bool isUbiFocusEnabled();
+    bool isChromaFlashEnabled();
+    bool isHighQualityNoiseReductionMode();
+    bool isTruePortraitEnabled();
+    size_t getTPMaxMetaSize();
+    bool isSeeMoreEnabled();
+    bool isStillMoreEnabled();
+    bool isOptiZoomEnabled();
+
+    int32_t commitAFBracket(cam_af_bracketing_t afBracket);
+    int32_t set3ALock(bool lock3A);
+    int32_t setAndCommitZoom(int zoom_level);
+    uint8_t getBurstCountForAdvancedCapture();
+    uint32_t getNumberInBufsForSingleShot();
+    uint32_t getNumberOutBufsForSingleShot();
+    int32_t setLongshotEnable(bool enable);
+    String8 dump();
+    bool isUbiRefocus();
+    uint32_t getRefocusMaxMetaSize();
+    uint8_t getRefocusOutputCount();
+    bool generateThumbFromMain();
+    void updateCurrentFocusPosition(cam_focus_pos_info_t &cur_pos_info);
+    void updateAEInfo(cam_3a_params_t &ae_params);
+    bool isDisplayFrameNeeded();
+    bool isAdvCamFeaturesEnabled();
+    int32_t setAecLock(const char *aecStr);
+    int32_t updateDebugLevel();
+    bool is4k2kVideoResolution();
+    bool isUBWCEnabled();
+
+    int getBrightness();
+    int32_t updateOisValue(bool oisValue);
+    int32_t setIntEvent(cam_int_evt_params_t params);
+    bool getofflineRAW();
+    int32_t updatePpFeatureMask(cam_stream_type_t stream_type);
+    int32_t getStreamPpMask(cam_stream_type_t stream_type, cam_feature_mask_t &pp_mask);
+    int32_t getSharpness();
+    int32_t getEffect();
+    int32_t updateFlashMode(cam_flash_mode_t flash_mode);
+    int32_t configureAEBracketing(cam_capture_frame_config_t &frame_config);
+    int32_t configureHDRBracketing(cam_capture_frame_config_t &frame_config);
+    int32_t configFrameCapture(bool commitSettings);
+    int32_t resetFrameCapture(bool commitSettings);
+    cam_still_more_t getStillMoreSettings();
+    void setStillMoreSettings(cam_still_more_t stillmore_config);
+    cam_still_more_t getStillMoreCapability();
+    cam_dyn_img_data_t getDynamicImgData();
+    void setDynamicImgData(cam_dyn_img_data_t d);
+
+    int32_t getParmZoomLevel();
+    int8_t getReprocCount();
+    int8_t getCurPPCount();
+    void setReprocCount();
+    bool isPostProcScaling();
+    bool isLLNoiseEnabled();
+    void setCurPPCount(int8_t count);
+    int32_t setToneMapMode(uint32_t value, bool initCommit);
+    void setTintless(bool enable);
+    uint8_t getLongshotStages();
+    int8_t getBufBatchCount();
+    int8_t getVideoBatchSize();
+
+    int32_t setManualCaptureMode(
+            QCameraManualCaptureModes value = CAM_MANUAL_CAPTURE_TYPE_OFF);
+    QCameraManualCaptureModes getManualCaptureMode();
+    int64_t getExposureTime();
+
+    cam_capture_frame_config_t getCaptureFrameConfig();
+    void setJpegRotation(int rotation);
+    uint32_t getJpegRotation();
+
+    void setLowLightLevel(cam_low_light_mode_t value);
+    cam_low_light_mode_t getLowLightLevel();
+    bool getLowLightCapture();
+
+    /* Dual camera specific */
+    bool getDcrf();
+    int32_t setRelatedCamSyncInfo(
+            cam_sync_related_sensors_event_info_t* info);
+    const cam_sync_related_sensors_event_info_t*
+            getRelatedCamSyncInfo(void);
+    int32_t setFrameSyncEnabled(bool enable);
+    bool isFrameSyncEnabled(void);
+    int32_t getRelatedCamCalibration(
+            cam_related_system_calibration_data_t* calib);
+    int32_t bundleRelatedCameras(bool sync, uint32_t sessionid);
+    uint8_t fdModeInVideo();
+    bool isOEMFeatEnabled();
+
+    int32_t setZslMode(bool value);
+    int32_t updateZSLModeValue(bool value);
+
+    bool isReprocScaleEnabled();
+    bool isUnderReprocScaling();
+    int32_t getPicSizeFromAPK(int &width, int &height);
+
+    int32_t checkFeatureConcurrency();
+    int32_t setInstantAEC(uint8_t enable, bool initCommit);
+
+    int32_t getAnalysisInfo(
+        bool fdVideoEnabled,
+        bool hal3,
+        cam_feature_mask_t featureMask,
+        cam_analysis_info_t *pAnalysisInfo);
+private:
+    QCameraParameters *mImpl;
+    mutable Mutex mLock;
+};
+
+}; // namespace qcamera
+
+#endif
diff --git a/msmcobalt/QCamera2/HAL/QCameraPostProc.cpp b/msmcobalt/QCamera2/HAL/QCameraPostProc.cpp
new file mode 100644
index 0000000..6ba96e9
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/QCameraPostProc.cpp
@@ -0,0 +1,3662 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCameraPostProc"
+
+// System dependencies
+#include <fcntl.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <utils/Errors.h>
+
+// Camera dependencies
+#include "QCamera2HWI.h"
+#include "QCameraPostProc.h"
+#include "QCameraTrace.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+namespace qcamera {
+
+// printf-style path template used when dumping JPEGs to storage in the
+// longshot "save" debug mode (%d is filled with a running frame counter).
+const char *QCameraPostProcessor::STORE_LOCATION = "/sdcard/img_%d.jpg";
+
+#define FREE_JPEG_OUTPUT_BUFFER(ptr,cnt)     \
+    int jpeg_bufs; \
+    for (jpeg_bufs = 0; jpeg_bufs < (int)cnt; jpeg_bufs++)  { \
+      if (ptr[jpeg_bufs] != NULL) { \
+          free(ptr[jpeg_bufs]); \
+          ptr[jpeg_bufs] = NULL; \
+      } \
+    }
+
+/*===========================================================================
+ * FUNCTION   : QCameraPostProcessor
+ *
+ * DESCRIPTION: constructor of QCameraPostProcessor.
+ *
+ * PARAMETERS :
+ *   @cam_ctrl : ptr to HWI object
+ *
+ * RETURN     : None
+ *==========================================================================*/
+// Constructor: records the owning HWI object, zero-initializes all JPEG
+// handles/output buffers, and wires the release callbacks into each
+// internal queue so flushed entries free their frames automatically.
+QCameraPostProcessor::QCameraPostProcessor(QCamera2HardwareInterface *cam_ctrl)
+    : m_parent(cam_ctrl),
+      mJpegCB(NULL),
+      mJpegUserData(NULL),
+      mJpegClientHandle(0),
+      mJpegSessionId(0),
+      m_pJpegExifObj(NULL),
+      m_bThumbnailNeeded(TRUE),
+      mPPChannelCount(0),
+      m_bInited(FALSE),
+      m_inputPPQ(releaseOngoingPPData, this),
+      m_ongoingPPQ(releaseOngoingPPData, this),
+      m_inputJpegQ(releaseJpegData, this),
+      m_ongoingJpegQ(releaseJpegData, this),
+      m_inputRawQ(releaseRawData, this),
+      mSaveFrmCnt(0),
+      mUseSaveProc(false),
+      mUseJpegBurst(false),
+      mJpegMemOpt(true),
+      m_JpegOutputMemCount(0),
+      mNewJpegSessionNeeded(true),
+      m_bufCountPPQ(0),
+      m_PPindex(0)
+{
+    memset(&mJpegHandle, 0, sizeof(mJpegHandle));
+    memset(&mJpegMpoHandle, 0, sizeof(mJpegMpoHandle));
+    memset(&m_pJpegOutputMem, 0, sizeof(m_pJpegOutputMem));
+    memset(mPPChannels, 0, sizeof(mPPChannels));
+    m_DataMem = NULL;
+    mOfflineDataBufs = NULL;
+    // Serializes reprocess submission against concurrent stop/flush paths.
+    pthread_mutex_init(&m_reprocess_lock,NULL);
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraPostProcessor
+ *
+ * DESCRIPTION: deconstructor of QCameraPostProcessor.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+// Destructor: frees JPEG output buffers, the cached EXIF object, and
+// stops/deletes any remaining reprocess channels before destroying the
+// reprocess mutex.
+QCameraPostProcessor::~QCameraPostProcessor()
+{
+    FREE_JPEG_OUTPUT_BUFFER(m_pJpegOutputMem,m_JpegOutputMemCount);
+    if (m_pJpegExifObj != NULL) {
+        delete m_pJpegExifObj;
+        m_pJpegExifObj = NULL;
+    }
+    for (int8_t i = 0; i < mPPChannelCount; i++) {
+        QCameraChannel *pChannel = mPPChannels[i];
+        if ( pChannel != NULL ) {
+            pChannel->stop();
+            delete pChannel;
+            // NOTE(review): only the local copy is cleared here, not
+            // mPPChannels[i]; harmless since the count is reset below.
+            pChannel = NULL;
+        }
+    }
+    mPPChannelCount = 0;
+    pthread_mutex_destroy(&m_reprocess_lock);
+}
+
+/*===========================================================================
+ * FUNCTION   : setJpegHandle
+ *
+ * DESCRIPTION: set JPEG client handles
+ *
+ * PARAMETERS :
+ *   @pJpegHandle    : JPEG ops handle
+ *   @pJpegMpoHandle    : MPO JPEG ops handle
+ *   @clientHandle    : JPEG client handle
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+// Store the JPEG and MPO operation tables plus the client handle used
+// for all subsequent encoder calls. A NULL table pointer leaves the
+// corresponding stored table unchanged. Always returns NO_ERROR.
+int32_t QCameraPostProcessor::setJpegHandle(mm_jpeg_ops_t *pJpegHandle,
+    mm_jpeg_mpo_ops_t *pJpegMpoHandle, uint32_t clientHandle)
+{
+    LOGH("E mJpegClientHandle: %d, clientHandle: %d",
+             mJpegClientHandle, clientHandle);
+
+    // Plain struct assignment copies the function-pointer tables.
+    if (pJpegHandle != NULL) {
+        mJpegHandle = *pJpegHandle;
+    }
+    if (pJpegMpoHandle != NULL) {
+        mJpegMpoHandle = *pJpegMpoHandle;
+    }
+    mJpegClientHandle = clientHandle;
+
+    LOGH("X mJpegClientHandle: %d, clientHandle: %d",
+             mJpegClientHandle, clientHandle);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : init
+ *
+ * DESCRIPTION: initialization of postprocessor
+ *
+ * PARAMETERS :
+ *   @jpeg_cb      : callback to handle jpeg event from mm-camera-interface
+ *   @user_data    : user data ptr for jpeg callback
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+// Initialize the postprocessor: register the JPEG-done callback and its
+// cookie, launch the data-process and data-save worker threads, reset
+// the reprocess pass count, and mark the object initialized.
+// Returns NO_ERROR.
+int32_t QCameraPostProcessor::init(jpeg_encode_callback_t jpeg_cb, void *user_data)
+{
+    mJpegCB = jpeg_cb;
+    mJpegUserData = user_data;
+    m_dataProcTh.launch(dataProcessRoutine, this);
+    m_saveProcTh.launch(dataSaveRoutine, this);
+    m_parent->mParameters.setReprocCount();
+    m_bInited = TRUE;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : deinit
+ *
+ * DESCRIPTION: de-initialization of postprocessor
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+// Shut down the worker threads started by init(). Safe to call more
+// than once: subsequent calls are no-ops. Returns NO_ERROR.
+int32_t QCameraPostProcessor::deinit()
+{
+    if (m_bInited != TRUE) {
+        return NO_ERROR;
+    }
+    m_dataProcTh.exit();
+    m_saveProcTh.exit();
+    m_bInited = FALSE;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : start
+ *
+ * DESCRIPTION: start postprocessor. Data process thread and data notify thread
+ *              will be launched.
+ *
+ * PARAMETERS :
+ *   @pSrcChannel : source channel obj ptr that possibly needs reprocess
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *
+ * NOTE       : if any reprocess is needed, a reprocess channel/stream
+ *              will be started.
+ *==========================================================================*/
+// Start the postprocessor for a capture: tear down stale reprocess
+// channels, create/start a fresh reprocess chain (when reprocessing is
+// needed) with each channel feeding the next, then kick the data
+// processing thread and snapshot notifications.
+//
+// @pSrcChannel : source channel whose frames may need reprocess
+// Returns NO_ERROR on success, UNKNOWN_ERROR on failure.
+int32_t QCameraPostProcessor::start(QCameraChannel *pSrcChannel)
+{
+    char prop[PROPERTY_VALUE_MAX];
+    int32_t rc = NO_ERROR;
+    QCameraChannel *pInputChannel = pSrcChannel;
+
+    LOGH("E ");
+    if (m_bInited == FALSE) {
+        LOGE("postproc not initialized yet");
+        return UNKNOWN_ERROR;
+    }
+
+    if (m_DataMem != NULL) {
+        m_DataMem->release(m_DataMem);
+        m_DataMem = NULL;
+    }
+
+    if (pInputChannel == NULL) {
+        LOGE("Input Channel for pproc is NULL.");
+        return UNKNOWN_ERROR;
+    }
+
+    if ( m_parent->needReprocess() ) {
+        for (int8_t i = 0; i < mPPChannelCount; i++) {
+            // Delete previous reproc channel
+            QCameraReprocessChannel *pChannel = mPPChannels[i];
+            if (pChannel != NULL) {
+                pChannel->stop();
+                delete pChannel;
+                pChannel = NULL;
+            }
+        }
+        mPPChannelCount = 0;
+
+        m_bufCountPPQ = 0;
+        if (!m_parent->isLongshotEnabled()) {
+            m_parent->mParameters.setReprocCount();
+        }
+
+        // Manual capture type 3+ handles the final pass separately, so
+        // one fewer postproc channel is created here.
+        if (m_parent->mParameters.getManualCaptureMode() >=
+                CAM_MANUAL_CAPTURE_TYPE_3) {
+            mPPChannelCount = m_parent->mParameters.getReprocCount() - 1;
+        } else {
+            mPPChannelCount = m_parent->mParameters.getReprocCount();
+        }
+
+        // Create all reproc channels and start channel; each started
+        // channel becomes the input of the next pass.
+        for (int8_t i = 0; i < mPPChannelCount; i++) {
+            mPPChannels[i] = m_parent->addReprocChannel(pInputChannel, i);
+            if (mPPChannels[i] == NULL) {
+                LOGE("cannot add multi reprocess channel i = %d", i);
+                // Clamp the count to the channels actually created so
+                // cleanup paths never touch NULL trailing entries.
+                mPPChannelCount = i;
+                return UNKNOWN_ERROR;
+            }
+            rc = mPPChannels[i]->start();
+            if (rc != 0) {
+                LOGE("cannot start multi reprocess channel i = %d", i);
+                delete mPPChannels[i];
+                mPPChannels[i] = NULL;
+                // Same clamp as above: entries [0, i) remain valid.
+                mPPChannelCount = i;
+                return UNKNOWN_ERROR;
+            }
+            pInputChannel = static_cast<QCameraChannel *>(mPPChannels[i]);
+        }
+    }
+
+    // Debug hook: persist.camera.longshot.save routes longshot frames
+    // through the save thread instead of the JPEG pipeline.
+    property_get("persist.camera.longshot.save", prop, "0");
+    mUseSaveProc = atoi(prop) > 0 ? true : false;
+
+    m_PPindex = 0;
+    m_InputMetadata.clear();
+    m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, TRUE, FALSE);
+    m_parent->m_cbNotifier.startSnapshots();
+    LOGH("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop
+ *
+ * DESCRIPTION: stop postprocessor. Data process and notify thread will be stopped.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *
+ * NOTE       : reprocess channel will be stopped and deleted if there is any
+ *==========================================================================*/
+// Stop the postprocessor: halt snapshot notifications, release any
+// cached JPEG data, synchronously stop the data-process thread (JPEG
+// job abort must be synchronous), then stop/delete all reprocess
+// channels and free offline input buffers. Returns NO_ERROR.
+int32_t QCameraPostProcessor::stop()
+{
+    if (m_bInited == TRUE) {
+        m_parent->m_cbNotifier.stopSnapshots();
+
+        if (m_DataMem != NULL) {
+            m_DataMem->release(m_DataMem);
+            m_DataMem = NULL;
+        }
+
+        // dataProc Thread need to process "stop" as sync call because abort jpeg job should be a sync call
+        m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, TRUE, TRUE);
+    }
+    // stop reproc channel if exists
+    for (int8_t i = 0; i < mPPChannelCount; i++) {
+        QCameraReprocessChannel *pChannel = mPPChannels[i];
+        if (pChannel != NULL) {
+            pChannel->stop();
+            delete pChannel;
+            // NOTE(review): mPPChannels[i] is not cleared, only the
+            // local copy; the count reset below prevents reuse.
+            pChannel = NULL;
+        }
+    }
+    mPPChannelCount = 0;
+    m_PPindex = 0;
+    m_InputMetadata.clear();
+
+    if (mOfflineDataBufs != NULL) {
+        mOfflineDataBufs->deallocate();
+        delete mOfflineDataBufs;
+        mOfflineDataBufs = NULL;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : createJpegSession
+ *
+ * DESCRIPTION: start JPEG session in parallel to reproces to reduce the KPI
+ *
+ * PARAMETERS :
+ *   @pSrcChannel : source channel obj ptr that possibly needs reprocess
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+// Pre-create a JPEG encode session in parallel with reprocess to cut
+// shot-to-JPEG latency. Picks the snapshot stream (and, when needed, a
+// thumbnail stream) from the last reprocess channel -- falling back to
+// the source channel -- builds the encode config, and opens the
+// session. Clears mNewJpegSessionNeeded on success.
+//
+// @pSrcChannel : source channel that may feed reprocess
+// Returns NO_ERROR on success, UNKNOWN_ERROR / encoder error otherwise.
+int32_t QCameraPostProcessor::createJpegSession(QCameraChannel *pSrcChannel)
+{
+    int32_t rc = NO_ERROR;
+
+    LOGH("E ");
+    if (m_bInited == FALSE) {
+        LOGE("postproc not initialized yet");
+        return UNKNOWN_ERROR;
+    }
+
+    if (pSrcChannel == NULL) {
+        LOGE("Input Channel for pproc is NULL.");
+        return UNKNOWN_ERROR;
+    }
+
+    if (mPPChannelCount > 0) {
+        QCameraChannel *pChannel = NULL;
+        int ppChannel_idx = mPPChannelCount - 1;
+        // Encode from the output of the last reprocess pass when
+        // reprocessing, otherwise straight from the source channel.
+        pChannel = m_parent->needReprocess() ? mPPChannels[ppChannel_idx] :
+                pSrcChannel;
+        QCameraStream *pSnapshotStream = NULL;
+        QCameraStream *pThumbStream = NULL;
+        // A separate thumbnail stream is used unless ZSL flip settings
+        // differ between snapshot and preview, or the thumbnail is
+        // generated from the main image.
+        bool thumb_stream_needed = ((!m_parent->isZSLMode() ||
+            (m_parent->mParameters.getFlipMode(CAM_STREAM_TYPE_SNAPSHOT) ==
+             m_parent->mParameters.getFlipMode(CAM_STREAM_TYPE_PREVIEW))) &&
+            !m_parent->mParameters.generateThumbFromMain());
+
+        if (pChannel == NULL) {
+            LOGE("Input Channel for pproc is NULL for index %d.",
+                     ppChannel_idx);
+            return UNKNOWN_ERROR;
+        }
+
+        // Locate snapshot and (optionally) thumbnail streams on the
+        // chosen channel.
+        for (uint32_t i = 0; i < pChannel->getNumOfStreams(); ++i) {
+            QCameraStream *pStream = pChannel->getStreamByIndex(i);
+
+            if ( NULL == pStream ) {
+                break;
+            }
+
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+                    pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
+                pSnapshotStream = pStream;
+            }
+
+            if ((thumb_stream_needed) &&
+                   (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+                    pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
+                    pStream->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+                    pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW))) {
+                pThumbStream = pStream;
+            }
+        }
+
+        // If thumbnail is not part of the reprocess channel, then
+        // try to get it from the source channel
+        if ((thumb_stream_needed) && (NULL == pThumbStream) &&
+                (pChannel == mPPChannels[ppChannel_idx])) {
+            for (uint32_t i = 0; i < pSrcChannel->getNumOfStreams(); ++i) {
+                QCameraStream *pStream = pSrcChannel->getStreamByIndex(i);
+
+                if ( NULL == pStream ) {
+                    break;
+                }
+
+                if (pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
+                        pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
+                        pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+                        pStream->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW)) {
+                    pThumbStream = pStream;
+                }
+            }
+        }
+
+        if ( NULL != pSnapshotStream ) {
+            mm_jpeg_encode_params_t encodeParam;
+            memset(&encodeParam, 0, sizeof(mm_jpeg_encode_params_t));
+            rc = getJpegEncodingConfig(encodeParam, pSnapshotStream, pThumbStream);
+            if (rc != NO_ERROR) {
+                LOGE("error getting encoding config");
+                return rc;
+            }
+            LOGH("[KPI Perf] : call jpeg create_session");
+
+            rc = mJpegHandle.create_session(mJpegClientHandle,
+                    &encodeParam,
+                    &mJpegSessionId);
+            if (rc != NO_ERROR) {
+                LOGE("error creating a new jpeg encoding session");
+                return rc;
+            }
+            mNewJpegSessionNeeded = false;
+        }
+    }
+    LOGH("X ");
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegEncodingConfig
+ *
+ * DESCRIPTION: function to prepare encoding job information
+ *
+ * PARAMETERS :
+ *   @encode_parm   : param to be filled with encoding configuration
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+// Fill @encode_parm with the full JPEG encode configuration derived
+// from the main (and optional thumbnail) stream: dimensions/crop,
+// rotation, quality, color formats, source buffer tables, and the
+// destination buffers allocated here. On error, frees any destination
+// buffers already allocated and returns the error code.
+//
+// @encode_parm  : [out] encode configuration to populate
+// @main_stream  : stream carrying the main image
+// @thumb_stream : stream carrying the thumbnail (may be NULL; falls
+//                 back to main_stream)
+int32_t QCameraPostProcessor::getJpegEncodingConfig(mm_jpeg_encode_params_t& encode_parm,
+                                                    QCameraStream *main_stream,
+                                                    QCameraStream *thumb_stream)
+{
+    LOGD("E");
+    int32_t ret = NO_ERROR;
+    size_t out_size;
+
+    char prop[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.jpeg_burst", prop, "0");
+    mUseJpegBurst = (atoi(prop) > 0) && !mUseSaveProc;
+    encode_parm.burst_mode = mUseJpegBurst;
+
+    cam_rect_t crop;
+    memset(&crop, 0, sizeof(cam_rect_t));
+    main_stream->getCropInfo(crop);
+
+    cam_dimension_t src_dim, dst_dim;
+    memset(&src_dim, 0, sizeof(cam_dimension_t));
+    memset(&dst_dim, 0, sizeof(cam_dimension_t));
+    main_stream->getFrameDimension(src_dim);
+
+    // With HDR output crop enabled, the destination image is the crop
+    // region; otherwise it matches the source frame.
+    bool hdr_output_crop = m_parent->mParameters.isHDROutputCropEnabled();
+    if (hdr_output_crop && crop.height) {
+        dst_dim.height = crop.height;
+    } else {
+        dst_dim.height = src_dim.height;
+    }
+    if (hdr_output_crop && crop.width) {
+        dst_dim.width = crop.width;
+    } else {
+        dst_dim.width = src_dim.width;
+    }
+
+    // set rotation only when no online rotation or offline pp rotation is done before
+    if (!m_parent->needRotationReprocess()) {
+        encode_parm.rotation = m_parent->mParameters.getJpegRotation();
+    }
+
+    encode_parm.main_dim.src_dim = src_dim;
+    encode_parm.main_dim.dst_dim = dst_dim;
+
+    m_dst_dim = dst_dim;
+
+    encode_parm.jpeg_cb = mJpegCB;
+    encode_parm.userdata = mJpegUserData;
+
+    m_bThumbnailNeeded = TRUE; // need encode thumbnail by default
+    // system property to disable the thumbnail encoding in order to reduce the power
+    // by default thumbnail encoding is set to TRUE and explicitly set this property to
+    // disable the thumbnail encoding
+    property_get("persist.camera.tn.disable", prop, "0");
+    if (atoi(prop) == 1) {
+        m_bThumbnailNeeded = FALSE;
+        LOGH("m_bThumbnailNeeded is %d", m_bThumbnailNeeded);
+    }
+    cam_dimension_t thumbnailSize;
+    memset(&thumbnailSize, 0, sizeof(cam_dimension_t));
+    m_parent->getThumbnailSize(thumbnailSize);
+    if (thumbnailSize.width == 0 || thumbnailSize.height == 0) {
+        // (0,0) means no thumbnail
+        m_bThumbnailNeeded = FALSE;
+    }
+    encode_parm.encode_thumbnail = m_bThumbnailNeeded;
+
+    // get color format
+    cam_format_t img_fmt = CAM_FORMAT_YUV_420_NV12;
+    main_stream->getFormat(img_fmt);
+    encode_parm.color_format = getColorfmtFromImgFmt(img_fmt);
+
+    // get jpeg quality
+    uint32_t val = m_parent->getJpegQuality();
+    if (0U < val) {
+        encode_parm.quality = val;
+    } else {
+        LOGH("Using default JPEG quality");
+        encode_parm.quality = 85;
+    }
+    cam_frame_len_offset_t main_offset;
+    memset(&main_offset, 0, sizeof(cam_frame_len_offset_t));
+    main_stream->getFrameOffset(main_offset);
+
+    // src buf config
+    QCameraMemory *pStreamMem = main_stream->getStreamBufs();
+    if (pStreamMem == NULL) {
+        LOGE("cannot get stream bufs from main stream");
+        ret = BAD_VALUE;
+        goto on_error;
+    }
+    // One encoder source descriptor per main-stream buffer.
+    encode_parm.num_src_bufs = pStreamMem->getCnt();
+    for (uint32_t i = 0; i < encode_parm.num_src_bufs; i++) {
+        camera_memory_t *stream_mem = pStreamMem->getMemory(i, false);
+        if (stream_mem != NULL) {
+            encode_parm.src_main_buf[i].index = i;
+            encode_parm.src_main_buf[i].buf_size = stream_mem->size;
+            encode_parm.src_main_buf[i].buf_vaddr = (uint8_t *)stream_mem->data;
+            encode_parm.src_main_buf[i].fd = pStreamMem->getFd(i);
+            encode_parm.src_main_buf[i].format = MM_JPEG_FMT_YUV;
+            encode_parm.src_main_buf[i].offset = main_offset;
+        }
+    }
+    LOGI("Src Buffer cnt = %d, res = %dX%d len = %d rot = %d "
+            "src_dim = %dX%d dst_dim = %dX%d",
+            encode_parm.num_src_bufs,
+            main_offset.mp[0].width, main_offset.mp[0].height,
+            main_offset.frame_len, encode_parm.rotation,
+            src_dim.width, src_dim.height,
+            dst_dim.width, dst_dim.height);
+
+    if (m_bThumbnailNeeded == TRUE) {
+        m_parent->getThumbnailSize(encode_parm.thumb_dim.dst_dim);
+
+        if (thumb_stream == NULL) {
+            thumb_stream = main_stream;
+        }
+        if (((90 == m_parent->mParameters.getJpegRotation())
+                || (270 == m_parent->mParameters.getJpegRotation()))
+                && (m_parent->needRotationReprocess())) {
+            // swap thumbnail dimensions
+            cam_dimension_t tmp_dim = encode_parm.thumb_dim.dst_dim;
+            encode_parm.thumb_dim.dst_dim.width = tmp_dim.height;
+            encode_parm.thumb_dim.dst_dim.height = tmp_dim.width;
+        }
+        pStreamMem = thumb_stream->getStreamBufs();
+        if (pStreamMem == NULL) {
+            LOGE("cannot get stream bufs from thumb stream");
+            ret = BAD_VALUE;
+            goto on_error;
+        }
+        cam_frame_len_offset_t thumb_offset;
+        memset(&thumb_offset, 0, sizeof(cam_frame_len_offset_t));
+        thumb_stream->getFrameOffset(thumb_offset);
+        encode_parm.num_tmb_bufs =  pStreamMem->getCnt();
+        for (uint32_t i = 0; i < pStreamMem->getCnt(); i++) {
+            camera_memory_t *stream_mem = pStreamMem->getMemory(i, false);
+            if (stream_mem != NULL) {
+                encode_parm.src_thumb_buf[i].index = i;
+                encode_parm.src_thumb_buf[i].buf_size = stream_mem->size;
+                encode_parm.src_thumb_buf[i].buf_vaddr = (uint8_t *)stream_mem->data;
+                encode_parm.src_thumb_buf[i].fd = pStreamMem->getFd(i);
+                encode_parm.src_thumb_buf[i].format = MM_JPEG_FMT_YUV;
+                encode_parm.src_thumb_buf[i].offset = thumb_offset;
+            }
+        }
+        cam_format_t img_fmt_thumb = CAM_FORMAT_YUV_420_NV12;
+        thumb_stream->getFormat(img_fmt_thumb);
+        encode_parm.thumb_color_format = getColorfmtFromImgFmt(img_fmt_thumb);
+
+        // crop is the same if frame is the same
+        if (thumb_stream != main_stream) {
+            memset(&crop, 0, sizeof(cam_rect_t));
+            thumb_stream->getCropInfo(crop);
+        }
+
+        memset(&src_dim, 0, sizeof(cam_dimension_t));
+        thumb_stream->getFrameDimension(src_dim);
+        encode_parm.thumb_dim.src_dim = src_dim;
+
+        if (!m_parent->needRotationReprocess()) {
+            encode_parm.thumb_rotation = m_parent->mParameters.getJpegRotation();
+        }
+        encode_parm.thumb_dim.crop = crop;
+        encode_parm.thumb_from_postview =
+            !m_parent->mParameters.generateThumbFromMain() &&
+            (img_fmt_thumb != CAM_FORMAT_YUV_420_NV12_UBWC) &&
+            (m_parent->mParameters.useJpegExifRotation() ||
+            m_parent->mParameters.getJpegRotation() == 0);
+        LOGI("Src THUMB buf_cnt = %d, res = %dX%d len = %d rot = %d "
+            "src_dim = %dX%d, dst_dim = %dX%d",
+            encode_parm.num_tmb_bufs,
+            thumb_offset.mp[0].width, thumb_offset.mp[0].height,
+            thumb_offset.frame_len, encode_parm.thumb_rotation,
+            encode_parm.thumb_dim.src_dim.width,
+            encode_parm.thumb_dim.src_dim.height,
+            encode_parm.thumb_dim.dst_dim.width,
+            encode_parm.thumb_dim.dst_dim.height);
+    }
+
+    if (m_parent->mParameters.useJpegExifRotation()){
+        encode_parm.thumb_rotation = m_parent->mParameters.getJpegExifRotation();
+    }
+
+    encode_parm.num_dst_bufs = 1;
+    if (mUseJpegBurst) {
+        encode_parm.num_dst_bufs = MAX_JPEG_BURST;
+    }
+    encode_parm.get_memory = NULL;
+    out_size = main_offset.frame_len;
+    if (mJpegMemOpt) {
+        // Memory-optimized mode: encoder requests output buffers via
+        // callbacks, so each dest buffer only holds a small descriptor.
+        encode_parm.get_memory = getJpegMemory;
+        encode_parm.put_memory = releaseJpegMemory;
+        out_size = sizeof(omx_jpeg_ouput_buf_t);
+        encode_parm.num_dst_bufs = encode_parm.num_src_bufs;
+    }
+    m_JpegOutputMemCount = (uint32_t)encode_parm.num_dst_bufs;
+    for (uint32_t i = 0; i < m_JpegOutputMemCount; i++) {
+        if (m_pJpegOutputMem[i] != NULL)
+          free(m_pJpegOutputMem[i]);
+        omx_jpeg_ouput_buf_t omx_out_buf;
+        memset(&omx_out_buf, 0, sizeof(omx_jpeg_ouput_buf_t));
+        omx_out_buf.handle = this;
+        // allocate output buf for jpeg encoding
+        m_pJpegOutputMem[i] = malloc(out_size);
+
+        if (NULL == m_pJpegOutputMem[i]) {
+          ret = NO_MEMORY;
+          LOGE("initHeapMem for jpeg, ret = NO_MEMORY");
+          goto on_error;
+        }
+
+        if (mJpegMemOpt) {
+            memcpy(m_pJpegOutputMem[i], &omx_out_buf, sizeof(omx_out_buf));
+        }
+
+        encode_parm.dest_buf[i].index = i;
+        encode_parm.dest_buf[i].buf_size = main_offset.frame_len;
+        encode_parm.dest_buf[i].buf_vaddr = (uint8_t *)m_pJpegOutputMem[i];
+        encode_parm.dest_buf[i].fd = -1;
+        encode_parm.dest_buf[i].format = MM_JPEG_FMT_YUV;
+        encode_parm.dest_buf[i].offset = main_offset;
+    }
+
+    LOGD("X");
+    return NO_ERROR;
+
+on_error:
+    FREE_JPEG_OUTPUT_BUFFER(m_pJpegOutputMem, m_JpegOutputMemCount);
+
+    LOGD("X with error %d", ret);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : sendEvtNotify
+ *
+ * DESCRIPTION: send event notify through notify callback registered by upper layer
+ *
+ * PARAMETERS :
+ *   @msg_type: msg type of notify
+ *   @ext1    : extension
+ *   @ext2    : extension
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+// Thin forwarder: relays an event notification to the parent HWI's
+// registered notify callback. Returns whatever the parent returns.
+int32_t QCameraPostProcessor::sendEvtNotify(int32_t msg_type,
+                                            int32_t ext1,
+                                            int32_t ext2)
+{
+    return m_parent->sendEvtNotify(msg_type, ext1, ext2);
+}
+
+/*===========================================================================
+ * FUNCTION   : sendDataNotify
+ *
+ * DESCRIPTION: enqueue data into dataNotify thread
+ *
+ * PARAMETERS :
+ *   @msg_type: data callback msg type
+ *   @data    : ptr to data memory struct
+ *   @index   : index to data buffer
+ *   @metadata: ptr to meta data buffer if there is any
+ *   @release_data : ptr to struct indicating if data need to be released
+ *                   after notify
+ *   @super_buf_frame_idx : super buffer frame index
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+// Package a snapshot data callback and hand it to the notifier thread.
+// Heap-allocates a qcamera_data_argm_t so the release info survives
+// until releaseNotifyData runs after the callback is delivered; on
+// enqueue failure the packet is released immediately.
+// Returns NO_ERROR on success, NO_MEMORY or UNKNOWN_ERROR on failure.
+int32_t QCameraPostProcessor::sendDataNotify(int32_t msg_type,
+                                             camera_memory_t *data,
+                                             uint8_t index,
+                                             camera_frame_metadata_t *metadata,
+                                             qcamera_release_data_t *release_data,
+                                             uint32_t super_buf_frame_idx)
+{
+    qcamera_data_argm_t *data_cb = (qcamera_data_argm_t *)malloc(sizeof(qcamera_data_argm_t));
+    if (NULL == data_cb) {
+        LOGE("no mem for acamera_data_argm_t");
+        return NO_MEMORY;
+    }
+    memset(data_cb, 0, sizeof(qcamera_data_argm_t));
+    data_cb->msg_type = msg_type;
+    data_cb->data = data;
+    data_cb->index = index;
+    data_cb->metadata = metadata;
+    if (release_data != NULL) {
+        data_cb->release_data = *release_data;
+    }
+
+    qcamera_callback_argm_t cbArg;
+    memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+    cbArg.cb_type = QCAMERA_DATA_SNAPSHOT_CALLBACK;
+    cbArg.msg_type = msg_type;
+    cbArg.data = data;
+    cbArg.metadata = metadata;
+    cbArg.user_data = data_cb;
+    cbArg.cookie = this;
+    // releaseNotifyData frees data_cb (and any held buffers) once the
+    // callback has been dispatched.
+    cbArg.release_cb = releaseNotifyData;
+    cbArg.frame_index = super_buf_frame_idx;
+    int rc = m_parent->m_cbNotifier.notifyCallback(cbArg);
+    if ( NO_ERROR != rc ) {
+        LOGE("Error enqueuing jpeg data into notify queue");
+        // Enqueue failed: release the packet ourselves.
+        releaseNotifyData(data_cb, this, UNKNOWN_ERROR);
+        return UNKNOWN_ERROR;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : validatePostProcess
+ *
+ * DESCRIPTION: Verify output buffer count of pp module
+ *
+ * PARAMETERS :
+ *   @frame   : process frame received from mm-camera-interface
+ *
+ * RETURN     : bool type of status
+ *              TRUE  -- success
+ *              FALSE     failure
+ *==========================================================================*/
+// Check whether the postproc pipeline has room for another frame from
+// @frame's channel: if that channel feeds a reprocess channel and the
+// ongoing-PP queue already holds as many buffers as any reprocess
+// stream has queued, report FALSE (out of PP buffers).
+//
+// @frame : super buffer received from mm-camera-interface (may be NULL)
+// Returns TRUE when processing may proceed, FALSE when out of buffers.
+bool QCameraPostProcessor::validatePostProcess(mm_camera_super_buf_t *frame)
+{
+    bool status = TRUE;
+    QCameraChannel *pChannel = NULL;
+    QCameraReprocessChannel *m_pReprocChannel = NULL;
+
+    if (frame == NULL) {
+        return status;
+    }
+
+    pChannel = m_parent->getChannelByHandle(frame->ch_id);
+    for (int8_t i = 0; i < mPPChannelCount; i++) {
+        // Guard against NULL slots left behind by a failed channel
+        // creation/start before dereferencing.
+        if ((mPPChannels[i] != NULL) &&
+                (pChannel == mPPChannels[i]->getSrcChannel())) {
+            m_pReprocChannel = mPPChannels[i];
+            break;
+        }
+    }
+
+    if ((m_pReprocChannel != NULL) && (pChannel == m_pReprocChannel->getSrcChannel())) {
+        QCameraStream *pStream = NULL;
+        for (uint8_t i = 0; i < m_pReprocChannel->getNumOfStreams(); i++) {
+            pStream = m_pReprocChannel->getStreamByIndex(i);
+            if (pStream && (m_inputPPQ.getCurrentSize() > 0) &&
+                    (m_ongoingPPQ.getCurrentSize() >=  pStream->getNumQueuedBuf())) {
+                LOGW("Out of PP Buffer PPQ = %d ongoingQ = %d Jpeg = %d onJpeg = %d",
+                        m_inputPPQ.getCurrentSize(), m_ongoingPPQ.getCurrentSize(),
+                        m_inputJpegQ.getCurrentSize(), m_ongoingJpegQ.getCurrentSize());
+                status = FALSE;
+                break;
+            }
+        }
+    }
+    return status;
+}
+
+/*===========================================================================
+ * FUNCTION   : getOfflinePPInputBuffer
+ *
+ * DESCRIPTION: Function to generate offline post proc buffer
+ *
+ * PARAMETERS :
+ * @src_frame : process frame received from mm-camera-interface
+ *
+ * RETURN     : Buffer pointer if successfull
+ *            : NULL in case of failures
+ *==========================================================================*/
+// Build an array of offline postproc input buffer defs from a received
+// super buffer: locates the RAW data frame (and metadata frame) in
+// @src_frame, copies the RAW payload into the pre-allocated offline
+// buffers, and releases the original super buffer.
+//
+// @src_frame : super buffer received from mm-camera-interface
+// Returns a malloc'd mm_camera_buf_def_t array (caller owns/frees it)
+// or NULL on failure.
+mm_camera_buf_def_t *QCameraPostProcessor::getOfflinePPInputBuffer(
+        mm_camera_super_buf_t *src_frame)
+{
+    mm_camera_buf_def_t *mBufDefs = NULL;
+    QCameraChannel *pChannel = NULL;
+    QCameraStream *src_pStream = NULL;
+    mm_camera_buf_def_t *data_frame = NULL;
+    mm_camera_buf_def_t *meta_frame = NULL;
+
+    if (mOfflineDataBufs == NULL) {
+        LOGE("Offline Buffer not allocated");
+        return NULL;
+    }
+
+    uint32_t num_bufs = mOfflineDataBufs->getCnt();
+    size_t bufDefsSize = num_bufs * sizeof(mm_camera_buf_def_t);
+    mBufDefs = (mm_camera_buf_def_t *)malloc(bufDefsSize);
+    if (mBufDefs == NULL) {
+        LOGE("No memory");
+        return NULL;
+    }
+    memset(mBufDefs, 0, bufDefsSize);
+
+    pChannel = m_parent->getChannelByHandle(src_frame->ch_id);
+    if (pChannel == NULL) {
+        // Unknown channel handle: bail out instead of dereferencing
+        // NULL below, and release the array allocated above.
+        LOGE("No channel found for handle %d", src_frame->ch_id);
+        free(mBufDefs);
+        return NULL;
+    }
+    for (uint32_t i = 0; i < src_frame->num_bufs; i++) {
+        src_pStream = pChannel->getStreamByHandle(
+                src_frame->bufs[i]->stream_id);
+        if (src_pStream != NULL) {
+            if (src_pStream->getMyType() == CAM_STREAM_TYPE_RAW) {
+                LOGH("Found RAW input stream");
+                data_frame = src_frame->bufs[i];
+            } else if (src_pStream->getMyType() == CAM_STREAM_TYPE_METADATA){
+                LOGH("Found Metada input stream");
+                meta_frame = src_frame->bufs[i];
+            }
+        }
+    }
+
+    if ((src_pStream != NULL) && (data_frame != NULL)) {
+        cam_frame_len_offset_t offset;
+        memset(&offset, 0, sizeof(cam_frame_len_offset_t));
+        src_pStream->getFrameOffset(offset);
+        for (uint32_t i = 0; i < num_bufs; i++) {
+            mBufDefs[i] = *data_frame;
+            mOfflineDataBufs->getBufDef(offset, mBufDefs[i], i);
+
+            LOGD("Dumping RAW data on offline buffer");
+            /*Actual data memcpy just for verification*/
+            memcpy(mBufDefs[i].buffer, data_frame->buffer,
+                    mBufDefs[i].frame_len);
+        }
+        releaseSuperBuf(src_frame, CAM_STREAM_TYPE_RAW);
+    } else {
+        free(mBufDefs);
+        mBufDefs = NULL;
+    }
+
+    LOGH("mBufDefs = %p", mBufDefs);
+    return mBufDefs;
+}
+
+/*===========================================================================
+ * FUNCTION   : processData
+ *
+ * DESCRIPTION: enqueue data into dataProc thread
+ *
+ * PARAMETERS :
+ *   @frame   : process frame received from mm-camera-interface
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *
+ * NOTE       : depends on if offline reprocess is needed, received frame will
+ *              be sent to either input queue of postprocess or jpeg encoding
+ *==========================================================================*/
+int32_t QCameraPostProcessor::processData(mm_camera_super_buf_t *frame)
+{
+    if (m_bInited == FALSE) {
+        LOGE("postproc not initialized yet");
+        return UNKNOWN_ERROR;
+    }
+
+    if (frame == NULL) {
+        LOGE("Invalid parameter");
+        return UNKNOWN_ERROR;
+    }
+
+    // Locate the metadata buffer (if any) in the incoming superbuf.
+    mm_camera_buf_def_t *meta_frame = NULL;
+    for (uint32_t i = 0; i < frame->num_bufs; i++) {
+        // look through input superbuf
+        if (frame->bufs[i]->stream_type == CAM_STREAM_TYPE_METADATA) {
+            meta_frame = frame->bufs[i];
+            break;
+        }
+    }
+    if (meta_frame != NULL) {
+        //Function to update metadata for frame based parameter
+        m_parent->updateMetadata((metadata_buffer_t *)meta_frame->buffer);
+    }
+
+    if (m_parent->needReprocess()) {
+        if ((!m_parent->isLongshotEnabled() &&
+             !m_parent->m_stateMachine.isNonZSLCaptureRunning()) ||
+            (m_parent->isLongshotEnabled() &&
+             m_parent->isCaptureShutterEnabled())) {
+            //play shutter sound
+            m_parent->playShutter();
+        }
+
+        ATRACE_INT("Camera:Reprocess", 1);
+        LOGH("need reprocess");
+
+        // enqueu to post proc input queue
+        qcamera_pp_data_t *pp_request_job =
+                (qcamera_pp_data_t *)malloc(sizeof(qcamera_pp_data_t));
+        if (pp_request_job == NULL) {
+            LOGE("No memory for pproc job");
+            return NO_MEMORY;
+        }
+        memset(pp_request_job, 0, sizeof(qcamera_pp_data_t));
+        pp_request_job->src_frame = frame;
+        pp_request_job->src_reproc_frame = frame;
+        pp_request_job->reprocCount = 0;
+        pp_request_job->ppChannelIndex = 0;
+
+        if ((NULL != frame) &&
+                (0 < frame->num_bufs)
+                && (m_parent->isRegularCapture())) {
+            /*Regular capture. Source stream will be deleted*/
+            mm_camera_buf_def_t *bufs = NULL;
+            uint32_t num_bufs = frame->num_bufs;
+            // NOTE(review): operator new normally throws instead of
+            // returning NULL; the check is kept for non-throwing builds.
+            bufs = new mm_camera_buf_def_t[num_bufs];
+            if (NULL == bufs) {
+                LOGE("Unable to allocate cached buffers");
+                // Fix: release the pp job allocated above instead of
+                // leaking it on this error path.
+                free(pp_request_job);
+                return NO_MEMORY;
+            }
+
+            // Cache shallow copies of the buffer descriptors, since the
+            // source channel (and its descriptors) will be deleted.
+            for (uint32_t i = 0; i < num_bufs; i++) {
+                bufs[i] = *frame->bufs[i];
+                frame->bufs[i] = &bufs[i];
+            }
+            pp_request_job->src_reproc_bufs = bufs;
+
+            // Don't release source frame after encoding
+            // at this point the source channel will not exist.
+            pp_request_job->reproc_frame_release = true;
+        }
+
+        if (mOfflineDataBufs != NULL) {
+            pp_request_job->offline_reproc_buf =
+                    getOfflinePPInputBuffer(frame);
+            if (pp_request_job->offline_reproc_buf != NULL) {
+                pp_request_job->offline_buffer = true;
+            }
+        }
+
+        if (false == m_inputPPQ.enqueue((void *)pp_request_job)) {
+            LOGW("Input PP Q is not active!!!");
+            releaseSuperBuf(frame);
+            free(frame);
+            free(pp_request_job);
+            frame = NULL;
+            pp_request_job = NULL;
+            return NO_ERROR;
+        }
+        if (m_parent->mParameters.isAdvCamFeaturesEnabled()
+                && (meta_frame != NULL)) {
+            m_InputMetadata.add(meta_frame);
+        }
+    } else if (m_parent->mParameters.isNV16PictureFormat() ||
+        m_parent->mParameters.isNV21PictureFormat()) {
+        //check if raw frame information is needed.
+        if(m_parent->mParameters.isYUVFrameInfoNeeded())
+            setYUVFrameInfo(frame);
+
+        processRawData(frame);
+    } else {
+        //play shutter sound
+        if(!m_parent->m_stateMachine.isNonZSLCaptureRunning() &&
+           !m_parent->mLongshotEnabled)
+           m_parent->playShutter();
+
+        LOGH("no need offline reprocess, sending to jpeg encoding");
+        qcamera_jpeg_data_t *jpeg_job =
+            (qcamera_jpeg_data_t *)malloc(sizeof(qcamera_jpeg_data_t));
+        if (jpeg_job == NULL) {
+            LOGE("No memory for jpeg job");
+            return NO_MEMORY;
+        }
+
+        memset(jpeg_job, 0, sizeof(qcamera_jpeg_data_t));
+        jpeg_job->src_frame = frame;
+
+        if (meta_frame != NULL) {
+            // fill in meta data frame ptr
+            jpeg_job->metadata = (metadata_buffer_t *)meta_frame->buffer;
+        }
+
+        // enqueu to jpeg input queue
+        if (!m_inputJpegQ.enqueue((void *)jpeg_job)) {
+            LOGW("Input Jpeg Q is not active!!!");
+            releaseJpegJobData(jpeg_job);
+            free(jpeg_job);
+            jpeg_job = NULL;
+            return NO_ERROR;
+        }
+    }
+
+    m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : processRawData
+ *
+ * DESCRIPTION: enqueue raw data into dataProc thread
+ *
+ * PARAMETERS :
+ *   @frame   : process frame received from mm-camera-interface
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::processRawData(mm_camera_super_buf_t *frame)
+{
+    if (FALSE == m_bInited) {
+        LOGE("postproc not initialized yet");
+        return UNKNOWN_ERROR;
+    }
+
+    // Hand the raw frame to the data-proc thread through the raw queue.
+    bool queued = m_inputRawQ.enqueue((void *)frame);
+    if (!queued) {
+        // Queue inactive: return the buffers and drop the job.
+        LOGW("m_inputRawQ is not active!!!");
+        releaseSuperBuf(frame);
+        free(frame);
+        frame = NULL;
+        return NO_ERROR;
+    }
+
+    m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : processJpegEvt
+ *
+ * DESCRIPTION: process jpeg event from mm-jpeg-interface.
+ *
+ * PARAMETERS :
+ *   @evt     : payload of jpeg event, including information about jpeg encoding
+ *              status, jpeg size and so on.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *
+ * NOTE       : This event will also trigger DataProc thread to move to next job
+ *              processing (i.e., send a new jpeg encoding job to mm-jpeg-interface
+ *              if there is any pending job in jpeg input queue)
+ *==========================================================================*/
+int32_t QCameraPostProcessor::processJpegEvt(qcamera_jpeg_evt_payload_t *evt)
+{
+    if (m_bInited == FALSE) {
+        LOGE("postproc not initialized yet");
+        return UNKNOWN_ERROR;
+    }
+
+    int32_t rc = NO_ERROR;
+    camera_memory_t *jpeg_mem = NULL;
+    omx_jpeg_ouput_buf_t *jpeg_out = NULL;
+    void *jpegData = NULL;
+    // Longshot with save-proc: hand the payload off to the saver thread
+    // instead of delivering the jpeg callback inline.
+    if (mUseSaveProc && m_parent->isLongshotEnabled()) {
+        qcamera_jpeg_evt_payload_t *saveData = ( qcamera_jpeg_evt_payload_t * ) malloc(sizeof(qcamera_jpeg_evt_payload_t));
+        if ( NULL == saveData ) {
+            LOGE("Can not allocate save data message!");
+            return NO_MEMORY;
+        }
+        *saveData = *evt;
+        if (m_inputSaveQ.enqueue((void *) saveData)) {
+            m_saveProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+        } else {
+            LOGD("m_inputSaveQ PP Q is not active!!!");
+            free(saveData);
+            saveData = NULL;
+            return rc;
+        }
+    } else {
+        /* To be removed later when ISP Frame sync feature is available
+                qcamera_jpeg_data_t *jpeg_job =
+                    (qcamera_jpeg_data_t *)m_ongoingJpegQ.dequeue(matchJobId,
+                    (void*)&evt->jobId);
+                    uint32_t frame_idx = jpeg_job->src_frame->bufs[0]->frame_idx;*/
+        // NOTE(review): frame index is hard-coded while the lookup above is
+        // disabled (pending ISP frame sync) -- confirm nothing downstream
+        // relies on a real frame index.
+        uint32_t frame_idx = 75;
+        LOGH("FRAME INDEX %d", frame_idx);
+        // Release jpeg job data
+        m_ongoingJpegQ.flushNodes(matchJobId, (void*)&evt->jobId);
+
+        if (m_inputPPQ.getCurrentSize() > 0) {
+            m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+        }
+        LOGH("[KPI Perf] : jpeg job %d", evt->jobId);
+
+        // Skip the callback when the app never asked for compressed images.
+        if ((false == m_parent->m_bIntJpegEvtPending) &&
+             (m_parent->mDataCb == NULL ||
+              m_parent->msgTypeEnabledWithLock(CAMERA_MSG_COMPRESSED_IMAGE) == 0 )) {
+            LOGW("No dataCB or CAMERA_MSG_COMPRESSED_IMAGE not enabled");
+            rc = NO_ERROR;
+            goto end;
+        }
+
+        if(evt->status == JPEG_JOB_STATUS_ERROR) {
+            LOGE("Error event handled from jpeg, status = %d",
+                   evt->status);
+            rc = FAILED_TRANSACTION;
+            goto end;
+        }
+        // Resolve the jpeg bitstream pointer, which depends on the memory
+        // optimization mode.
+        if (!mJpegMemOpt) {
+            jpegData = evt->out_data.buf_vaddr;
+        }
+        else {
+            jpeg_out  = (omx_jpeg_ouput_buf_t*) evt->out_data.buf_vaddr;
+            if (jpeg_out != NULL) {
+                jpeg_mem = (camera_memory_t *)jpeg_out->mem_hdl;
+                if (jpeg_mem != NULL) {
+                    jpegData = jpeg_mem->data;
+                }
+            }
+        }
+        m_parent->dumpJpegToFile(jpegData,
+                                  evt->out_data.buf_filled_len,
+                                  evt->jobId);
+        LOGH("Dump jpeg_size=%d", evt->out_data.buf_filled_len);
+        if(true == m_parent->m_bIntJpegEvtPending) {
+              //Sending JPEG snapshot taken notification to HAL
+              pthread_mutex_lock(&m_parent->m_int_lock);
+              pthread_cond_signal(&m_parent->m_int_cond);
+              pthread_mutex_unlock(&m_parent->m_int_lock);
+              m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+              return rc;
+        }
+        if (!mJpegMemOpt) {
+            // alloc jpeg memory to pass to upper layer
+            jpeg_mem = m_parent->mGetMemory(-1, evt->out_data.buf_filled_len,
+                1, m_parent->mCallbackCookie);
+            if (NULL == jpeg_mem) {
+                rc = NO_MEMORY;
+                LOGE("getMemory for jpeg, ret = NO_MEMORY");
+                goto end;
+            }
+            memcpy(jpeg_mem->data, evt->out_data.buf_vaddr, evt->out_data.buf_filled_len);
+        }
+        LOGH("Calling upperlayer callback to store JPEG image");
+        qcamera_release_data_t release_data;
+        memset(&release_data, 0, sizeof(qcamera_release_data_t));
+        release_data.data = jpeg_mem;
+        LOGI("[KPI Perf]: PROFILE_JPEG_CB ");
+        rc = sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
+                jpeg_mem,
+                0,
+                NULL,
+                &release_data,
+                frame_idx);
+        m_parent->setOutputImageCount(m_parent->getOutputImageCount() + 1);
+
+end:
+        if (rc != NO_ERROR) {
+            // send error msg to upper layer
+            LOGE("Jpeg Encoding failed. Notify Application");
+            sendEvtNotify(CAMERA_MSG_ERROR,
+                          UNKNOWN_ERROR,
+                          0);
+
+            if (NULL != jpeg_mem) {
+                jpeg_mem->release(jpeg_mem);
+                jpeg_mem = NULL;
+            }
+        }
+
+        /* check whether to send callback for depth map */
+        // NOTE(review): if control reached "end" via a goto, release_data
+        // was never memset, so fields other than .data assigned below are
+        // indeterminate on this path -- confirm this is intended.
+        if (m_parent->mParameters.isUbiRefocus() &&
+                (m_parent->getOutputImageCount() + 1 ==
+                        m_parent->mParameters.getRefocusOutputCount())) {
+            m_parent->setOutputImageCount(m_parent->getOutputImageCount() + 1);
+
+            jpeg_mem = m_DataMem;
+            release_data.data = jpeg_mem;
+            m_DataMem = NULL;
+            LOGH("[KPI Perf]: send jpeg callback for depthmap ");
+            rc = sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
+                    jpeg_mem,
+                    0,
+                    NULL,
+                    &release_data,
+                    frame_idx);
+            if (rc != NO_ERROR) {
+                // send error msg to upper layer
+                sendEvtNotify(CAMERA_MSG_ERROR,
+                        UNKNOWN_ERROR,
+                        0);
+                if (NULL != jpeg_mem) {
+                    jpeg_mem->release(jpeg_mem);
+                    jpeg_mem = NULL;
+                }
+            }
+            m_DataMem = NULL;
+        }
+    }
+
+    // wait up data proc thread to do next job,
+    // if previous request is blocked due to ongoing jpeg job
+    m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : processPPData
+ *
+ * DESCRIPTION: process received frame after reprocess.
+ *
+ * PARAMETERS :
+ *   @frame   : received frame from reprocess channel.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *
+ * NOTE       : The frame after reprocess need to send to jpeg encoding.
+ *==========================================================================*/
+int32_t QCameraPostProcessor::processPPData(mm_camera_super_buf_t *frame)
+{
+    bool triggerEvent = TRUE;
+
+    LOGD("QCameraPostProcessor::processPPData");
+    bool needSuperBufMatch = m_parent->mParameters.generateThumbFromMain();
+    if (m_bInited == FALSE) {
+        LOGE("postproc not initialized yet");
+        return UNKNOWN_ERROR;
+    }
+
+    qcamera_pp_data_t *job = (qcamera_pp_data_t *)m_ongoingPPQ.dequeue();
+    if (NULL == job) {
+        LOGE("Cannot find reprocess job");
+        return BAD_VALUE;
+    }
+
+    // NOTE(review): the early BAD_VALUE returns below leave the dequeued
+    // "job" (and its frames) unfreed -- possible leak, confirm ownership.
+    if (!needSuperBufMatch && (job->src_frame == NULL
+            || job->src_reproc_frame == NULL) ) {
+        LOGE("Invalid reprocess job");
+        return BAD_VALUE;
+    }
+
+    // NV16/NV21 picture formats bypass jpeg encoding and are delivered
+    // through the raw data path instead.
+    if (!needSuperBufMatch && (m_parent->mParameters.isNV16PictureFormat() ||
+        m_parent->mParameters.isNV21PictureFormat())) {
+        releaseOngoingPPData(job, this);
+        free(job);
+
+        if(m_parent->mParameters.isYUVFrameInfoNeeded())
+            setYUVFrameInfo(frame);
+        return processRawData(frame);
+    }
+#ifdef TARGET_TS_MAKEUP
+    // find snapshot frame frame
+    mm_camera_buf_def_t *pReprocFrame = NULL;
+    QCameraStream * pSnapshotStream = NULL;
+    QCameraChannel *pChannel = m_parent->getChannelByHandle(frame->ch_id);
+    if (pChannel == NULL) {
+        // Fall back to the internal post-proc channels when the parent
+        // does not own this channel handle.
+        for (int8_t i = 0; i < mPPChannelCount; i++) {
+            if ((mPPChannels[i] != NULL) &&
+                    (mPPChannels[i]->getMyHandle() == frame->ch_id)) {
+                pChannel = mPPChannels[i];
+                break;
+            }
+        }
+    }
+    if (pChannel == NULL) {
+        LOGE("No corresponding channel (ch_id = %d) exist, return here",
+                frame->ch_id);
+        return BAD_VALUE;
+    }
+
+    for (uint32_t i = 0; i < frame->num_bufs; i++) {
+        pSnapshotStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+        if (pSnapshotStream != NULL) {
+            if (pSnapshotStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
+                pReprocFrame = frame->bufs[i];
+                break;
+            }
+        }
+    }
+    if (pReprocFrame != NULL && m_parent->mParameters.isFaceDetectionEnabled()) {
+        m_parent->TsMakeupProcess_Snapshot(pReprocFrame,pSnapshotStream);
+    } else {
+        LOGH("pReprocFrame == NULL || isFaceDetectionEnabled = %d",
+                m_parent->mParameters.isFaceDetectionEnabled());
+    }
+#endif
+    if ((m_parent->isLongshotEnabled())
+            && (!m_parent->isCaptureShutterEnabled())
+            && (!m_parent->mCACDoneReceived)) {
+        // play shutter sound for longshot
+        // after reprocess is done
+        m_parent->playShutter();
+    }
+    m_parent->mCACDoneReceived = FALSE;
+
+    int8_t mCurReprocCount = job->reprocCount;
+    int8_t mCurChannelIndex = job->ppChannelIndex;
+    if ( mCurReprocCount > 1 ) {
+        //In case of pp 2nd pass, we can release input of 2nd pass
+        releaseSuperBuf(job->src_frame);
+        free(job->src_frame);
+        job->src_frame = NULL;
+    }
+
+    LOGD("mCurReprocCount = %d mCurChannelIndex = %d mTotalNumReproc = %d",
+             mCurReprocCount, mCurChannelIndex,
+            m_parent->mParameters.getReprocCount());
+    if (mCurReprocCount < m_parent->mParameters.getReprocCount()) {
+        //More pp pass needed. Push frame back to pp queue.
+        qcamera_pp_data_t *pp_request_job = job;
+        pp_request_job->src_frame = frame;
+
+        // Advance to the next pp channel once the current channel has
+        // finished all of its configured reprocess passes.
+        if ((mPPChannels[mCurChannelIndex]->getReprocCount()
+                == mCurReprocCount) &&
+                (mPPChannels[mCurChannelIndex + 1] != NULL)) {
+            pp_request_job->ppChannelIndex++;
+        }
+
+        // enqueu to post proc input queue
+        if (false == m_inputPPQ.enqueue((void *)pp_request_job)) {
+            LOGW("m_input PP Q is not active!!!");
+            releaseOngoingPPData(pp_request_job,this);
+            free(pp_request_job);
+            pp_request_job = NULL;
+            triggerEvent = FALSE;
+        }
+    } else {
+        //Done with post processing. Send frame to Jpeg
+        qcamera_jpeg_data_t *jpeg_job =
+                (qcamera_jpeg_data_t *)malloc(sizeof(qcamera_jpeg_data_t));
+        if (jpeg_job == NULL) {
+            LOGE("No memory for jpeg job");
+            // NOTE(review): "job" is not freed on this path either.
+            return NO_MEMORY;
+        }
+
+        memset(jpeg_job, 0, sizeof(qcamera_jpeg_data_t));
+        // Transfer ownership of the reprocessed frame and the cached
+        // source buffers from the pp job to the jpeg job.
+        jpeg_job->src_frame = frame;
+        jpeg_job->src_reproc_frame = job ? job->src_reproc_frame : NULL;
+        jpeg_job->src_reproc_bufs = job ? job->src_reproc_bufs : NULL;
+        jpeg_job->reproc_frame_release = job ? job->reproc_frame_release : false;
+        jpeg_job->offline_reproc_buf = job ? job->offline_reproc_buf : NULL;
+        jpeg_job->offline_buffer = job ? job->offline_buffer : false;
+
+        // find meta data frame
+        mm_camera_buf_def_t *meta_frame = NULL;
+        if (m_parent->mParameters.isAdvCamFeaturesEnabled()) {
+            size_t meta_idx = m_parent->mParameters.getExifBufIndex(m_PPindex);
+            if (m_InputMetadata.size() >= (meta_idx + 1)) {
+                meta_frame = m_InputMetadata.itemAt(meta_idx);
+            } else {
+                LOGW("Input metadata vector contains %d entries, index required %d",
+                         m_InputMetadata.size(), meta_idx);
+            }
+            m_PPindex++;
+        } else {
+            for (uint32_t i = 0; job && job->src_reproc_frame &&
+                    (i < job->src_reproc_frame->num_bufs); i++) {
+                // look through input superbuf
+                if (job->src_reproc_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_METADATA) {
+                    meta_frame = job->src_reproc_frame->bufs[i];
+                    break;
+                }
+            }
+
+            if (meta_frame == NULL) {
+                // look through reprocess superbuf
+                for (uint32_t i = 0; i < frame->num_bufs; i++) {
+                    if (frame->bufs[i]->stream_type == CAM_STREAM_TYPE_METADATA) {
+                        meta_frame = frame->bufs[i];
+                        break;
+                    }
+                }
+            }
+        }
+        if (meta_frame != NULL) {
+            // fill in meta data frame ptr
+            jpeg_job->metadata = (metadata_buffer_t *)meta_frame->buffer;
+        }
+
+        // enqueu reprocessed frame to jpeg input queue
+        if (false == m_inputJpegQ.enqueue((void *)jpeg_job)) {
+            LOGW("Input Jpeg Q is not active!!!");
+            releaseJpegJobData(jpeg_job);
+            free(jpeg_job);
+            jpeg_job = NULL;
+            triggerEvent = FALSE;
+        }
+
+        // free pp job buf
+        pthread_mutex_lock(&m_reprocess_lock);
+        if (job) {
+            free(job);
+        }
+        pthread_mutex_unlock(&m_reprocess_lock);
+    }
+
+    LOGD("");
+    // wait up data proc thread
+
+    if (triggerEvent) {
+        m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : findJpegJobByJobId
+ *
+ * DESCRIPTION: find a jpeg job from ongoing Jpeg queue by its job ID
+ *
+ * PARAMETERS :
+ *   @jobId   : job Id of the job
+ *
+ * RETURN     : ptr to a jpeg job struct. NULL if not found.
+ *
+ * NOTE       : Currently only one job is sending to mm-jpeg-interface for jpeg
+ *              encoding. Therefore simply dequeue from the ongoing Jpeg Queue
+ *              will serve the purpose to find the jpeg job.
+ *==========================================================================*/
+qcamera_jpeg_data_t *QCameraPostProcessor::findJpegJobByJobId(uint32_t jobId)
+{
+    if (0 == jobId) {
+        LOGE("not a valid jpeg jobId");
+        return NULL;
+    }
+
+    // Only one jpeg job is in flight at a time, so the head of the ongoing
+    // queue is, by construction, the job being looked up.
+    return (qcamera_jpeg_data_t *)m_ongoingJpegQ.dequeue();
+}
+
+/*===========================================================================
+ * FUNCTION   : releasePPInputData
+ *
+ * DESCRIPTION: callback function to release post process input data node
+ *
+ * PARAMETERS :
+ *   @data      : ptr to post process input data
+ *   @user_data : user data ptr (QCameraReprocessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraPostProcessor::releasePPInputData(void *data, void *user_data)
+{
+    QCameraPostProcessor *pme = (QCameraPostProcessor *)user_data;
+    if (pme == NULL) {
+        return;
+    }
+
+    qcamera_pp_request_t *pp_job = (qcamera_pp_request_t *)data;
+
+    if (pp_job->src_frame != NULL) {
+        pme->releaseSuperBuf(pp_job->src_frame);
+        // When both members alias the same superbuf, clear the alias so it
+        // is not released a second time below.
+        if (pp_job->src_reproc_frame == pp_job->src_frame) {
+            pp_job->src_reproc_frame = NULL;
+        }
+        free(pp_job->src_frame);
+        pp_job->src_frame = NULL;
+    }
+
+    if (pp_job->src_reproc_frame != NULL) {
+        pme->releaseSuperBuf(pp_job->src_reproc_frame);
+        free(pp_job->src_reproc_frame);
+        pp_job->src_reproc_frame = NULL;
+    }
+
+    pp_job->reprocCount = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseJpegData
+ *
+ * DESCRIPTION: callback function to release jpeg job node
+ *
+ * PARAMETERS :
+ *   @data      : ptr to ongoing jpeg job data
+ *   @user_data : user data ptr (QCameraReprocessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraPostProcessor::releaseJpegData(void *data, void *user_data)
+{
+    QCameraPostProcessor *pme = (QCameraPostProcessor *)user_data;
+    if (pme == NULL) {
+        return;
+    }
+
+    qcamera_jpeg_data_t *jpeg_job = (qcamera_jpeg_data_t *)data;
+    // releaseJpegJobData frees the job's internals, not the node itself,
+    // so reading jobId afterwards for the log is still valid.
+    pme->releaseJpegJobData(jpeg_job);
+    LOGH("Rleased job ID %u", jpeg_job->jobId);
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseOngoingPPData
+ *
+ * DESCRIPTION: callback function to release ongoing postprocess job node
+ *
+ * PARAMETERS :
+ *   @data      : ptr to ongoing postprocess job
+ *   @user_data : user data ptr (QCameraReprocessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraPostProcessor::releaseOngoingPPData(void *data, void *user_data)
+{
+    QCameraPostProcessor *pme = (QCameraPostProcessor *)user_data;
+    if (NULL != pme) {
+        qcamera_pp_data_t *pp_job = (qcamera_pp_data_t *)data;
+        if (NULL != pp_job->src_frame) {
+            // Skip the buffer return when the source buffers were cached
+            // (regular capture: the source channel no longer exists).
+            if (!pp_job->reproc_frame_release) {
+                pme->releaseSuperBuf(pp_job->src_frame);
+            }
+            // src_frame and src_reproc_frame may alias the same superbuf;
+            // clear the alias to avoid a double release/free below.
+            if (pp_job->src_frame == pp_job->src_reproc_frame)
+                pp_job->src_reproc_frame = NULL;
+
+            free(pp_job->src_frame);
+            pp_job->src_frame = NULL;
+        }
+        if (NULL != pp_job->src_reproc_frame) {
+            pme->releaseSuperBuf(pp_job->src_reproc_frame);
+            free(pp_job->src_reproc_frame);
+            pp_job->src_reproc_frame = NULL;
+        }
+        if ((pp_job->offline_reproc_buf != NULL)
+                && (pp_job->offline_buffer)) {
+            free(pp_job->offline_reproc_buf);
+            // Fix: also clear the pointer so a repeated release of the same
+            // job cannot double-free it (consistent with the members above).
+            pp_job->offline_reproc_buf = NULL;
+            pp_job->offline_buffer = false;
+        }
+        pp_job->reprocCount = 0;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseNotifyData
+ *
+ * DESCRIPTION: function to release internal resources in notify data struct
+ *
+ * PARAMETERS :
+ *   @user_data  : ptr user data
+ *   @cookie     : callback cookie
+ *   @cb_status  : callback status
+ *
+ * RETURN     : None
+ *
+ * NOTE       : deallocate jpeg heap memory if it's not NULL
+ *==========================================================================*/
+void QCameraPostProcessor::releaseNotifyData(void *user_data,
+                                             void *cookie,
+                                             int32_t cb_status)
+{
+    LOGD("releaseNotifyData release_data %p", user_data);
+
+    qcamera_data_argm_t *app_cb = ( qcamera_data_argm_t * ) user_data;
+    QCameraPostProcessor *postProc = ( QCameraPostProcessor * ) cookie;
+    if ( ( NULL != app_cb ) && ( NULL != postProc ) ) {
+
+        // If the stored-file callback failed, remove the file whose path is
+        // carried in the data buffer.
+        if ( postProc->mUseSaveProc &&
+             app_cb->release_data.unlinkFile &&
+             ( NO_ERROR != cb_status ) ) {
+
+            String8 unlinkPath((const char *) app_cb->release_data.data->data,
+                                app_cb->release_data.data->size);
+            int rc = unlink(unlinkPath.string());
+            LOGH("Unlinking stored file rc = %d",
+                  rc);
+        }
+
+        // Fix: dropped the redundant "app_cb &&" re-checks below; app_cb is
+        // already known to be non-NULL inside this branch.
+        if (NULL != app_cb->release_data.data) {
+            app_cb->release_data.data->release(app_cb->release_data.data);
+            app_cb->release_data.data = NULL;
+        }
+        if (NULL != app_cb->release_data.frame) {
+            postProc->releaseSuperBuf(app_cb->release_data.frame);
+            free(app_cb->release_data.frame);
+            app_cb->release_data.frame = NULL;
+        }
+        if (NULL != app_cb->release_data.streamBufs) {
+            app_cb->release_data.streamBufs->deallocate();
+            delete app_cb->release_data.streamBufs;
+            app_cb->release_data.streamBufs = NULL;
+        }
+        free(app_cb);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseSuperBuf
+ *
+ * DESCRIPTION: function to release a superbuf frame by returning back to kernel
+ *
+ * PARAMETERS :
+ * @super_buf : ptr to the superbuf frame
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraPostProcessor::releaseSuperBuf(mm_camera_super_buf_t *super_buf)
+{
+    if (super_buf == NULL) {
+        return;
+    }
+
+    // Resolve the owning channel: first through the parent, then by
+    // scanning the internal post-proc channels for a matching handle.
+    QCameraChannel *pChannel = m_parent->getChannelByHandle(super_buf->ch_id);
+    for (int8_t i = 0; (pChannel == NULL) && (i < mPPChannelCount); i++) {
+        if ((mPPChannels[i] != NULL) &&
+                (mPPChannels[i]->getMyHandle() == super_buf->ch_id)) {
+            pChannel = mPPChannels[i];
+        }
+    }
+
+    if (pChannel == NULL) {
+        LOGE("Channel id %d not found!!",
+              super_buf->ch_id);
+        return;
+    }
+
+    pChannel->bufDone(super_buf);
+}
+
+/*===========================================================================
+ * FUNCTION    : releaseSuperBuf
+ *
+ * DESCRIPTION : function to release a superbuf frame by returning back to kernel
+ *
+ * PARAMETERS  :
+ * @super_buf  : ptr to the superbuf frame
+ * @stream_type: Type of stream to be released
+ *
+ * RETURN      : None
+ *==========================================================================*/
+void QCameraPostProcessor::releaseSuperBuf(mm_camera_super_buf_t *super_buf,
+        cam_stream_type_t stream_type)
+{
+    if (super_buf == NULL) {
+        return;
+    }
+
+    // Resolve the owning channel: parent first, then the internal
+    // post-proc channels.
+    QCameraChannel *pChannel = m_parent->getChannelByHandle(super_buf->ch_id);
+    for (int8_t i = 0; (pChannel == NULL) && (i < mPPChannelCount); i++) {
+        if ((mPPChannels[i] != NULL) &&
+                (mPPChannels[i]->getMyHandle() == super_buf->ch_id)) {
+            pChannel = mPPChannels[i];
+        }
+    }
+
+    if (pChannel == NULL) {
+        LOGE("Channel id %d not found!!",
+               super_buf->ch_id);
+        return;
+    }
+
+    // Return only the first buffer whose stream (or original stream) type
+    // matches the requested one.
+    for (uint32_t i = 0; i < super_buf->num_bufs; i++) {
+        mm_camera_buf_def_t *buf = super_buf->bufs[i];
+        if (buf == NULL) {
+            continue;
+        }
+        QCameraStream *pStream = pChannel->getStreamByHandle(buf->stream_id);
+        if ((pStream != NULL) && ((pStream->getMyType() == stream_type)
+                || (pStream->getMyOriginalType() == stream_type))) {
+            pChannel->bufDone(super_buf, buf->stream_id);
+            break;
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseJpegJobData
+ *
+ * DESCRIPTION: function to release internal resources in jpeg job struct
+ *
+ * PARAMETERS :
+ *   @job     : ptr to jpeg job struct
+ *
+ * RETURN     : None
+ *
+ * NOTE       : original source frame need to be queued back to kernel for
+ *              future use. Output buf of jpeg job need to be released since
+ *              it's allocated for each job. Exif object need to be deleted.
+ *==========================================================================*/
+void QCameraPostProcessor::releaseJpegJobData(qcamera_jpeg_data_t *job)
+{
+    LOGD("E");
+    if (NULL != job) {
+        if (NULL != job->src_reproc_frame) {
+            // When reproc_frame_release is set, the buffers were cached and
+            // the source channel is gone; only return them otherwise.
+            if (!job->reproc_frame_release) {
+                releaseSuperBuf(job->src_reproc_frame);
+            }
+            free(job->src_reproc_frame);
+            job->src_reproc_frame = NULL;
+        }
+
+        if (NULL != job->src_frame) {
+            releaseSuperBuf(job->src_frame);
+            free(job->src_frame);
+            job->src_frame = NULL;
+        }
+
+        if (NULL != job->pJpegExifObj) {
+            delete job->pJpegExifObj;
+            job->pJpegExifObj = NULL;
+        }
+
+        if (NULL != job->src_reproc_bufs) {
+            delete [] job->src_reproc_bufs;
+            // Fix: clear the pointer to guard against a double delete if
+            // the job is released twice.
+            job->src_reproc_bufs = NULL;
+        }
+
+        if ((job->offline_reproc_buf != NULL)
+                && (job->offline_buffer)) {
+            free(job->offline_reproc_buf);
+            // Fix: clear the pointer as well, consistent with the other
+            // members released above.
+            job->offline_reproc_buf = NULL;
+            job->offline_buffer = false;
+        }
+    }
+    LOGD("X");
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseSaveJobData
+ *
+ * DESCRIPTION: releases the internal resources tied to a queued save job
+ *
+ * PARAMETERS :
+ *   @data      : ptr to the jpeg event payload carrying the job id
+ *   @user_data : opaque handle to the owning QCameraPostProcessor
+ *
+ * RETURN     : None
+ *
+ *==========================================================================*/
+void QCameraPostProcessor::releaseSaveJobData(void *data, void *user_data)
+{
+    LOGD("E");
+
+    QCameraPostProcessor *postproc =
+            reinterpret_cast<QCameraPostProcessor *>(user_data);
+    if (NULL == postproc) {
+        LOGE("Invalid postproc handle");
+        return;
+    }
+
+    qcamera_jpeg_evt_payload_t *payload =
+            reinterpret_cast<qcamera_jpeg_evt_payload_t *>(data);
+    if (NULL == payload) {
+        LOGE("Invalid jpeg event data");
+        return;
+    }
+
+    // Look up the pending jpeg job by its id and free it if still present.
+    qcamera_jpeg_data_t *jpeg_job = postproc->findJpegJobByJobId(payload->jobId);
+    if (NULL == jpeg_job) {
+        LOGE("Invalid jpeg job");
+    } else {
+        postproc->releaseJpegJobData(jpeg_job);
+        free(jpeg_job);
+    }
+
+    LOGD("X");
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseRawData
+ *
+ * DESCRIPTION: queues the buffers of a raw super buffer back to the camera
+ *
+ * PARAMETERS :
+ *   @data      : ptr to the raw super buffer to be released
+ *   @user_data : opaque handle to the owning QCameraPostProcessor
+ *
+ * RETURN     : None
+ *
+ *==========================================================================*/
+void QCameraPostProcessor::releaseRawData(void *data, void *user_data)
+{
+    LOGD("E");
+
+    QCameraPostProcessor *postproc =
+            reinterpret_cast<QCameraPostProcessor *>(user_data);
+    if (NULL == postproc) {
+        LOGE("Invalid postproc handle");
+        return;
+    }
+
+    postproc->releaseSuperBuf(reinterpret_cast<mm_camera_super_buf_t *>(data));
+
+    LOGD("X");
+}
+
+
+/*===========================================================================
+ * FUNCTION   : getColorfmtFromImgFmt
+ *
+ * DESCRIPTION: maps a camera image format to the jpeg color format
+ *              understood by the omx jpeg library
+ *
+ * PARAMETERS :
+ *   @img_fmt : camera image format
+ *
+ * RETURN     : jpeg color format that can be understandable by omx lib
+ *==========================================================================*/
+mm_jpeg_color_format QCameraPostProcessor::getColorfmtFromImgFmt(cam_format_t img_fmt)
+{
+    mm_jpeg_color_format jpeg_fmt;
+
+    switch (img_fmt) {
+    case CAM_FORMAT_YUV_420_NV12:
+    case CAM_FORMAT_YUV_420_NV12_VENUS:
+    case CAM_FORMAT_YUV_420_YV12:
+        // CbCr chroma order, 4:2:0 subsampling
+        jpeg_fmt = MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2;
+        break;
+    case CAM_FORMAT_YUV_422_NV61:
+        // CrCb chroma order, 4:2:2 subsampling
+        jpeg_fmt = MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1;
+        break;
+    case CAM_FORMAT_YUV_422_NV16:
+        // CbCr chroma order, 4:2:2 subsampling
+        jpeg_fmt = MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1;
+        break;
+    case CAM_FORMAT_YUV_420_NV21:
+    case CAM_FORMAT_YUV_420_NV21_VENUS:
+    case CAM_FORMAT_YUV_420_NV21_ADRENO:
+    default:
+        // CrCb chroma order, 4:2:0 subsampling -- also the fallback
+        jpeg_fmt = MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
+        break;
+    }
+
+    return jpeg_fmt;
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegImgTypeFromImgFmt
+ *
+ * DESCRIPTION: function to return jpeg encode image type based on its image format
+ *
+ * PARAMETERS :
+ *   @img_fmt : image format
+ *
+ * RETURN     : return jpeg source image format (YUV or Bitstream)
+ *==========================================================================*/
+mm_jpeg_format_t QCameraPostProcessor::getJpegImgTypeFromImgFmt(cam_format_t img_fmt)
+{
+    // Every camera format currently handled here (NV21/NV12/YV12/NV61/NV16
+    // and all variants) is YUV based, and unrecognized formats also fall
+    // back to YUV -- so the mapping is unconditional.
+    (void)img_fmt;
+    return MM_JPEG_FMT_YUV;
+}
+
+/*===========================================================================
+ * FUNCTION   : queryStreams
+ *
+ * DESCRIPTION: utility method for retrieving main, thumbnail and reprocess
+ *              streams and frame from bundled super buffer
+ *
+ * PARAMETERS :
+ *   @main    : ptr to main stream if present
+ *   @thumb   : ptr to thumbnail stream if present
+ *   @reproc  : ptr to reprocess stream if present
+ *   @main_image : ptr to main image if present
+ *   @thumb_image: ptr to thumbnail image if present
+ *   @frame   : bundled super buffer
+ *   @reproc_frame : bundled source frame buffer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::queryStreams(QCameraStream **main,
+        QCameraStream **thumb,
+        QCameraStream **reproc,
+        mm_camera_buf_def_t **main_image,
+        mm_camera_buf_def_t **thumb_image,
+        mm_camera_super_buf_t *frame,
+        mm_camera_super_buf_t *reproc_frame)
+{
+    if (NULL == frame) {
+        return NO_INIT;
+    }
+
+    QCameraChannel *pChannel = m_parent->getChannelByHandle(frame->ch_id);
+    // check reprocess channel if not found
+    if (pChannel == NULL) {
+        for (int8_t i = 0; i < mPPChannelCount; i++) {
+            if ((mPPChannels[i] != NULL) &&
+                    (mPPChannels[i]->getMyHandle() == frame->ch_id)) {
+                pChannel = mPPChannels[i];
+                break;
+            }
+        }
+    }
+    if (pChannel == NULL) {
+        LOGD("No corresponding channel (ch_id = %d) exist, return here",
+               frame->ch_id);
+        return BAD_VALUE;
+    }
+
+    // Use snapshot stream to create thumbnail if snapshot and preview
+    // flip settings doesn't match in ZSL mode.
+    bool thumb_stream_needed = ((!m_parent->isZSLMode() ||
+        (m_parent->mParameters.getFlipMode(CAM_STREAM_TYPE_SNAPSHOT) ==
+         m_parent->mParameters.getFlipMode(CAM_STREAM_TYPE_PREVIEW))) &&
+        !m_parent->mParameters.generateThumbFromMain());
+
+    *main = *thumb = *reproc = NULL;
+    *main_image = *thumb_image = NULL;
+    // find snapshot frame and thumbnail frame
+    // NOTE: there is no break below -- when multiple buffers in the bundle
+    // match the same category, the last matching buffer wins.
+    for (uint32_t i = 0; i < frame->num_bufs; i++) {
+        QCameraStream *pStream =
+                pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+        if (pStream != NULL) {
+            // Main image: snapshot or video stream (current or original
+            // type), or a RAW-origin stream when offline RAW is enabled.
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+                    pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+                    pStream->isTypeOf(CAM_STREAM_TYPE_VIDEO) ||
+                    pStream->isOrignalTypeOf(CAM_STREAM_TYPE_VIDEO) ||
+                    (m_parent->mParameters.getofflineRAW() &&
+                            pStream->isOrignalTypeOf(CAM_STREAM_TYPE_RAW))) {
+                *main= pStream;
+                *main_image = frame->bufs[i];
+            } else if (thumb_stream_needed &&
+                       (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+                        pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
+                        pStream->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+                        pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW))) {
+                // Thumbnail source: preview/postview stream.
+                *thumb = pStream;
+                *thumb_image = frame->bufs[i];
+            }
+            // A buffer may simultaneously be the reprocess output; this is
+            // checked independently of the main/thumb classification above.
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_OFFLINE_PROC) ) {
+                *reproc = pStream;
+            }
+        }
+    }
+
+    // Fall back to the source (pre-reprocess) super buffer for a thumbnail
+    // when the bundled frame did not carry one.
+    if (thumb_stream_needed && *thumb_image == NULL && reproc_frame != NULL) {
+        QCameraChannel *pSrcReprocChannel = NULL;
+        pSrcReprocChannel = m_parent->getChannelByHandle(reproc_frame->ch_id);
+        if (pSrcReprocChannel != NULL) {
+            // find thumbnail frame
+            for (uint32_t i = 0; i < reproc_frame->num_bufs; i++) {
+                QCameraStream *pStream =
+                        pSrcReprocChannel->getStreamByHandle(
+                                reproc_frame->bufs[i]->stream_id);
+                if (pStream != NULL) {
+                    if (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+                        pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW)) {
+                        *thumb = pStream;
+                        *thumb_image = reproc_frame->bufs[i];
+                    }
+                }
+            }
+        }
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+* FUNCTION   : syncStreamParams
+*
+* DESCRIPTION: Query the runtime parameters of all streams included
+*              in the main and reprocessed frames
+*
+* PARAMETERS :
+*   @frame : Main image super buffer
+*   @reproc_frame : Image supper buffer that got processed
+*
+* RETURN     : int32_t type of status
+*              NO_ERROR  -- success
+*              none-zero failure code
+*==========================================================================*/
+int32_t QCameraPostProcessor::syncStreamParams(mm_camera_super_buf_t *frame,
+        mm_camera_super_buf_t *reproc_frame)
+{
+    QCameraStream *main_stream = NULL;
+    QCameraStream *thumb_stream = NULL;
+    QCameraStream *reproc_stream = NULL;
+    mm_camera_buf_def_t *main_frame = NULL;
+    mm_camera_buf_def_t *thumb_frame = NULL;
+
+    // Resolve the streams bundled in the input super buffers.
+    int32_t rc = queryStreams(&main_stream, &thumb_stream, &reproc_stream,
+            &main_frame, &thumb_frame, frame, reproc_frame);
+    if (rc != NO_ERROR) {
+        LOGE("Camera streams query from input frames failed %d",
+                rc);
+        return rc;
+    }
+
+    if (main_stream != NULL) {
+        rc = main_stream->syncRuntimeParams();
+        if (rc != NO_ERROR) {
+            LOGE("Syncing of main stream runtime parameters failed %d",
+                    rc);
+            return rc;
+        }
+    }
+
+    if (thumb_stream != NULL) {
+        rc = thumb_stream->syncRuntimeParams();
+        if (rc != NO_ERROR) {
+            LOGE("Syncing of thumb stream runtime parameters failed %d",
+                    rc);
+            return rc;
+        }
+    }
+
+    // The reprocess stream may alias the main stream; skip it in that case
+    // so the same stream is not synced twice.
+    if ((reproc_stream != NULL) && (reproc_stream != main_stream)) {
+        rc = reproc_stream->syncRuntimeParams();
+        if (rc != NO_ERROR) {
+            LOGE("Syncing of reproc stream runtime parameters failed %d",
+                    rc);
+            return rc;
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : encodeData
+ *
+ * DESCRIPTION: function to prepare encoding job information and send to
+ *              mm-jpeg-interface to do the encoding job
+ *
+ * PARAMETERS :
+ *   @jpeg_job_data : ptr to a struct saving job related information
+ *   @needNewSess   : flag to indicate if a new jpeg encoding session need
+ *                    to be created. After creation, this flag will be toggled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::encodeData(qcamera_jpeg_data_t *jpeg_job_data,
+                                         uint8_t &needNewSess)
+{
+    LOGD("E");
+    int32_t ret = NO_ERROR;
+    mm_jpeg_job_t jpg_job;
+    uint32_t jobId = 0;
+    QCameraStream *reproc_stream = NULL;
+    QCameraStream *main_stream = NULL;
+    mm_camera_buf_def_t *main_frame = NULL;
+    QCameraStream *thumb_stream = NULL;
+    mm_camera_buf_def_t *thumb_frame = NULL;
+    mm_camera_super_buf_t *recvd_frame = jpeg_job_data->src_frame;
+    cam_rect_t crop;
+    cam_stream_parm_buffer_t param;
+    cam_stream_img_prop_t imgProp;
+
+    // find channel
+    QCameraChannel *pChannel = m_parent->getChannelByHandle(recvd_frame->ch_id);
+    // check reprocess channel if not found
+    if (pChannel == NULL) {
+        for (int8_t i = 0; i < mPPChannelCount; i++) {
+            if ((mPPChannels[i] != NULL) &&
+                    (mPPChannels[i]->getMyHandle() == recvd_frame->ch_id)) {
+                pChannel = mPPChannels[i];
+                break;
+            }
+        }
+    }
+
+    if (pChannel == NULL) {
+        LOGE("No corresponding channel (ch_id = %d) exist, return here",
+                recvd_frame->ch_id);
+        return BAD_VALUE;
+    }
+
+    const uint32_t jpeg_rotation = m_parent->mParameters.getJpegRotation();
+
+    ret = queryStreams(&main_stream,
+            &thumb_stream,
+            &reproc_stream,
+            &main_frame,
+            &thumb_frame,
+            recvd_frame,
+            jpeg_job_data->src_reproc_frame);
+    if (NO_ERROR != ret) {
+        return ret;
+    }
+
+    if(NULL == main_frame){
+       LOGE("Main frame is NULL");
+       return BAD_VALUE;
+    }
+
+    if(NULL == thumb_frame){
+       LOGD("Thumbnail frame does not exist");
+    }
+
+    QCameraMemory *memObj = (QCameraMemory *)main_frame->mem_info;
+    if (NULL == memObj) {
+        LOGE("Memeory Obj of main frame is NULL");
+        return NO_MEMORY;
+    }
+
+    // dump snapshot frame if enabled
+    m_parent->dumpFrameToFile(main_stream, main_frame,
+            QCAMERA_DUMP_FRM_SNAPSHOT, (char *)"CPP");
+
+    // send upperlayer callback for raw image
+    camera_memory_t *mem = memObj->getMemory(main_frame->buf_idx, false);
+    if (NULL != m_parent->mDataCb &&
+        m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE) > 0) {
+        qcamera_callback_argm_t cbArg;
+        memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+        cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+        cbArg.msg_type = CAMERA_MSG_RAW_IMAGE;
+        cbArg.data = mem;
+        cbArg.index = 0;
+        m_parent->m_cbNotifier.notifyCallback(cbArg);
+    }
+    if (NULL != m_parent->mNotifyCb &&
+        m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE_NOTIFY) > 0) {
+        qcamera_callback_argm_t cbArg;
+        memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+        cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
+        cbArg.msg_type = CAMERA_MSG_RAW_IMAGE_NOTIFY;
+        cbArg.ext1 = 0;
+        cbArg.ext2 = 0;
+        m_parent->m_cbNotifier.notifyCallback(cbArg);
+    }
+
+    if (mJpegClientHandle <= 0) {
+        LOGE("Error: bug here, mJpegClientHandle is 0");
+        return UNKNOWN_ERROR;
+    }
+
+    if (needNewSess) {
+        // create jpeg encoding session
+        mm_jpeg_encode_params_t encodeParam;
+        memset(&encodeParam, 0, sizeof(mm_jpeg_encode_params_t));
+        ret = getJpegEncodingConfig(encodeParam, main_stream, thumb_stream);
+        if (ret != NO_ERROR) {
+            LOGE("error getting encoding config");
+            return ret;
+        }
+        LOGH("[KPI Perf] : call jpeg create_session");
+        ret = mJpegHandle.create_session(mJpegClientHandle, &encodeParam, &mJpegSessionId);
+        if (ret != NO_ERROR) {
+            LOGE("error creating a new jpeg encoding session");
+            return ret;
+        }
+        needNewSess = FALSE;
+    }
+    // Fill in new job
+    memset(&jpg_job, 0, sizeof(mm_jpeg_job_t));
+    jpg_job.job_type = JPEG_JOB_TYPE_ENCODE;
+    jpg_job.encode_job.session_id = mJpegSessionId;
+    jpg_job.encode_job.src_index = (int32_t)main_frame->buf_idx;
+    jpg_job.encode_job.dst_index = 0;
+
+    if (mJpegMemOpt) {
+        jpg_job.encode_job.dst_index = jpg_job.encode_job.src_index;
+    } else if (mUseJpegBurst) {
+        jpg_job.encode_job.dst_index = -1;
+    }
+
+    // use src to reproc frame as work buffer; if src buf is not available
+    // jpeg interface will allocate work buffer
+    if (jpeg_job_data->src_reproc_frame != NULL) {
+        int32_t ret = NO_ERROR;
+        QCameraStream *main_stream = NULL;
+        mm_camera_buf_def_t *main_frame = NULL;
+        QCameraStream *thumb_stream = NULL;
+        mm_camera_buf_def_t *thumb_frame = NULL;
+        QCameraStream *reproc_stream = NULL;
+        mm_camera_buf_def_t *workBuf = NULL;
+        // Call queryStreams to fetch source of reproc frame
+        ret = queryStreams(&main_stream,
+                &thumb_stream,
+                &reproc_stream,
+                &main_frame,
+                &thumb_frame,
+                jpeg_job_data->src_reproc_frame,
+                NULL);
+
+        if ((NO_ERROR == ret) && ((workBuf = main_frame) != NULL)
+                && !m_parent->isLowPowerMode()) {
+            camera_memory_t *camWorkMem = NULL;
+            int workBufIndex = workBuf->buf_idx;
+            QCameraMemory *workMem = (QCameraMemory *)workBuf->mem_info;
+            if (workMem != NULL) {
+                camWorkMem = workMem->getMemory(workBufIndex, false);
+            }
+            if (camWorkMem != NULL && workMem != NULL) {
+                jpg_job.encode_job.work_buf.buf_size = camWorkMem->size;
+                jpg_job.encode_job.work_buf.buf_vaddr = (uint8_t *)camWorkMem->data;
+                jpg_job.encode_job.work_buf.fd = workMem->getFd(workBufIndex);
+                workMem->invalidateCache(workBufIndex);
+            }
+        }
+    }
+
+    cam_dimension_t src_dim;
+    memset(&src_dim, 0, sizeof(cam_dimension_t));
+    main_stream->getFrameDimension(src_dim);
+
+    bool hdr_output_crop = m_parent->mParameters.isHDROutputCropEnabled();
+    bool img_feature_enabled =
+            m_parent->mParameters.isUbiFocusEnabled() ||
+            m_parent->mParameters.isUbiRefocus() ||
+            m_parent->mParameters.isChromaFlashEnabled() ||
+            m_parent->mParameters.isOptiZoomEnabled() ||
+            m_parent->mParameters.isStillMoreEnabled();
+
+    LOGH("Crop needed %d", img_feature_enabled);
+    crop.left = 0;
+    crop.top = 0;
+    crop.height = src_dim.height;
+    crop.width = src_dim.width;
+
+    param = main_stream->getOutputCrop();
+    for (int i = 0; i < param.outputCrop.num_of_streams; i++) {
+       if (param.outputCrop.crop_info[i].stream_id
+           == main_stream->getMyServerID()) {
+               crop = param.outputCrop.crop_info[i].crop;
+               main_stream->setCropInfo(crop);
+       }
+    }
+    if (img_feature_enabled) {
+        memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+
+        param = main_stream->getImgProp();
+        imgProp = param.imgProp;
+        main_stream->setCropInfo(imgProp.crop);
+        crop = imgProp.crop;
+        thumb_stream = NULL; /* use thumbnail from main image */
+
+        if ((reproc_stream != NULL) && (m_DataMem == NULL) &&
+                m_parent->mParameters.isUbiRefocus()) {
+
+            QCameraHeapMemory* miscBufHandler = reproc_stream->getMiscBuf();
+            cam_misc_buf_t* refocusResult =
+                    reinterpret_cast<cam_misc_buf_t *>(miscBufHandler->getPtr(0));
+            uint32_t resultSize = refocusResult->header_size +
+                    refocusResult->width * refocusResult->height;
+            camera_memory_t *dataMem = m_parent->mGetMemory(-1, resultSize,
+                    1, m_parent->mCallbackCookie);
+
+            LOGH("Refocus result header %u dims %dx%d",
+                    resultSize, refocusResult->width, refocusResult->height);
+
+            if (dataMem && dataMem->data) {
+                memcpy(dataMem->data, refocusResult->data, resultSize);
+                //save mem pointer for depth map
+                m_DataMem = dataMem;
+            }
+        }
+    } else if ((reproc_stream != NULL) && (m_parent->mParameters.isTruePortraitEnabled())) {
+
+        QCameraHeapMemory* miscBufHandler = reproc_stream->getMiscBuf();
+        cam_misc_buf_t* tpResult =
+                reinterpret_cast<cam_misc_buf_t *>(miscBufHandler->getPtr(0));
+        uint32_t tpMetaSize = tpResult->header_size + tpResult->width * tpResult->height;
+
+        // Fixed malformed format string ("%d% dims dx%d" had a stray '%'
+        // conversion and dropped one argument); now mirrors the refocus log.
+        LOGH("True portrait result header %u dims %dx%d",
+                tpMetaSize, tpResult->width, tpResult->height);
+
+        CAM_DUMP_TO_FILE(QCAMERA_DUMP_FRM_LOCATION"tp", "bm", -1, "y",
+                &tpResult->data, tpMetaSize);
+    }
+
+    cam_dimension_t dst_dim;
+
+    if (hdr_output_crop && crop.height) {
+        dst_dim.height = crop.height;
+    } else {
+        dst_dim.height = src_dim.height;
+    }
+    if (hdr_output_crop && crop.width) {
+        dst_dim.width = crop.width;
+    } else {
+        dst_dim.width = src_dim.width;
+    }
+
+    // main dim
+    jpg_job.encode_job.main_dim.src_dim = src_dim;
+    jpg_job.encode_job.main_dim.dst_dim = dst_dim;
+    jpg_job.encode_job.main_dim.crop = crop;
+
+    // get 3a sw version info
+    cam_q3a_version_t sw_version =
+        m_parent->getCamHalCapabilities()->q3a_version;
+
+    // get exif data
+    QCameraExif *pJpegExifObj = m_parent->getExifData();
+    jpeg_job_data->pJpegExifObj = pJpegExifObj;
+    if (pJpegExifObj != NULL) {
+        jpg_job.encode_job.exif_info.exif_data = pJpegExifObj->getEntries();
+        jpg_job.encode_job.exif_info.numOfEntries =
+            pJpegExifObj->getNumOfEntries();
+        jpg_job.encode_job.exif_info.debug_data.sw_3a_version[0] =
+            sw_version.major_version;
+        jpg_job.encode_job.exif_info.debug_data.sw_3a_version[1] =
+            sw_version.minor_version;
+        jpg_job.encode_job.exif_info.debug_data.sw_3a_version[2] =
+            sw_version.patch_version;
+        jpg_job.encode_job.exif_info.debug_data.sw_3a_version[3] =
+            sw_version.new_feature_des;
+    }
+
+    // set rotation only when no online rotation or offline pp rotation is done before
+    if (!m_parent->needRotationReprocess()) {
+        jpg_job.encode_job.rotation = jpeg_rotation;
+    }
+    LOGH("jpeg rotation is set to %d", jpg_job.encode_job.rotation);
+
+    // thumbnail dim
+    if (m_bThumbnailNeeded == TRUE) {
+        m_parent->getThumbnailSize(jpg_job.encode_job.thumb_dim.dst_dim);
+
+        if (thumb_stream == NULL) {
+            // need jpeg thumbnail, but no postview/preview stream exists
+            // we use the main stream/frame to encode thumbnail
+            thumb_stream = main_stream;
+            thumb_frame = main_frame;
+        }
+        if (m_parent->needRotationReprocess() &&
+                ((90 == jpeg_rotation) || (270 == jpeg_rotation))) {
+            // swap thumbnail dimensions
+            cam_dimension_t tmp_dim = jpg_job.encode_job.thumb_dim.dst_dim;
+            jpg_job.encode_job.thumb_dim.dst_dim.width = tmp_dim.height;
+            jpg_job.encode_job.thumb_dim.dst_dim.height = tmp_dim.width;
+        }
+
+        memset(&src_dim, 0, sizeof(cam_dimension_t));
+        thumb_stream->getFrameDimension(src_dim);
+        jpg_job.encode_job.thumb_dim.src_dim = src_dim;
+
+        // crop is the same if frame is the same
+        if (thumb_frame != main_frame) {
+            crop.left = 0;
+            crop.top = 0;
+            crop.height = src_dim.height;
+            crop.width = src_dim.width;
+
+            param = thumb_stream->getOutputCrop();
+            for (int i = 0; i < param.outputCrop.num_of_streams; i++) {
+               if (param.outputCrop.crop_info[i].stream_id
+                   == thumb_stream->getMyServerID()) {
+                       crop = param.outputCrop.crop_info[i].crop;
+                       thumb_stream->setCropInfo(crop);
+               }
+           }
+        }
+
+
+        jpg_job.encode_job.thumb_dim.crop = crop;
+        if (thumb_frame != NULL) {
+            jpg_job.encode_job.thumb_index = thumb_frame->buf_idx;
+        }
+        LOGI("Thumbnail idx = %d src w/h (%dx%d), dst w/h (%dx%d)",
+            jpg_job.encode_job.thumb_index,
+            jpg_job.encode_job.thumb_dim.src_dim.width,
+            jpg_job.encode_job.thumb_dim.src_dim.height,
+            jpg_job.encode_job.thumb_dim.dst_dim.width,
+            jpg_job.encode_job.thumb_dim.dst_dim.height);
+    }
+
+    LOGI("Main image idx = %d src w/h (%dx%d), dst w/h (%dx%d)",
+            jpg_job.encode_job.src_index,
+            jpg_job.encode_job.main_dim.src_dim.width,
+            jpg_job.encode_job.main_dim.src_dim.height,
+            jpg_job.encode_job.main_dim.dst_dim.width,
+            jpg_job.encode_job.main_dim.dst_dim.height);
+
+    if (thumb_frame != NULL) {
+        // dump thumbnail frame if enabled
+        m_parent->dumpFrameToFile(thumb_stream, thumb_frame, QCAMERA_DUMP_FRM_THUMBNAIL);
+    }
+
+    if (jpeg_job_data->metadata != NULL) {
+        // fill in meta data frame ptr
+        jpg_job.encode_job.p_metadata = jpeg_job_data->metadata;
+    }
+
+    jpg_job.encode_job.hal_version = CAM_HAL_V1;
+    m_parent->mExifParams.sensor_params.sens_type = m_parent->getSensorType();
+    jpg_job.encode_job.cam_exif_params = m_parent->mExifParams;
+    jpg_job.encode_job.cam_exif_params.debug_params =
+            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
+    if (!jpg_job.encode_job.cam_exif_params.debug_params) {
+        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
+        return NO_MEMORY;
+    }
+
+    jpg_job.encode_job.mobicat_mask = m_parent->mParameters.getMobicatMask();
+
+
+    if (NULL != jpg_job.encode_job.p_metadata && (jpg_job.encode_job.mobicat_mask > 0)) {
+
+       if (m_parent->mExifParams.debug_params) {
+           memcpy(jpg_job.encode_job.cam_exif_params.debug_params,
+                   m_parent->mExifParams.debug_params, (sizeof(mm_jpeg_debug_exif_params_t)));
+
+           /* Save a copy of mobicat params */
+           jpg_job.encode_job.p_metadata->is_mobicat_aec_params_valid =
+                    jpg_job.encode_job.cam_exif_params.cam_3a_params_valid;
+
+           if (jpg_job.encode_job.cam_exif_params.cam_3a_params_valid) {
+                    jpg_job.encode_job.p_metadata->mobicat_aec_params =
+                    jpg_job.encode_job.cam_exif_params.cam_3a_params;
+           }
+
+           /* Save a copy of 3A debug params */
+            jpg_job.encode_job.p_metadata->is_statsdebug_ae_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->ae_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_awb_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->awb_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_af_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->af_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_asd_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->asd_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_stats_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_bestats_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->bestats_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_bhist_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->bhist_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_3a_tuning_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->q3a_tuning_debug_params_valid;
+
+            if (jpg_job.encode_job.cam_exif_params.debug_params->ae_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_ae_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->ae_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->awb_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_awb_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->awb_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->af_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_af_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->af_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->asd_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_asd_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->asd_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_stats_buffer_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->bestats_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_bestats_buffer_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->bestats_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->bhist_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_bhist_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->bhist_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->q3a_tuning_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_3a_tuning_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->q3a_tuning_debug_params;
+            }
+        }
+
+    }
+
+    /* Init the QTable */
+    for (int i = 0; i < QTABLE_MAX; i++) {
+        jpg_job.encode_job.qtable_set[i] = 0;
+    }
+
+    const cam_sync_related_sensors_event_info_t* related_cam_info =
+            m_parent->getRelatedCamSyncInfo();
+    if (related_cam_info->sync_control == CAM_SYNC_RELATED_SENSORS_ON &&
+            m_parent->getMpoComposition()) {
+        jpg_job.encode_job.multi_image_info.type = MM_JPEG_TYPE_MPO;
+        if (related_cam_info->type == CAM_TYPE_MAIN ) {
+            jpg_job.encode_job.multi_image_info.is_primary = TRUE;
+            LOGD("Encoding MPO Primary JPEG");
+        } else {
+            jpg_job.encode_job.multi_image_info.is_primary = FALSE;
+            LOGD("Encoding MPO Aux JPEG");
+        }
+        jpg_job.encode_job.multi_image_info.num_of_images = 2;
+    } else {
+        LOGD("Encoding Single JPEG");
+        jpg_job.encode_job.multi_image_info.type = MM_JPEG_TYPE_JPEG;
+        jpg_job.encode_job.multi_image_info.is_primary = FALSE;
+        jpg_job.encode_job.multi_image_info.num_of_images = 1;
+    }
+
+    LOGI("[KPI Perf] : PROFILE_JPEG_JOB_START");
+    ret = mJpegHandle.start_job(&jpg_job, &jobId);
+    // The jpeg interface copies the job struct; the locally allocated debug
+    // params copy can be released regardless of start_job's result.
+    if (jpg_job.encode_job.cam_exif_params.debug_params) {
+        free(jpg_job.encode_job.cam_exif_params.debug_params);
+    }
+    if (ret == NO_ERROR) {
+        // remember job info
+        jpeg_job_data->jobId = jobId;
+    }
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : processRawImageImpl
+ *
+ * DESCRIPTION: function to send raw image to upper layer
+ *
+ * PARAMETERS :
+ *   @recvd_frame   : frame to be encoded
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::processRawImageImpl(mm_camera_super_buf_t *recvd_frame)
+{
+    int32_t rc = NO_ERROR;
+
+    QCameraChannel *pChannel = m_parent->getChannelByHandle(recvd_frame->ch_id);
+    QCameraStream *pStream = NULL;
+    mm_camera_buf_def_t *frame = NULL;
+    // check reprocess channel if not found
+    if (pChannel == NULL) {
+        for (int8_t i = 0; i < mPPChannelCount; i++) {
+            if ((mPPChannels[i] != NULL) &&
+                    (mPPChannels[i]->getMyHandle() == recvd_frame->ch_id)) {
+                pChannel = mPPChannels[i];
+                break;
+            }
+        }
+    }
+    if (pChannel == NULL) {
+        LOGE("No corresponding channel (ch_id = %d) exist, return here",
+                recvd_frame->ch_id);
+        return BAD_VALUE;
+    }
+
+    // find snapshot frame: accept snapshot/raw streams, including streams
+    // whose original type was snapshot/raw before reprocessing
+    for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
+        QCameraStream *pCurStream =
+            pChannel->getStreamByHandle(recvd_frame->bufs[i]->stream_id);
+        if (pCurStream != NULL) {
+            if (pCurStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+                pCurStream->isTypeOf(CAM_STREAM_TYPE_RAW) ||
+                pCurStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+                pCurStream->isOrignalTypeOf(CAM_STREAM_TYPE_RAW)) {
+                pStream = pCurStream;
+                frame = recvd_frame->bufs[i];
+                break;
+            }
+        }
+    }
+
+    if ( NULL == frame ) {
+        LOGE("No valid raw buffer");
+        return BAD_VALUE;
+    }
+
+    QCameraMemory *rawMemObj = (QCameraMemory *)frame->mem_info;
+    // In ZSL mode frames coming from a channel other than mPPChannels[0] are
+    // handed up by reference; otherwise the data is copied into a freshly
+    // allocated callback buffer below.
+    bool zslChannelUsed = m_parent->isZSLMode() &&
+            ( pChannel != mPPChannels[0] );
+    camera_memory_t *raw_mem = NULL;
+
+    if (rawMemObj != NULL) {
+        if (zslChannelUsed) {
+            raw_mem = rawMemObj->getMemory(frame->buf_idx, false);
+        } else {
+            raw_mem = m_parent->mGetMemory(-1,
+                                           frame->frame_len,
+                                           1,
+                                           m_parent->mCallbackCookie);
+            if (NULL == raw_mem) {
+                LOGE("Not enough memory for RAW cb ");
+                return NO_MEMORY;
+            }
+            memcpy(raw_mem->data, frame->buffer, frame->frame_len);
+        }
+    }
+
+    if (NULL != rawMemObj && NULL != raw_mem) {
+        // dump frame into file
+        if (frame->stream_type == CAM_STREAM_TYPE_SNAPSHOT ||
+            pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
+            // for YUV422 NV16 case
+            m_parent->dumpFrameToFile(pStream, frame, QCAMERA_DUMP_FRM_SNAPSHOT);
+        } else {
+            //Received RAW snapshot taken notification
+            m_parent->dumpFrameToFile(pStream, frame, QCAMERA_DUMP_FRM_RAW);
+
+            if(true == m_parent->m_bIntRawEvtPending) {
+              //Sending RAW snapshot taken notification to HAL; wake up the
+              //waiter on m_int_cond and return without issuing callbacks
+              memset(&m_dst_dim, 0, sizeof(m_dst_dim));
+              pStream->getFrameDimension(m_dst_dim);
+              pthread_mutex_lock(&m_parent->m_int_lock);
+              pthread_cond_signal(&m_parent->m_int_cond);
+              pthread_mutex_unlock(&m_parent->m_int_lock);
+              raw_mem->release(raw_mem);
+              return rc;
+            }
+        }
+
+        // send data callback / notify for RAW_IMAGE
+        if (NULL != m_parent->mDataCb &&
+            m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE) > 0) {
+            qcamera_callback_argm_t cbArg;
+            memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+            cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+            cbArg.msg_type = CAMERA_MSG_RAW_IMAGE;
+            cbArg.data = raw_mem;
+            cbArg.index = 0;
+            m_parent->m_cbNotifier.notifyCallback(cbArg);
+        }
+        if (NULL != m_parent->mNotifyCb &&
+            m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE_NOTIFY) > 0) {
+            qcamera_callback_argm_t cbArg;
+            memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+            cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
+            cbArg.msg_type = CAMERA_MSG_RAW_IMAGE_NOTIFY;
+            cbArg.ext1 = 0;
+            cbArg.ext2 = 0;
+            m_parent->m_cbNotifier.notifyCallback(cbArg);
+        }
+
+        if ((m_parent->mDataCb != NULL) &&
+            m_parent->msgTypeEnabledWithLock(CAMERA_MSG_COMPRESSED_IMAGE) > 0) {
+            qcamera_release_data_t release_data;
+            memset(&release_data, 0, sizeof(qcamera_release_data_t));
+            // ZSL path: release the whole super buffer; copy path: release
+            // the callback memory allocated above
+            if ( zslChannelUsed ) {
+                release_data.frame = recvd_frame;
+            } else {
+                release_data.data = raw_mem;
+            }
+            rc = sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
+                                raw_mem,
+                                0,
+                                NULL,
+                                &release_data);
+        } else {
+            // no consumer for the compressed-image callback, free immediately
+            raw_mem->release(raw_mem);
+        }
+    } else {
+        LOGE("Cannot get raw mem");
+        rc = UNKNOWN_ERROR;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : dataSaveRoutine
+ *
+ * DESCRIPTION: data saving routine
+ *
+ * PARAMETERS :
+ *   @data    : user data ptr (QCameraPostProcessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void *QCameraPostProcessor::dataSaveRoutine(void *data)
+{
+    int running = 1;
+    int ret;
+    uint8_t is_active = FALSE;
+    QCameraPostProcessor *pme = (QCameraPostProcessor *)data;
+    QCameraCmdThread *cmdThread = &pme->m_saveProcTh;
+    cmdThread->setName("CAM_JpegSave");
+    char saveName[PROPERTY_VALUE_MAX];
+
+    LOGH("E");
+    do {
+        // wait for the next command; loop again if interrupted by a signal
+        do {
+            ret = cam_sem_wait(&cmdThread->cmd_sem);
+            if (ret != 0 && errno != EINVAL) {
+                LOGE("cam_sem_wait error (%s)",
+                            strerror(errno));
+                return NULL;
+            }
+        } while (ret != 0);
+
+        // we got notified about new cmd avail in cmd queue
+        camera_cmd_type_t cmd = cmdThread->getCmd();
+        switch (cmd) {
+        case CAMERA_CMD_TYPE_START_DATA_PROC:
+            LOGH("start data proc");
+            is_active = TRUE;
+            pme->m_inputSaveQ.init();
+            break;
+        case CAMERA_CMD_TYPE_STOP_DATA_PROC:
+            {
+                LOGH("stop data proc");
+                is_active = FALSE;
+
+                // flush input save Queue
+                pme->m_inputSaveQ.flush();
+
+                // signal cmd is completed
+                cam_sem_post(&cmdThread->sync_sem);
+            }
+            break;
+        case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+            {
+                LOGH("Do next job, active is %d", is_active);
+
+                qcamera_jpeg_evt_payload_t *job_data = (qcamera_jpeg_evt_payload_t *) pme->m_inputSaveQ.dequeue();
+                if (job_data == NULL) {
+                    LOGE("Invalid jpeg event data");
+                    continue;
+                }
+                //qcamera_jpeg_data_t *jpeg_job =
+                //        (qcamera_jpeg_data_t *)pme->m_ongoingJpegQ.dequeue(false);
+                //uint32_t frame_idx = jpeg_job->src_frame->bufs[0]->frame_idx;
+                // NOTE(review): frame index is hardcoded; the commented-out
+                // code above suggests it should come from the source frame's
+                // frame_idx -- confirm before relying on it downstream.
+                uint32_t frame_idx = 75;
+
+                pme->m_ongoingJpegQ.flushNodes(matchJobId, (void*)&job_data->jobId);
+
+                LOGH("[KPI Perf] : jpeg job %d", job_data->jobId);
+
+                if (is_active == TRUE) {
+                    // build the output file name from the save-location pattern
+                    memset(saveName, '\0', sizeof(saveName));
+                    snprintf(saveName,
+                             sizeof(saveName),
+                             QCameraPostProcessor::STORE_LOCATION,
+                             pme->mSaveFrmCnt);
+
+                    int file_fd = open(saveName, O_RDWR | O_CREAT, 0655);
+                    if (file_fd >= 0) {
+                        ssize_t written_len = write(file_fd, job_data->out_data.buf_vaddr,
+                                job_data->out_data.buf_filled_len);
+                        if ((ssize_t)job_data->out_data.buf_filled_len != written_len) {
+                            // %zd matches ssize_t; %d here is undefined
+                            // behavior on LP64 targets
+                            LOGE("Failed save complete data %zd bytes "
+                                  "written instead of %d bytes!",
+                                   written_len,
+                                  job_data->out_data.buf_filled_len);
+                        } else {
+                            LOGH("written number of bytes %zd\n",
+                                 written_len);
+                        }
+
+                        close(file_fd);
+                    } else {
+                        LOGE("failed to open file for saving");
+                    }
+                    pme->mSaveFrmCnt++;
+
+                    // hand the saved file's path up to the framework through
+                    // the compressed-image callback
+                    camera_memory_t* jpeg_mem = pme->m_parent->mGetMemory(-1,
+                                                         strlen(saveName),
+                                                         1,
+                                                         pme->m_parent->mCallbackCookie);
+                    if (NULL == jpeg_mem) {
+                        ret = NO_MEMORY;
+                        LOGE("getMemory for jpeg, ret = NO_MEMORY");
+                        goto end;
+                    }
+                    memcpy(jpeg_mem->data, saveName, strlen(saveName));
+
+                    LOGH("Calling upperlayer callback to store JPEG image");
+                    qcamera_release_data_t release_data;
+                    memset(&release_data, 0, sizeof(qcamera_release_data_t));
+                    release_data.data = jpeg_mem;
+                    release_data.unlinkFile = true;
+                    LOGI("[KPI Perf]: PROFILE_JPEG_CB ");
+                    ret = pme->sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
+                            jpeg_mem,
+                            0,
+                            NULL,
+                            &release_data,
+                            frame_idx);
+                }
+
+end:
+                free(job_data);
+            }
+            break;
+        case CAMERA_CMD_TYPE_EXIT:
+            LOGH("save thread exit");
+            running = 0;
+            break;
+        default:
+            break;
+        }
+    } while (running);
+    LOGH("X");
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : dataProcessRoutine
+ *
+ * DESCRIPTION: data process routine that handles input data either from input
+ *              Jpeg Queue to do jpeg encoding, or from input PP Queue to do
+ *              reprocess.
+ *
+ * PARAMETERS :
+ *   @data    : user data ptr (QCameraPostProcessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void *QCameraPostProcessor::dataProcessRoutine(void *data)
+{
+    int running = 1;
+    int ret;
+    uint8_t is_active = FALSE;
+    QCameraPostProcessor *pme = (QCameraPostProcessor *)data;
+    QCameraCmdThread *cmdThread = &pme->m_dataProcTh;
+    cmdThread->setName("CAM_DataProc");
+
+    LOGH("E");
+    do {
+        // wait for the next command; loop again if interrupted by a signal
+        do {
+            ret = cam_sem_wait(&cmdThread->cmd_sem);
+            if (ret != 0 && errno != EINVAL) {
+                LOGE("cam_sem_wait error (%s)",
+                        strerror(errno));
+                return NULL;
+            }
+        } while (ret != 0);
+
+        // we got notified about new cmd avail in cmd queue
+        camera_cmd_type_t cmd = cmdThread->getCmd();
+        switch (cmd) {
+        case CAMERA_CMD_TYPE_START_DATA_PROC:
+            LOGH("start data proc");
+            is_active = TRUE;
+
+            // (re)activate all work queues before accepting jobs
+            pme->m_ongoingPPQ.init();
+            pme->m_inputJpegQ.init();
+            pme->m_inputPPQ.init();
+            pme->m_inputRawQ.init();
+
+            // start the save thread asynchronously (no sync wait)
+            pme->m_saveProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC,
+                                      FALSE,
+                                      FALSE);
+
+            // signal cmd is completed
+            cam_sem_post(&cmdThread->sync_sem);
+
+            break;
+        case CAMERA_CMD_TYPE_STOP_DATA_PROC:
+            {
+                LOGH("stop data proc");
+                is_active = FALSE;
+
+                // stop the save thread first and wait for it to finish
+                pme->m_saveProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC,
+                                           TRUE,
+                                           TRUE);
+                // cancel all ongoing jpeg jobs
+                qcamera_jpeg_data_t *jpeg_job =
+                    (qcamera_jpeg_data_t *)pme->m_ongoingJpegQ.dequeue();
+                while (jpeg_job != NULL) {
+                    pme->mJpegHandle.abort_job(jpeg_job->jobId);
+
+                    pme->releaseJpegJobData(jpeg_job);
+                    free(jpeg_job);
+
+                    jpeg_job = (qcamera_jpeg_data_t *)pme->m_ongoingJpegQ.dequeue();
+                }
+
+                // destroy jpeg encoding session
+                if ( 0 < pme->mJpegSessionId ) {
+                    pme->mJpegHandle.destroy_session(pme->mJpegSessionId);
+                    pme->mJpegSessionId = 0;
+                }
+
+                // free jpeg out buf and exif obj
+                FREE_JPEG_OUTPUT_BUFFER(pme->m_pJpegOutputMem,
+                    pme->m_JpegOutputMemCount);
+
+                if (pme->m_pJpegExifObj != NULL) {
+                    delete pme->m_pJpegExifObj;
+                    pme->m_pJpegExifObj = NULL;
+                }
+
+                // flush ongoing postproc Queue
+                pme->m_ongoingPPQ.flush();
+
+                // flush input jpeg Queue
+                pme->m_inputJpegQ.flush();
+
+                // flush input Postproc Queue
+                pme->m_inputPPQ.flush();
+
+                // flush input raw Queue
+                pme->m_inputRawQ.flush();
+
+                // signal cmd is completed
+                cam_sem_post(&cmdThread->sync_sem);
+
+                // force a fresh jpeg session on the next start
+                pme->mNewJpegSessionNeeded = true;
+            }
+            break;
+        case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+            {
+                LOGH("Do next job, active is %d", is_active);
+                if (is_active == TRUE) {
+                    qcamera_jpeg_data_t *jpeg_job =
+                        (qcamera_jpeg_data_t *)pme->m_inputJpegQ.dequeue();
+
+                    if (NULL != jpeg_job) {
+                        // To avoid any race conditions,
+                        // sync any stream specific parameters here.
+                        if (pme->m_parent->mParameters.isAdvCamFeaturesEnabled()) {
+                            // Sync stream params, only if advanced features configured
+                            // Reduces the latency for normal snapshot.
+                            pme->syncStreamParams(jpeg_job->src_frame, NULL);
+                        }
+
+                        // add into ongoing jpeg job Q
+                        if (pme->m_ongoingJpegQ.enqueue((void *)jpeg_job)) {
+                            ret = pme->encodeData(jpeg_job,
+                                      pme->mNewJpegSessionNeeded);
+                            if (NO_ERROR != ret) {
+                                // dequeue the last one
+                                pme->m_ongoingJpegQ.dequeue(false);
+                                pme->releaseJpegJobData(jpeg_job);
+                                free(jpeg_job);
+                                jpeg_job = NULL;
+                                pme->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+                            }
+                        } else {
+                            LOGW("m_ongoingJpegQ is not active!!!");
+                            pme->releaseJpegJobData(jpeg_job);
+                            free(jpeg_job);
+                            jpeg_job = NULL;
+                        }
+                    }
+
+
+                    // process raw data if any
+                    mm_camera_super_buf_t *super_buf =
+                        (mm_camera_super_buf_t *)pme->m_inputRawQ.dequeue();
+
+                    if (NULL != super_buf) {
+                        //play shutter sound
+                        pme->m_parent->playShutter();
+                        ret = pme->processRawImageImpl(super_buf);
+                        if (NO_ERROR != ret) {
+                            pme->releaseSuperBuf(super_buf);
+                            free(super_buf);
+                            pme->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+                        }
+                    }
+
+                    // kick channel reprocessing for any pending PP jobs
+                    ret = pme->doReprocess();
+                    if (NO_ERROR != ret) {
+                        pme->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+                    } else {
+                        ret = pme->stopCapture();
+                    }
+
+                } else {
+                    // not active, simply return buf and do no op
+                    qcamera_jpeg_data_t *jpeg_data =
+                        (qcamera_jpeg_data_t *)pme->m_inputJpegQ.dequeue();
+                    if (NULL != jpeg_data) {
+                        pme->releaseJpegJobData(jpeg_data);
+                        free(jpeg_data);
+                    }
+                    mm_camera_super_buf_t *super_buf =
+                        (mm_camera_super_buf_t *)pme->m_inputRawQ.dequeue();
+                    if (NULL != super_buf) {
+                        pme->releaseSuperBuf(super_buf);
+                        free(super_buf);
+                    }
+
+                    // flush input Postproc Queue
+                    pme->m_inputPPQ.flush();
+                }
+            }
+            break;
+        case CAMERA_CMD_TYPE_EXIT:
+            running = 0;
+            break;
+        default:
+            break;
+        }
+    } while (running);
+    LOGH("X");
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : doReprocess
+ *
+ * DESCRIPTION: Trigger channel reprocessing
+ *
+ * PARAMETERS :None
+ *
+ * RETURN     : int32_t type of status
+ *                    NO_ERROR  -- success
+ *                    none-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::doReprocess()
+{
+    int32_t ret = NO_ERROR;
+    QCameraChannel *m_pSrcChannel = NULL;
+    QCameraStream *pMetaStream = NULL;
+    uint8_t meta_buf_index = 0;
+    mm_camera_buf_def_t *meta_buf = NULL;
+    mm_camera_super_buf_t *ppInputFrame = NULL;
+
+    // peek first so the job stays queued if it cannot be processed yet
+    qcamera_pp_data_t *ppreq_job = (qcamera_pp_data_t *)m_inputPPQ.peek();
+    if ((ppreq_job == NULL) || (ppreq_job->src_frame == NULL)) {
+        return ret;
+    }
+
+    if (!validatePostProcess(ppreq_job->src_frame)) {
+        return ret;
+    }
+
+    // validated: now actually take ownership of the job
+    // NOTE(review): if the dequeued job has a NULL src_reproc_frame it is
+    // neither processed nor freed here -- confirm ownership/possible leak.
+    ppreq_job = (qcamera_pp_data_t *)m_inputPPQ.dequeue();
+    if (ppreq_job == NULL || ppreq_job->src_frame == NULL ||
+            ppreq_job->src_reproc_frame == NULL) {
+        return ret;
+    }
+
+    mm_camera_super_buf_t *src_frame = ppreq_job->src_frame;
+    mm_camera_super_buf_t *src_reproc_frame = ppreq_job->src_reproc_frame;
+    int8_t mCurReprocCount = ppreq_job->reprocCount;
+    int8_t mCurChannelIdx = ppreq_job->ppChannelIndex;
+
+    LOGD("frame = %p src_frame = %p mCurReprocCount = %d mCurChannelIdx = %d",
+            src_frame,src_reproc_frame,mCurReprocCount, mCurChannelIdx);
+
+    // in manual capture mode (>= type 3) the first pass reprocesses the
+    // original source frame rather than the previous pass output
+    if ((m_parent->mParameters.getManualCaptureMode() >=
+            CAM_MANUAL_CAPTURE_TYPE_3)  && (mCurChannelIdx == 0)) {
+        ppInputFrame = src_reproc_frame;
+    } else {
+        ppInputFrame = src_frame;
+    }
+
+    if (mPPChannelCount >= CAM_PP_CHANNEL_MAX) {
+        LOGE("invalid channel count");
+        return UNKNOWN_ERROR;
+    }
+
+    // find meta data stream and index of meta data frame in the superbuf
+    for (int8_t j = 0; j < mPPChannelCount; j++) {
+        /*First search in src buffer for any offline metadata */
+        for (uint32_t i = 0; i < src_frame->num_bufs; i++) {
+            QCameraStream *pStream = mPPChannels[j]->getStreamByHandle(
+                    src_frame->bufs[i]->stream_id);
+            if (pStream != NULL && pStream->isOrignalTypeOf(CAM_STREAM_TYPE_METADATA)) {
+                meta_buf_index = (uint8_t) src_frame->bufs[i]->buf_idx;
+                pMetaStream = pStream;
+                meta_buf = src_frame->bufs[i];
+                break;
+            }
+        }
+
+        if ((pMetaStream != NULL) && (meta_buf != NULL)) {
+            LOGD("Found Offline stream metadata = %d",
+                    (int)meta_buf_index);
+            break;
+        }
+    }
+
+    // fall back to the source channels' online metadata if no offline
+    // metadata was found in the src buffer
+    if ((pMetaStream == NULL) && (meta_buf == NULL)) {
+        for (int8_t j = 0; j < mPPChannelCount; j++) {
+            m_pSrcChannel = mPPChannels[j]->getSrcChannel();
+            if (m_pSrcChannel == NULL)
+                continue;
+            for (uint32_t i = 0; i < src_reproc_frame->num_bufs; i++) {
+                QCameraStream *pStream =
+                        m_pSrcChannel->getStreamByHandle(
+                        src_reproc_frame->bufs[i]->stream_id);
+                if (pStream != NULL && pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+                    meta_buf_index = (uint8_t) src_reproc_frame->bufs[i]->buf_idx;
+                    pMetaStream = pStream;
+                    meta_buf = src_reproc_frame->bufs[i];
+                    break;
+                }
+            }
+            if ((pMetaStream != NULL) && (meta_buf != NULL)) {
+                LOGD("Found Meta data info for reprocessing index = %d",
+                        (int)meta_buf_index);
+                break;
+            }
+        }
+    }
+
+    if (m_parent->mParameters.isAdvCamFeaturesEnabled()) {
+        // No need to sync stream params, if none of the advanced features configured
+        // Reduces the latency for normal snapshot.
+        syncStreamParams(src_frame, src_reproc_frame);
+    }
+    if (mPPChannels[mCurChannelIdx] != NULL) {
+        // add into ongoing PP job Q
+        ppreq_job->reprocCount = (int8_t) (mCurReprocCount + 1);
+
+        // offline path: regular capture or jobs carrying offline buffers
+        if ((m_parent->isRegularCapture()) || (ppreq_job->offline_buffer)) {
+            m_bufCountPPQ++;
+            if (m_ongoingPPQ.enqueue((void *)ppreq_job)) {
+                pthread_mutex_lock(&m_reprocess_lock);
+                ret = mPPChannels[mCurChannelIdx]->doReprocessOffline(ppInputFrame,
+                        meta_buf, m_parent->mParameters);
+                if (ret != NO_ERROR) {
+                    pthread_mutex_unlock(&m_reprocess_lock);
+                    goto end;
+                }
+
+                if ((ppreq_job->offline_buffer) &&
+                        (ppreq_job->offline_reproc_buf)) {
+                    mPPChannels[mCurChannelIdx]->doReprocessOffline(
+                            ppreq_job->offline_reproc_buf, meta_buf);
+                }
+                pthread_mutex_unlock(&m_reprocess_lock);
+            } else {
+                LOGW("m_ongoingPPQ is not active!!!");
+                ret = UNKNOWN_ERROR;
+                goto end;
+            }
+        } else {
+            // online path
+            m_bufCountPPQ++;
+            if (!m_ongoingPPQ.enqueue((void *)ppreq_job)) {
+                LOGW("m_ongoingJpegQ is not active!!!");
+                ret = UNKNOWN_ERROR;
+                goto end;
+            }
+
+            int32_t numRequiredPPQBufsForSingleOutput = (int32_t)
+                    m_parent->mParameters.getNumberInBufsForSingleShot();
+
+            // when one output needs several inputs, queue placeholder jobs
+            // for the extra expected outputs
+            if (m_bufCountPPQ % numRequiredPPQBufsForSingleOutput == 0) {
+                int32_t extra_pp_job_count =
+                        m_parent->mParameters.getNumberOutBufsForSingleShot() -
+                        m_parent->mParameters.getNumberInBufsForSingleShot();
+
+                for (int32_t i = 0; i < extra_pp_job_count; i++) {
+                    qcamera_pp_data_t *extra_pp_job =
+                            (qcamera_pp_data_t *)calloc(1, sizeof(qcamera_pp_data_t));
+                    if (!extra_pp_job) {
+                        LOGE("no mem for qcamera_pp_data_t");
+                        ret = NO_MEMORY;
+                        break;
+                    }
+                    extra_pp_job->reprocCount = ppreq_job->reprocCount;
+                    if (!m_ongoingPPQ.enqueue((void *)extra_pp_job)) {
+                        LOGW("m_ongoingJpegQ is not active!!!");
+                        releaseOngoingPPData(extra_pp_job, this);
+                        free(extra_pp_job);
+                        extra_pp_job = NULL;
+                        goto end;
+                    }
+                }
+            }
+
+            ret = mPPChannels[mCurChannelIdx]->doReprocess(ppInputFrame,
+                    m_parent->mParameters, pMetaStream, meta_buf_index);
+        }
+    } else {
+        LOGE("Reprocess channel is NULL");
+        ret = UNKNOWN_ERROR;
+    }
+
+end:
+    // on failure release the job here since it never reaches the PP callback
+    if (ret != NO_ERROR) {
+        releaseOngoingPPData(ppreq_job, this);
+        if (ppreq_job != NULL) {
+            free(ppreq_job);
+            ppreq_job = NULL;
+        }
+    }
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : getReprocChannel
+ *
+ * DESCRIPTION:  Returns reprocessing channel handle
+ *
+ * PARAMETERS : index for reprocessing array
+ *
+ * RETURN     : QCameraReprocessChannel * type of pointer
+                       NULL if no reprocessing channel
+ *==========================================================================*/
+QCameraReprocessChannel * QCameraPostProcessor::getReprocChannel(uint8_t index)
+{
+    // Bounds-checked lookup into the reprocess channel array.
+    if (index < mPPChannelCount) {
+        return mPPChannels[index];
+    }
+    LOGE("Invalid index value");
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : stopCapture
+ *
+ * DESCRIPTION: Trigger image capture stop
+ *
+ * PARAMETERS :
+ * None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::stopCapture()
+{
+    // Only regular (non-ZSL) capture needs an explicit stop event.
+    if (!m_parent->isRegularCapture()) {
+        return NO_ERROR;
+    }
+    return m_parent->processAPI(QCAMERA_SM_EVT_STOP_CAPTURE_CHANNEL, NULL);
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegPaddingReq
+ *
+ * DESCRIPTION: function to add an entry to exif data
+ *
+ * PARAMETERS :
+ *   @padding_info : jpeg specific padding requirement
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::getJpegPaddingReq(cam_padding_info_t &padding_info)
+{
+    // TODO: hardcode for now, needs to query from mm-jpeg-interface
+    padding_info.offset_info.offset_x = 0;
+    padding_info.offset_info.offset_y = 0;
+    padding_info.width_padding = CAM_PAD_NONE;
+    padding_info.height_padding = CAM_PAD_TO_16;
+    padding_info.plane_padding = CAM_PAD_TO_WORD;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setYUVFrameInfo
+ *
+ * DESCRIPTION: set Raw YUV frame data info for up-layer
+ *
+ * PARAMETERS :
+ *   @frame   : process frame received from mm-camera-interface
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *
+ * NOTE       : currently we return frame len, y offset, cbcr offset and frame format
+ *==========================================================================*/
+int32_t QCameraPostProcessor::setYUVFrameInfo(mm_camera_super_buf_t *recvd_frame)
+{
+    QCameraChannel *pChannel = m_parent->getChannelByHandle(recvd_frame->ch_id);
+    // check reprocess channel if not found
+    if (pChannel == NULL) {
+        for (int8_t i = 0; i < mPPChannelCount; i++) {
+            if ((mPPChannels[i] != NULL) &&
+                    (mPPChannels[i]->getMyHandle() == recvd_frame->ch_id)) {
+                pChannel = mPPChannels[i];
+                break;
+            }
+        }
+    }
+
+    if (pChannel == NULL) {
+        LOGE("No corresponding channel (ch_id = %d) exist, return here",
+                recvd_frame->ch_id);
+        return BAD_VALUE;
+    }
+
+    // find snapshot frame
+    for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
+        QCameraStream *pStream =
+            pChannel->getStreamByHandle(recvd_frame->bufs[i]->stream_id);
+        if (pStream != NULL) {
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+                pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) {
+                //get the main frame, use stream info
+                cam_frame_len_offset_t frame_offset;
+                cam_dimension_t frame_dim;
+                cam_format_t frame_fmt;
+                const char *fmt_string;
+                pStream->getFrameDimension(frame_dim);
+                pStream->getFrameOffset(frame_offset);
+                pStream->getFormat(frame_fmt);
+                fmt_string = m_parent->mParameters.getFrameFmtString(frame_fmt);
+
+                // chroma plane offset = plane 0 length minus the luma plane
+                // size (width * height)
+                int cbcr_offset = (int32_t)frame_offset.mp[0].len -
+                        frame_dim.width * frame_dim.height;
+
+                // NOTE(review): the computed frame info is only logged here;
+                // confirm whether up-layer reporting was intentionally removed.
+                LOGH("frame width=%d, height=%d, yoff=%d, cbcroff=%d, fmt_string=%s",
+                        frame_dim.width, frame_dim.height, frame_offset.mp[0].offset, cbcr_offset, fmt_string);
+                return NO_ERROR;
+            }
+        }
+    }
+
+    return BAD_VALUE;
+}
+
+// Queue-node matcher: true when the queued jpeg job carries the job id
+// pointed to by match_data.
+bool QCameraPostProcessor::matchJobId(void *data, void *, void *match_data)
+{
+    qcamera_jpeg_data_t *jpeg_job = (qcamera_jpeg_data_t *)data;
+    uint32_t target_id = *((uint32_t *)match_data);
+    return (jpeg_job->jobId == target_id);
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegMemory
+ *
+ * DESCRIPTION: buffer allocation function
+ *   to pass to jpeg interface
+ *
+ * PARAMETERS :
+ *   @out_buf : buffer descriptor struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraPostProcessor::getJpegMemory(omx_jpeg_ouput_buf_t *out_buf)
+{
+    LOGH("Allocating jpeg out buffer of size: %d", out_buf->size);
+    QCameraPostProcessor *procInst = (QCameraPostProcessor *) out_buf->handle;
+    camera_memory_t *cam_mem = procInst->m_parent->mGetMemory(out_buf->fd, out_buf->size, 1U,
+            procInst->m_parent->mCallbackCookie);
+    // mGetMemory can fail; dereferencing a NULL result would crash here
+    if (NULL == cam_mem) {
+        LOGE("getMemory failed for jpeg out buffer");
+        out_buf->mem_hdl = NULL;
+        out_buf->vaddr = NULL;
+        return NO_MEMORY;
+    }
+    out_buf->mem_hdl = cam_mem;
+    out_buf->vaddr = cam_mem->data;
+
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseJpegMemory
+ *
+ * DESCRIPTION: release jpeg memory function
+ *   to pass to jpeg interface, in case of abort
+ *
+ * PARAMETERS :
+ *   @out_buf : buffer descriptor struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCameraPostProcessor::releaseJpegMemory(omx_jpeg_ouput_buf_t *out_buf)
+{
+    // Nothing to release if the descriptor or its memory handle is missing.
+    if ((NULL == out_buf) || (NULL == out_buf->mem_hdl)) {
+        return -1;
+    }
+
+    LOGD("releasing jpeg out buffer of size: %d", out_buf->size);
+    camera_memory_t *mem = (camera_memory_t *)out_buf->mem_hdl;
+    mem->release(mem);
+    out_buf->mem_hdl = NULL;
+    out_buf->vaddr = NULL;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraExif
+ *
+ * DESCRIPTION: constructor of QCameraExif
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraExif::QCameraExif()
+    : m_nNumEntries(0)
+{
+    // Start with a fully zeroed entry table; slots are populated by addEntry().
+    memset(&m_Entries[0], 0, sizeof(m_Entries));
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraExif
+ *
+ * DESCRIPTION: deconstructor of QCameraExif. Will release internal memory ptr.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraExif::~QCameraExif()
+{
+    // Free heap payloads owned by the entry table. ASCII and UNDEFINED
+    // entries always carry a malloc'd buffer (see addEntry()); numeric
+    // types only do so when count > 1 -- scalar values are stored inline
+    // in the union and need no cleanup.
+    for (uint32_t i = 0; i < m_nNumEntries; i++) {
+        switch (m_Entries[i].tag_entry.type) {
+        case EXIF_BYTE:
+            {
+                if (m_Entries[i].tag_entry.count > 1 &&
+                    m_Entries[i].tag_entry.data._bytes != NULL) {
+                    free(m_Entries[i].tag_entry.data._bytes);
+                    m_Entries[i].tag_entry.data._bytes = NULL;
+                }
+            }
+            break;
+        case EXIF_ASCII:
+            {
+                // ASCII strings are heap-allocated regardless of count
+                if (m_Entries[i].tag_entry.data._ascii != NULL) {
+                    free(m_Entries[i].tag_entry.data._ascii);
+                    m_Entries[i].tag_entry.data._ascii = NULL;
+                }
+            }
+            break;
+        case EXIF_SHORT:
+            {
+                if (m_Entries[i].tag_entry.count > 1 &&
+                    m_Entries[i].tag_entry.data._shorts != NULL) {
+                    free(m_Entries[i].tag_entry.data._shorts);
+                    m_Entries[i].tag_entry.data._shorts = NULL;
+                }
+            }
+            break;
+        case EXIF_LONG:
+            {
+                if (m_Entries[i].tag_entry.count > 1 &&
+                    m_Entries[i].tag_entry.data._longs != NULL) {
+                    free(m_Entries[i].tag_entry.data._longs);
+                    m_Entries[i].tag_entry.data._longs = NULL;
+                }
+            }
+            break;
+        case EXIF_RATIONAL:
+            {
+                if (m_Entries[i].tag_entry.count > 1 &&
+                    m_Entries[i].tag_entry.data._rats != NULL) {
+                    free(m_Entries[i].tag_entry.data._rats);
+                    m_Entries[i].tag_entry.data._rats = NULL;
+                }
+            }
+            break;
+        case EXIF_UNDEFINED:
+            {
+                // UNDEFINED payloads are heap-allocated regardless of count
+                if (m_Entries[i].tag_entry.data._undefined != NULL) {
+                    free(m_Entries[i].tag_entry.data._undefined);
+                    m_Entries[i].tag_entry.data._undefined = NULL;
+                }
+            }
+            break;
+        case EXIF_SLONG:
+            {
+                if (m_Entries[i].tag_entry.count > 1 &&
+                    m_Entries[i].tag_entry.data._slongs != NULL) {
+                    free(m_Entries[i].tag_entry.data._slongs);
+                    m_Entries[i].tag_entry.data._slongs = NULL;
+                }
+            }
+            break;
+        case EXIF_SRATIONAL:
+            {
+                if (m_Entries[i].tag_entry.count > 1 &&
+                    m_Entries[i].tag_entry.data._srats != NULL) {
+                    free(m_Entries[i].tag_entry.data._srats);
+                    m_Entries[i].tag_entry.data._srats = NULL;
+                }
+            }
+            break;
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : addEntry
+ *
+ * DESCRIPTION: function to add an entry to exif data
+ *
+ * PARAMETERS :
+ *   @tagid   : exif tag ID
+ *   @type    : data type
+ *   @count   : number of data in uint of its type
+ *   @data    : input data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraExif::addEntry(exif_tag_id_t tagid,
+                              exif_tag_type_t type,
+                              uint32_t count,
+                              void *data)
+{
+    int32_t rc = NO_ERROR;
+    if(m_nNumEntries >= MAX_EXIF_TABLE_ENTRIES) {
+        LOGE("Number of entries exceeded limit");
+        return NO_MEMORY;
+    }
+
+    // Stage the entry in the next free slot; it is committed (counter
+    // incremented) only if the payload copy below succeeds.
+    m_Entries[m_nNumEntries].tag_id = tagid;
+    m_Entries[m_nNumEntries].tag_entry.type = type;
+    m_Entries[m_nNumEntries].tag_entry.count = count;
+    m_Entries[m_nNumEntries].tag_entry.copy = 1;
+    switch (type) {
+    case EXIF_BYTE:
+        {
+            if (count > 1) {
+                // Deep-copy the byte array so the caller's buffer may be freed
+                uint8_t *values = (uint8_t *)malloc(count);
+                if (values == NULL) {
+                    LOGE("No memory for byte array");
+                    rc = NO_MEMORY;
+                } else {
+                    memcpy(values, data, count);
+                    m_Entries[m_nNumEntries].tag_entry.data._bytes = values;
+                }
+            } else {
+                m_Entries[m_nNumEntries].tag_entry.data._byte = *(uint8_t *)data;
+            }
+        }
+        break;
+    case EXIF_ASCII:
+        {
+            // ASCII is always heap-allocated and NUL-terminated
+            char *str = (char *)malloc(count + 1);
+            if (str == NULL) {
+                LOGE("No memory for ascii string");
+                rc = NO_MEMORY;
+            } else {
+                memset(str, 0, count + 1);
+                memcpy(str, data, count);
+                m_Entries[m_nNumEntries].tag_entry.data._ascii = str;
+            }
+        }
+        break;
+    case EXIF_SHORT:
+        {
+            uint16_t *exif_data = (uint16_t *)data;
+            if (count > 1) {
+                uint16_t *values = (uint16_t *)malloc(count * sizeof(uint16_t));
+                if (values == NULL) {
+                    LOGE("No memory for short array");
+                    rc = NO_MEMORY;
+                } else {
+                    memcpy(values, exif_data, count * sizeof(uint16_t));
+                    m_Entries[m_nNumEntries].tag_entry.data._shorts = values;
+                }
+            } else {
+                m_Entries[m_nNumEntries].tag_entry.data._short = *(uint16_t *)data;
+            }
+        }
+        break;
+    case EXIF_LONG:
+        {
+            uint32_t *exif_data = (uint32_t *)data;
+            if (count > 1) {
+                uint32_t *values = (uint32_t *)malloc(count * sizeof(uint32_t));
+                if (values == NULL) {
+                    LOGE("No memory for long array");
+                    rc = NO_MEMORY;
+                } else {
+                    memcpy(values, exif_data, count * sizeof(uint32_t));
+                    m_Entries[m_nNumEntries].tag_entry.data._longs = values;
+                }
+            } else {
+                m_Entries[m_nNumEntries].tag_entry.data._long = *(uint32_t *)data;
+            }
+        }
+        break;
+    case EXIF_RATIONAL:
+        {
+            rat_t *exif_data = (rat_t *)data;
+            if (count > 1) {
+                rat_t *values = (rat_t *)malloc(count * sizeof(rat_t));
+                if (values == NULL) {
+                    LOGE("No memory for rational array");
+                    rc = NO_MEMORY;
+                } else {
+                    memcpy(values, exif_data, count * sizeof(rat_t));
+                    m_Entries[m_nNumEntries].tag_entry.data._rats = values;
+                }
+            } else {
+                m_Entries[m_nNumEntries].tag_entry.data._rat = *(rat_t *)data;
+            }
+        }
+        break;
+    case EXIF_UNDEFINED:
+        {
+            // UNDEFINED payloads are always heap-allocated
+            uint8_t *values = (uint8_t *)malloc(count);
+            if (values == NULL) {
+                LOGE("No memory for undefined array");
+                rc = NO_MEMORY;
+            } else {
+                memcpy(values, data, count);
+                m_Entries[m_nNumEntries].tag_entry.data._undefined = values;
+            }
+        }
+        break;
+    case EXIF_SLONG:
+        {
+            // signed variant: use int32_t throughout for type consistency
+            int32_t *exif_data = (int32_t *)data;
+            if (count > 1) {
+                int32_t *values = (int32_t *)malloc(count * sizeof(int32_t));
+                if (values == NULL) {
+                    LOGE("No memory for signed long array");
+                    rc = NO_MEMORY;
+                } else {
+                    memcpy(values, exif_data, count * sizeof(int32_t));
+                    m_Entries[m_nNumEntries].tag_entry.data._slongs = values;
+                }
+            } else {
+                m_Entries[m_nNumEntries].tag_entry.data._slong = *(int32_t *)data;
+            }
+        }
+        break;
+    case EXIF_SRATIONAL:
+        {
+            srat_t *exif_data = (srat_t *)data;
+            if (count > 1) {
+                srat_t *values = (srat_t *)malloc(count * sizeof(srat_t));
+                if (values == NULL) {
+                    LOGE("No memory for signed rational array");
+                    rc = NO_MEMORY;
+                } else {
+                    memcpy(values, exif_data, count * sizeof(srat_t));
+                    m_Entries[m_nNumEntries].tag_entry.data._srats = values;
+                }
+            } else {
+                m_Entries[m_nNumEntries].tag_entry.data._srat = *(srat_t *)data;
+            }
+        }
+        break;
+    default:
+        // Reject unknown tag types instead of committing an entry with an
+        // uninitialized payload.
+        LOGE("Unsupported exif tag type %d", type);
+        rc = BAD_VALUE;
+        break;
+    }
+
+    // Commit only fully-initialized entries. Previously the counter was
+    // incremented even on allocation failure, leaving a half-initialized
+    // entry for the encoder/destructor to trip over; now the slot is
+    // simply reused by the next addEntry() call.
+    if (rc == NO_ERROR) {
+        m_nNumEntries++;
+    }
+    return rc;
+}
+
+}; // namespace qcamera
diff --git a/msmcobalt/QCamera2/HAL/QCameraPostProc.h b/msmcobalt/QCamera2/HAL/QCameraPostProc.h
new file mode 100644
index 0000000..5c56214
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/QCameraPostProc.h
@@ -0,0 +1,250 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_POSTPROC_H__
+#define __QCAMERA_POSTPROC_H__
+
+// Camera dependencies
+#include "QCamera2HWI.h"
+
+extern "C" {
+#include "mm_camera_interface.h"
+#include "mm_jpeg_interface.h"
+}
+
+#define MAX_JPEG_BURST 2
+#define CAM_PP_CHANNEL_MAX 8
+
+namespace qcamera {
+
+class QCameraExif;
+class QCamera2HardwareInterface;
+
+// Descriptor of a single JPEG encode job, queued between the data process
+// thread and the jpeg interface.
+typedef struct {
+    uint32_t jobId;                  // job ID
+    uint32_t client_hdl;             // handle of jpeg client (obtained when open jpeg)
+    mm_camera_super_buf_t *src_frame;// source frame (need to be returned back to kernel
+                                     //after done)
+    mm_camera_super_buf_t *src_reproc_frame; // original source
+                                             //frame for reproc if not NULL
+    metadata_buffer_t *metadata;     // source frame metadata
+    bool reproc_frame_release;       // false release original buffer, true don't release it
+    mm_camera_buf_def_t *src_reproc_bufs;    // reprocess input buffer array
+    QCameraExif *pJpegExifObj;       // exif data object for this job
+    uint8_t offline_buffer;          // flag for offline_reproc_buf usage -- presumably
+    mm_camera_buf_def_t *offline_reproc_buf; //HAL processed buffer
+} qcamera_jpeg_data_t;
+
+
+// Request node for the input postproc queue: a frame pending reprocess
+// together with its current pass count.
+typedef struct {
+    int8_t reprocCount;              // current reprocess pass count
+    mm_camera_super_buf_t *src_frame;    // source frame that needs post process
+    mm_camera_super_buf_t *src_reproc_frame;// source frame (need to be
+                                            //returned back to kernel after done)
+}qcamera_pp_request_t;
+
+// State of one reprocess job while it sits in the ongoing postproc queue.
+typedef struct {
+    uint32_t jobId;                  // job ID
+    int8_t reprocCount;              //Current pass count
+    int8_t ppChannelIndex;           //Reprocess channel object index
+    mm_camera_super_buf_t *src_frame;// source frame
+    bool reproc_frame_release;       // false release original buffer
+                                     // true don't release it
+    mm_camera_buf_def_t *src_reproc_bufs;    // reprocess input buffer array
+    mm_camera_super_buf_t *src_reproc_frame;// source frame (need to be
+                                            //returned back to kernel after done)
+    uint8_t offline_buffer;          // flag for offline_reproc_buf usage -- presumably
+    mm_camera_buf_def_t *offline_reproc_buf; //HAL processed buffer
+} qcamera_pp_data_t;
+
+// Payload delivered by the jpeg interface callback when an encode
+// job finishes (or fails).
+typedef struct {
+    uint32_t jobId;                  // job ID (obtained when start_jpeg_job)
+    jpeg_job_status_t status;        // jpeg encoding status
+    mm_jpeg_output_t out_data;         // ptr to jpeg output buf
+} qcamera_jpeg_evt_payload_t;
+
+// Resources to be released once a data notify callback has been consumed.
+typedef struct {
+    camera_memory_t *        data;     // ptr to data memory struct
+    mm_camera_super_buf_t *  frame;    // ptr to frame
+    QCameraMemory *          streamBufs; //ptr to stream buffers
+    bool                     unlinkFile; // unlink any stored buffers on error
+} qcamera_release_data_t;
+
+// Argument bundle for sending a data notify callback to the framework.
+typedef struct {
+    int32_t                  msg_type; // msg type of data notify
+    camera_memory_t *        data;     // ptr to data memory struct
+    unsigned int             index;    // index of the buf in the whole buffer
+    camera_frame_metadata_t *metadata; // ptr to meta data
+    qcamera_release_data_t   release_data; // any data needs to be release after notify
+} qcamera_data_argm_t;
+
+#define MAX_EXIF_TABLE_ENTRIES 17
+
+// Container of exif tag entries handed to the JPEG encoder. Entries with
+// array payloads own heap memory that is released in the destructor.
+class QCameraExif
+{
+public:
+    QCameraExif();
+    virtual ~QCameraExif();
+
+    // Append one exif tag; array/string payloads are deep-copied.
+    int32_t addEntry(exif_tag_id_t tagid,
+                     exif_tag_type_t type,
+                     uint32_t count,
+                     void *data);
+    uint32_t getNumOfEntries() {return m_nNumEntries;}
+    QEXIF_INFO_DATA *getEntries() {return m_Entries;}
+
+private:
+    QEXIF_INFO_DATA m_Entries[MAX_EXIF_TABLE_ENTRIES];  // exif tags for JPEG encoder
+    uint32_t  m_nNumEntries;                            // number of valid entries
+};
+
+// Post processor for the camera HAL: owns the reprocess channels and the
+// JPEG encode pipeline (data process thread, save thread, and the job
+// queues connecting them).
+class QCameraPostProcessor
+{
+public:
+    QCameraPostProcessor(QCamera2HardwareInterface *cam_ctrl);
+    virtual ~QCameraPostProcessor();
+
+    int32_t init(jpeg_encode_callback_t jpeg_cb, void *user_data);
+    int32_t deinit();
+    int32_t start(QCameraChannel *pSrcChannel);
+    int32_t stop();
+    bool validatePostProcess(mm_camera_super_buf_t *frame);
+    int32_t processData(mm_camera_super_buf_t *frame);
+    int32_t processRawData(mm_camera_super_buf_t *frame);
+    int32_t processPPData(mm_camera_super_buf_t *frame);
+    int32_t processJpegEvt(qcamera_jpeg_evt_payload_t *evt);
+    int32_t getJpegPaddingReq(cam_padding_info_t &padding_info);
+    QCameraReprocessChannel * getReprocChannel(uint8_t index);
+    inline bool getJpegMemOpt() {return mJpegMemOpt;}
+    inline void setJpegMemOpt(bool val) {mJpegMemOpt = val;}
+    int32_t setJpegHandle(mm_jpeg_ops_t *pJpegHandle,
+            mm_jpeg_mpo_ops_t* pJpegMpoHandle, uint32_t clientHandle);
+    int32_t createJpegSession(QCameraChannel *pSrcChannel);
+
+    int8_t getPPChannelCount() {return mPPChannelCount;};
+    mm_camera_buf_def_t *getOfflinePPInputBuffer(
+            mm_camera_super_buf_t *src_frame);
+    QCameraMemory *mOfflineDataBufs;   // buffers for offline postproc input
+
+private:
+    int32_t sendDataNotify(int32_t msg_type,
+            camera_memory_t *data,
+            uint8_t index,
+            camera_frame_metadata_t *metadata,
+            qcamera_release_data_t *release_data,
+            uint32_t super_buf_frame_idx = 0);
+    int32_t sendEvtNotify(int32_t msg_type, int32_t ext1, int32_t ext2);
+    qcamera_jpeg_data_t *findJpegJobByJobId(uint32_t jobId);
+    mm_jpeg_color_format getColorfmtFromImgFmt(cam_format_t img_fmt);
+    mm_jpeg_format_t getJpegImgTypeFromImgFmt(cam_format_t img_fmt);
+    int32_t getJpegEncodingConfig(mm_jpeg_encode_params_t& encode_parm,
+                                  QCameraStream *main_stream,
+                                  QCameraStream *thumb_stream);
+    int32_t encodeData(qcamera_jpeg_data_t *jpeg_job_data,
+                       uint8_t &needNewSess);
+    int32_t queryStreams(QCameraStream **main,
+            QCameraStream **thumb,
+            QCameraStream **reproc,
+            mm_camera_buf_def_t **main_image,
+            mm_camera_buf_def_t **thumb_image,
+            mm_camera_super_buf_t *main_frame,
+            mm_camera_super_buf_t *reproc_frame);
+    int32_t syncStreamParams(mm_camera_super_buf_t *frame,
+            mm_camera_super_buf_t *reproc_frame);
+    void releaseSuperBuf(mm_camera_super_buf_t *super_buf);
+    void releaseSuperBuf(mm_camera_super_buf_t *super_buf,
+            cam_stream_type_t stream_type);
+    static void releaseNotifyData(void *user_data,
+                                  void *cookie,
+                                  int32_t cb_status);
+    void releaseJpegJobData(qcamera_jpeg_data_t *job);
+    static void releaseSaveJobData(void *data, void *user_data);
+    static void releaseRawData(void *data, void *user_data);
+    int32_t processRawImageImpl(mm_camera_super_buf_t *recvd_frame);
+
+    static void releaseJpegData(void *data, void *user_data);
+    static void releasePPInputData(void *data, void *user_data);
+    static void releaseOngoingPPData(void *data, void *user_data);
+
+    // thread entry points for m_dataProcTh / m_saveProcTh
+    static void *dataProcessRoutine(void *data);
+    static void *dataSaveRoutine(void *data);
+
+    int32_t setYUVFrameInfo(mm_camera_super_buf_t *recvd_frame);
+    static bool matchJobId(void *data, void *user_data, void *match_data);
+    // allocation/release callbacks handed to the jpeg interface
+    static int getJpegMemory(omx_jpeg_ouput_buf_t *out_buf);
+    static int releaseJpegMemory(omx_jpeg_ouput_buf_t *out_buf);
+
+    int32_t doReprocess();
+    int32_t stopCapture();
+private:
+    QCamera2HardwareInterface *m_parent;
+    jpeg_encode_callback_t     mJpegCB;
+    void *                     mJpegUserData;
+    mm_jpeg_ops_t              mJpegHandle;
+    mm_jpeg_mpo_ops_t          mJpegMpoHandle; // handle for mpo composition for dualcam
+    uint32_t                   mJpegClientHandle;
+    uint32_t                   mJpegSessionId;
+
+    void *                     m_pJpegOutputMem[MM_JPEG_MAX_BUF]; // jpeg output buffers
+    QCameraExif *              m_pJpegExifObj;
+    uint32_t                   m_bThumbnailNeeded;
+
+    int8_t                     mPPChannelCount;  // number of valid entries in mPPChannels
+    QCameraReprocessChannel    *mPPChannels[CAM_PP_CHANNEL_MAX];
+
+    camera_memory_t *          m_DataMem; // save frame mem pointer
+
+    int8_t                     m_bInited; // if postproc is inited
+
+    QCameraQueue m_inputPPQ;            // input queue for postproc
+    QCameraQueue m_ongoingPPQ;          // ongoing postproc queue
+    QCameraQueue m_inputJpegQ;          // input jpeg job queue
+    QCameraQueue m_ongoingJpegQ;        // ongoing jpeg job queue
+    QCameraQueue m_inputRawQ;           // input raw job queue
+    QCameraQueue m_inputSaveQ;          // input save job queue
+    QCameraCmdThread m_dataProcTh;      // thread for data processing
+    QCameraCmdThread m_saveProcTh;      // thread for storing buffers
+    uint32_t mSaveFrmCnt;               // save frame counter
+    static const char *STORE_LOCATION;  // path for storing buffers
+    bool mUseSaveProc;                  // use store thread
+    bool mUseJpegBurst;                 // use jpeg burst encoding mode
+    bool mJpegMemOpt;                   // jpeg memory optimization mode (see get/setJpegMemOpt)
+    uint32_t   m_JpegOutputMemCount;    // count of jpeg output buffers -- presumably
+    uint8_t mNewJpegSessionNeeded;      // set when a new jpeg session must be created
+    int32_t m_bufCountPPQ;              // NOTE(review): appears to track expected PPQ buffer count -- confirm
+    Vector<mm_camera_buf_def_t *> m_InputMetadata; // store input metadata buffers for AOST cases
+    size_t m_PPindex;                   // counter for each incoming AOST buffer
+    pthread_mutex_t m_reprocess_lock;   // lock to ensure reprocess job is not freed early.
+
+public:
+    cam_dimension_t m_dst_dim;          // destination (output) dimension
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_POSTPROC_H__ */
diff --git a/msmcobalt/QCamera2/HAL/QCameraStateMachine.cpp b/msmcobalt/QCamera2/HAL/QCameraStateMachine.cpp
new file mode 100644
index 0000000..f9e85b8
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/QCameraStateMachine.cpp
@@ -0,0 +1,3867 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCameraStateMachine"
+
+// System dependencies
+#include <utils/Errors.h>
+#include <stdio.h>
+
+// Camera dependencies
+#include "QCamera2HWI.h"
+#include "QCameraStateMachine.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : smEvtProcRoutine
+ *
+ * DESCRIPTION: Statemachine process thread routine to handle events
+ *              in different state.
+ *
+ * PARAMETERS :
+ *   @data    : ptr to QCameraStateMachine object
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void *QCameraStateMachine::smEvtProcRoutine(void *data)
+{
+    QCameraStateMachine *pme = (QCameraStateMachine *)data;
+    bool running = true;
+
+    LOGH("E");
+    while (running) {
+        // Block until a command is posted; ignore spurious non-EINVAL wakeups.
+        int ret;
+        do {
+            ret = cam_sem_wait(&pme->cmd_sem);
+            if (ret != 0 && errno != EINVAL) {
+                LOGE("cam_sem_wait error (%s)",
+                            strerror(errno));
+                return NULL;
+            }
+        } while (ret != 0);
+
+        // API commands take priority over events.
+        qcamera_sm_cmd_t *node = (qcamera_sm_cmd_t *)pme->api_queue.dequeue();
+        if (NULL == node) {
+            node = (qcamera_sm_cmd_t *)pme->evt_queue.dequeue();
+        }
+        if (NULL == node) {
+            continue;
+        }
+
+        switch (node->cmd) {
+        case QCAMERA_SM_CMD_TYPE_API:
+            // API is in a way sync call, so evt_payload is managed by HWI;
+            // no need to free payload for API
+            pme->stateMachine(node->evt, node->evt_payload);
+            break;
+        case QCAMERA_SM_CMD_TYPE_EVT:
+            // EVT is async call, so payload needs to be freed after use
+            pme->stateMachine(node->evt, node->evt_payload);
+            free(node->evt_payload);
+            node->evt_payload = NULL;
+            break;
+        case QCAMERA_SM_CMD_TYPE_EXIT:
+            running = false;
+            break;
+        default:
+            break;
+        }
+        free(node);
+        node = NULL;
+    }
+    LOGH("X");
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraStateMachine
+ *
+ * DESCRIPTION: constructor of QCameraStateMachine. Will start process thread
+ *
+ * PARAMETERS :
+ *   @ctrl    : ptr to HWI object
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraStateMachine::QCameraStateMachine(QCamera2HardwareInterface *ctrl) :
+    api_queue(),
+    evt_queue()
+{
+    m_parent = ctrl;
+    m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+    cmd_pid = 0;
+    cam_sem_init(&cmd_sem, 0);
+    // Spawn the state machine worker thread. Only name the thread if
+    // creation succeeded; otherwise pthread_setname_np would operate on
+    // a stale/zero thread id.
+    if (pthread_create(&cmd_pid,
+                       NULL,
+                       smEvtProcRoutine,
+                       this) == 0) {
+        pthread_setname_np(cmd_pid, "CAM_stMachine");
+    } else {
+        LOGE("Failed to launch state machine thread");
+        cmd_pid = 0;
+    }
+    m_bDelayPreviewMsgs = false;
+    m_DelayedMsgs = 0;
+    m_RestoreZSL = TRUE;
+    m_bPreviewCallbackNeeded = TRUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraStateMachine
+ *
+ * DESCRIPTION: desctructor of QCameraStateMachine. Will stop process thread.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraStateMachine::~QCameraStateMachine()
+{
+    // NOTE(review): the worker thread is not joined here; releaseThread()
+    // appears responsible for stopping it -- confirm callers invoke it
+    // before destruction. Only the semaphore is torn down here.
+    cam_sem_destroy(&cmd_sem);
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseThread
+ *
+ * DESCRIPTION: Sends an exit command and terminates the state machine thread
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraStateMachine::releaseThread()
+{
+    if (cmd_pid != 0) {
+        qcamera_sm_cmd_t *node =
+            (qcamera_sm_cmd_t *)malloc(sizeof(qcamera_sm_cmd_t));
+        if (NULL != node) {
+            memset(node, 0, sizeof(qcamera_sm_cmd_t));
+            node->cmd = QCAMERA_SM_CMD_TYPE_EXIT;
+
+            if (api_queue.enqueue((void *)node)) {
+                cam_sem_post(&cmd_sem);
+
+                /* wait until cmd thread exits */
+                if (pthread_join(cmd_pid, NULL) != 0) {
+                    LOGW("pthread dead already\n");
+                }
+            } else {
+                // The exit command never reached the worker thread, so
+                // joining would block forever; release the node and move on.
+                LOGE("Failed to enqueue exit command");
+                free(node);
+                node = NULL;
+            }
+        }
+        cmd_pid = 0;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : applyDelayedMsgs
+ *
+ * DESCRIPTION: Enable if needed any delayed message types
+ *
+ * PARAMETERS : None
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::applyDelayedMsgs()
+{
+    int32_t rc = NO_ERROR;
+
+    // Re-enable any deferred message types, then clear the delay bookkeeping.
+    if (m_bDelayPreviewMsgs) {
+        if (m_DelayedMsgs) {
+            rc = m_parent->enableMsgType(m_DelayedMsgs);
+            m_DelayedMsgs = 0;
+        }
+        m_bDelayPreviewMsgs = false;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : procAPI
+ *
+ * DESCRIPTION: process incoming API request from framework layer.
+ *
+ * PARAMETERS :
+ *   @evt          : event to be processed
+ *   @api_payload  : API payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procAPI(qcamera_sm_evt_enum_t evt,
+                                     void *api_payload)
+{
+    qcamera_sm_cmd_t *node =
+        (qcamera_sm_cmd_t *)malloc(sizeof(qcamera_sm_cmd_t));
+    if (NULL == node) {
+        LOGE("No memory for qcamera_sm_cmd_t");
+        return NO_MEMORY;
+    }
+
+    memset(node, 0, sizeof(qcamera_sm_cmd_t));
+    node->cmd = QCAMERA_SM_CMD_TYPE_API;
+    node->evt = evt;
+    node->evt_payload = api_payload;
+
+    // Hand the request to the state machine thread and wake it up.
+    if (!api_queue.enqueue((void *)node)) {
+        LOGE("API enqueue failed API = %d", evt);
+        free(node);
+        return UNKNOWN_ERROR;
+    }
+
+    cam_sem_post(&cmd_sem);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvt
+ *
+ * DESCRIPTION: process incoming envent from mm-camera-interface and
+ *              mm-jpeg-interface.
+ *
+ * PARAMETERS :
+ *   @evt          : event to be processed
+ *   @evt_payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvt(qcamera_sm_evt_enum_t evt,
+                                     void *evt_payload)
+{
+    qcamera_sm_cmd_t *node =
+        (qcamera_sm_cmd_t *)malloc(sizeof(qcamera_sm_cmd_t));
+    if (NULL == node) {
+        LOGE("No memory for qcamera_sm_cmd_t");
+        return NO_MEMORY;
+    }
+
+    memset(node, 0, sizeof(qcamera_sm_cmd_t));
+    node->cmd = QCAMERA_SM_CMD_TYPE_EVT;
+    node->evt = evt;
+    node->evt_payload = evt_payload;
+
+    // Queue the event for the state machine thread and signal it.
+    if (!evt_queue.enqueue((void *)node)) {
+        LOGE("EVENT enqueue failed Event = %d", evt);
+        free(node);
+        return UNKNOWN_ERROR;
+    }
+
+    cam_sem_post(&cmd_sem);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : stateMachine
+ *
+ * DESCRIPTION: finite state machine entry function. Depends on state,
+ *              incoming event will be handled differently.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::stateMachine(qcamera_sm_evt_enum_t evt, void *payload)
+{
+    LOGL("m_state %d, event (%d)", m_state, evt);
+
+    // Dispatch the event to the handler of the current state; each handler
+    // returns the status directly, so no accumulator is needed.
+    switch (m_state) {
+    case QCAMERA_SM_STATE_PREVIEW_STOPPED:
+        return procEvtPreviewStoppedState(evt, payload);
+    case QCAMERA_SM_STATE_PREVIEW_READY:
+        return procEvtPreviewReadyState(evt, payload);
+    case QCAMERA_SM_STATE_PREVIEWING:
+        return procEvtPreviewingState(evt, payload);
+    case QCAMERA_SM_STATE_PREPARE_SNAPSHOT:
+        return procEvtPrepareSnapshotState(evt, payload);
+    case QCAMERA_SM_STATE_PIC_TAKING:
+        return procEvtPicTakingState(evt, payload);
+    case QCAMERA_SM_STATE_RECORDING:
+        return procEvtRecordingState(evt, payload);
+    case QCAMERA_SM_STATE_VIDEO_PIC_TAKING:
+        return procEvtVideoPicTakingState(evt, payload);
+    case QCAMERA_SM_STATE_PREVIEW_PIC_TAKING:
+        return procEvtPreviewPicTakingState(evt, payload);
+    default:
+        // Unknown state: silently ignore the event, as the original did.
+        return NO_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvtPreviewStoppedState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ *              QCAMERA_SM_STATE_PREVIEW_STOPPED.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtPreviewStoppedState(qcamera_sm_evt_enum_t evt,
+                                                        void *payload)
+{
+    int32_t rc = NO_ERROR;
+    qcamera_api_result_t result;
+    memset(&result, 0, sizeof(qcamera_api_result_t));
+
+    // Each API event handled below must answer through signalAPIResult()
+    // (signalEvtResult() for notify events) so the waiting API caller unblocks.
+    LOGL("event (%d)", evt);
+    switch (evt) {
+    case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+        {
+            rc = m_parent->setPreviewWindow((struct preview_stream_ops *)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_CALLBACKS:
+        {
+            qcamera_sm_evt_setcb_payload_t *setcbs =
+                (qcamera_sm_evt_setcb_payload_t *)payload;
+            rc = m_parent->setCallBacks(setcbs->notify_cb,
+                                        setcbs->data_cb,
+                                        setcbs->data_cb_timestamp,
+                                        setcbs->get_memory,
+                                        setcbs->user);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+        {
+            // Parameter init runs as deferred work; it must complete before
+            // any parameter-dependent operation is attempted.
+            rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
+            if (NO_ERROR != rc) {
+                LOGE("Param init deferred work failed");
+            } else {
+                rc = m_parent->enableMsgType(*((int32_t *)payload));
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+        {
+            rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
+            if (NO_ERROR != rc) {
+                LOGE("Param init deferred work failed");
+            } else {
+                rc = m_parent->disableMsgType(*((int32_t *)payload));
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+        {
+            // Query-only event: status stays NO_ERROR, flag carries the answer.
+            int enabled = m_parent->msgTypeEnabled(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = enabled;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS:
+        {
+            bool needRestart = false;
+
+            rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
+            if (NO_ERROR != rc) {
+                LOGE("Param init deferred work failed");
+            } else {
+                rc = m_parent->updateParameters((char*)payload, needRestart);
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS_STOP:
+        {
+            // Preview is already stopped; only release pooled buffers here.
+            m_parent->m_memoryPool.clear();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS_COMMIT:
+        {
+            rc = m_parent->commitParameterChanges();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS_RESTART:
+        {
+            // Nothing to restart while stopped; just clear the restart flag.
+            m_parent->setNeedRestart(false);
+            result.status            = rc;
+            result.request_api       = evt;
+            result.result_type       = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_GET_PARAMS:
+        {
+            rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
+            if (NO_ERROR != rc) {
+                LOGE("Param init deferred work failed");
+                // Return an empty string so the caller still gets a
+                // free()-able buffer instead of a dangling result.
+                char* nullParams = (char *)malloc(1);
+                if (nullParams) {
+                    memset(nullParams, 0, 1);
+                }
+                result.params = nullParams;
+            } else {
+                result.params = m_parent->getParameters();
+            }
+            rc = result.params ? NO_ERROR : UNKNOWN_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PUT_PARAMS:
+        {
+            rc = m_parent->putParameters((char*)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREPARE_PREVIEW:
+        {
+            rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
+            if (NO_ERROR != rc) {
+                LOGE("Param init deferred work failed");
+            } else {
+                rc = m_parent->preparePreview();
+            }
+            if (rc == NO_ERROR) {
+                //prepare preview success, move to ready state
+                m_state = QCAMERA_SM_STATE_PREVIEW_READY;
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_PREVIEW:
+        {
+            // Three outcomes: param-init failure (stay stopped), no preview
+            // window yet (park in PREVIEW_READY until the window arrives),
+            // or full start (transition to PREVIEWING).
+            rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
+            if (NO_ERROR != rc) {
+                LOGE("Param init deferred work failed");
+            } else if (m_parent->mPreviewWindow == NULL) {
+                rc = m_parent->preparePreview();
+                if(rc == NO_ERROR) {
+                    // preview window is not set yet, move to previewReady state
+                    m_state = QCAMERA_SM_STATE_PREVIEW_READY;
+                } else {
+                    LOGE("preparePreview failed");
+                }
+            } else {
+                rc = m_parent->preparePreview();
+                if (rc == NO_ERROR) {
+                    applyDelayedMsgs();
+                    rc = m_parent->startPreview();
+                    if (rc != NO_ERROR) {
+                        m_parent->unpreparePreview();
+                    } else {
+                        // start preview success, move to previewing state
+                        m_state = QCAMERA_SM_STATE_PREVIEWING;
+                    }
+                }
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+        {
+            rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
+            if (NO_ERROR != rc) {
+                LOGE("Param init deferred work failed");
+            } else {
+                rc = m_parent->preparePreview();
+            }
+            if (rc == NO_ERROR) {
+                applyDelayedMsgs();
+                rc = m_parent->startPreview();
+                if (rc != NO_ERROR) {
+                    m_parent->unpreparePreview();
+                } else {
+                    m_state = QCAMERA_SM_STATE_PREVIEWING;
+                }
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+    break;
+    case QCAMERA_SM_EVT_STOP_PREVIEW:
+        {
+            // no op needed here
+            LOGW("already in preview stopped state, do nothing");
+            result.status = NO_ERROR;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+    case QCAMERA_SM_EVT_RECORDING_ENABLED:
+        {
+            // Neither preview nor recording can be active in this state.
+            result.status = NO_ERROR;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 0;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RELEASE:
+        {
+            rc = m_parent->release();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+        {
+            rc = m_parent->storeMetaDataInBuffers(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DUMP:
+        {
+            rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
+            if (NO_ERROR != rc) {
+                LOGE("Param init deferred work failed");
+            } else {
+                rc = m_parent->dump(*((int *)payload));
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SEND_COMMAND:
+        {
+            rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
+            if (NO_ERROR != rc) {
+                LOGE("Param init deferred work failed");
+            } else {
+                qcamera_sm_evt_command_payload_t *cmd_payload =
+                        (qcamera_sm_evt_command_payload_t *)payload;
+                rc = m_parent->sendCommand(cmd_payload->cmd,
+                        cmd_payload->arg1,
+                        cmd_payload->arg2);
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PRE_START_RECORDING:
+    case QCAMERA_SM_EVT_RESTART_STOP_PREVIEW:
+    case QCAMERA_SM_EVT_RESTART_START_PREVIEW:
+    case QCAMERA_SM_EVT_START_RECORDING:
+    case QCAMERA_SM_EVT_STOP_RECORDING:
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+    case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+    case QCAMERA_SM_EVT_PRE_TAKE_PICTURE:
+    case QCAMERA_SM_EVT_TAKE_PICTURE:
+        {
+            // These require an active preview/recording session; reject.
+            LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+    case QCAMERA_SM_EVT_CANCEL_PICTURE:
+        {
+            // no op needed here
+            LOGW("No ops for evt(%d) in state(%d)", evt, m_state);
+            result.status = NO_ERROR;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+        {
+            rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
+            if (NO_ERROR != rc) {
+                LOGE("Param init deferred work failed");
+            } else {
+                rc = m_parent->cancelAutoFocus();
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+        {
+            int32_t faceID = 0;
+            qcamera_sm_evt_reg_face_payload_t *reg_payload =
+                (qcamera_sm_evt_reg_face_payload_t *)payload;
+            rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+                                             reg_payload->config,
+                                             faceID);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+            result.handle = faceID;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+        {
+            // Internal notification: no API caller is waiting, so no
+            // signalAPIResult() here.
+            rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
+            if (NO_ERROR != rc) {
+                LOGE("Param init deferred work failed");
+            } else {
+                rc = m_parent->updateThermalLevel(payload);
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_NOTIFY:
+        {
+            mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+            switch (cam_evt->server_event_type) {
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                {
+                    m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_SERVER_DIED,
+                                            0);
+                }
+                break;
+            default:
+                LOGE("Invalid internal event %d in state(%d)",
+                             cam_evt->server_event_type, m_state);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+        {
+            // No ops, but need to notify (via the event channel, not the
+            // API channel, since this is not a blocking API call).
+            LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalEvtResult(&result);
+        }
+       break;
+    case QCAMERA_SM_EVT_EVT_INTERNAL:
+       {
+           qcamera_sm_internal_evt_payload_t *internal_evt =
+               (qcamera_sm_internal_evt_payload_t *)payload;
+           switch (internal_evt->evt_type) {
+           case QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE:
+               rc = m_parent->waitDeferredWork(m_parent->mParamInitJob);
+               if (NO_ERROR != rc) {
+                   LOGE("Param init deferred work failed");
+               } else {
+                   rc = m_parent->mParameters.updateFlashMode(internal_evt->led_data);
+               }
+               break;
+           default:
+               LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+               break;
+           }
+       }
+       break;
+    case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+    default:
+        LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvtPreviewReadyState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ *              QCAMERA_SM_STATE_PREVIEW_READY.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtPreviewReadyState(qcamera_sm_evt_enum_t evt,
+                                                      void *payload)
+{
+    int32_t rc = NO_ERROR;
+    qcamera_api_result_t result;
+    memset(&result, 0, sizeof(qcamera_api_result_t));
+
+    // PREVIEW_READY means preview is prepared but waiting on a preview
+    // window; every API event must be answered via signalAPIResult().
+    LOGL("event (%d)", evt);
+    switch (evt) {
+    case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+        {
+            // Window finally arrived: start preview now and leave this state.
+            m_parent->setPreviewWindow((struct preview_stream_ops *)payload);
+            if (m_parent->mPreviewWindow != NULL) {
+                applyDelayedMsgs();
+                rc = m_parent->startPreview();
+                if (rc != NO_ERROR) {
+                    m_parent->unpreparePreview();
+                    m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+                } else {
+                    m_state = QCAMERA_SM_STATE_PREVIEWING;
+                }
+            }
+
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_CALLBACKS:
+        {
+            qcamera_sm_evt_setcb_payload_t *setcbs =
+                (qcamera_sm_evt_setcb_payload_t *)payload;
+            rc = m_parent->setCallBacks(setcbs->notify_cb,
+                                        setcbs->data_cb,
+                                        setcbs->data_cb_timestamp,
+                                        setcbs->get_memory,
+                                        setcbs->user);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+        {
+            rc = m_parent->enableMsgType(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+        {
+            rc = m_parent->disableMsgType(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+        {
+            // Query-only event: status stays NO_ERROR, flag carries the answer.
+            int enabled = m_parent->msgTypeEnabled(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = enabled;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS:
+        {
+            bool needRestart = false;
+            rc = m_parent->updateParameters((char*)payload, needRestart);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS_STOP:
+        {
+            LOGD("Stopping preview...");
+            // need restart preview for parameters to take effect
+            m_parent->unpreparePreview();
+            // Clear memory pools
+            m_parent->m_memoryPool.clear();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS_COMMIT:
+        {
+            rc = m_parent->commitParameterChanges();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS_RESTART:
+        {
+            // prepare preview again
+            rc = m_parent->preparePreview();
+            if (rc != NO_ERROR) {
+                m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+            }
+            m_parent->setNeedRestart(false);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_GET_PARAMS:
+        {
+            result.params = m_parent->getParameters();
+            rc = result.params ? NO_ERROR : UNKNOWN_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PUT_PARAMS:
+        {
+            rc = m_parent->putParameters((char*)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREPARE_PREVIEW:
+        {
+            // no ops here
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+        {
+            rc = m_parent->startPreview();
+            if (rc != NO_ERROR) {
+                m_parent->unpreparePreview();
+                m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+            } else {
+                m_state = QCAMERA_SM_STATE_PREVIEWING;
+            }
+            // no ops here
+            // NOTE(review): this unconditional rc = NO_ERROR overwrites a
+            // startPreview() failure, so the API result reports success even
+            // when preview failed to start — looks like a copy-paste artifact
+            // from the PREPARE_PREVIEW handler; confirm before changing.
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_PREVIEW:
+        {
+            if (m_parent->mPreviewWindow != NULL) {
+                rc = m_parent->startPreview();
+                if (rc != NO_ERROR) {
+                    m_parent->unpreparePreview();
+                    m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+                } else {
+                    m_state = QCAMERA_SM_STATE_PREVIEWING;
+                }
+            }
+            // no ops here
+            // NOTE(review): same pattern as above — a startPreview() error is
+            // masked by this unconditional rc = NO_ERROR; verify intent.
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_PREVIEW:
+        {
+            // Undo the earlier preparePreview() and fall back to stopped.
+            m_parent->unpreparePreview();
+            rc = 0;
+            m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+        {
+            // Preview is considered enabled once prepared, even if the
+            // window has not arrived yet.
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 1;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RECORDING_ENABLED:
+        {
+            rc = 0;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 0;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+        {
+            rc = m_parent->storeMetaDataInBuffers(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DUMP:
+        {
+            rc = m_parent->dump(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+        {
+            rc = m_parent->autoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+        {
+            rc = m_parent->cancelAutoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SEND_COMMAND:
+        {
+            qcamera_sm_evt_command_payload_t *cmd_payload =
+                (qcamera_sm_evt_command_payload_t *)payload;
+            rc = m_parent->sendCommand(cmd_payload->cmd,
+                                       cmd_payload->arg1,
+                                       cmd_payload->arg2);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+        {
+            int32_t faceID = 0;
+            qcamera_sm_evt_reg_face_payload_t *reg_payload =
+                (qcamera_sm_evt_reg_face_payload_t *)payload;
+            rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+                                             reg_payload->config,
+                                             faceID);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+            result.handle = faceID;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PRE_START_RECORDING:
+    case QCAMERA_SM_EVT_RESTART_STOP_PREVIEW:
+    case QCAMERA_SM_EVT_RESTART_START_PREVIEW:
+    case QCAMERA_SM_EVT_START_RECORDING:
+    case QCAMERA_SM_EVT_STOP_RECORDING:
+    case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+    case QCAMERA_SM_EVT_PRE_TAKE_PICTURE:
+    case QCAMERA_SM_EVT_TAKE_PICTURE:
+    case QCAMERA_SM_EVT_CANCEL_PICTURE:
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+    case QCAMERA_SM_EVT_RELEASE:
+        {
+            // These require an active preview/recording session; reject.
+            LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_NOTIFY:
+        {
+            mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+            switch (cam_evt->server_event_type) {
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                {
+                    m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_SERVER_DIED,
+                                            0);
+                }
+                break;
+            default:
+                LOGE("Invalid internal event %d in state(%d)",
+                             cam_evt->server_event_type, m_state);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+        {
+            // No ops, but need to notify (event channel, not API channel).
+            LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalEvtResult(&result);
+        }
+       break;
+    case QCAMERA_SM_EVT_EVT_INTERNAL:
+       {
+           qcamera_sm_internal_evt_payload_t *internal_evt =
+                   (qcamera_sm_internal_evt_payload_t *)payload;
+           switch (internal_evt->evt_type) {
+           case QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE:
+               rc = m_parent->mParameters.updateFlashMode(internal_evt->led_data);
+               break;
+           default:
+               LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+               break;
+           }
+       }
+       break;
+    case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+    case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+    default:
+        LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvtPreviewingState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ *              QCAMERA_SM_STATE_PREVIEWING.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtPreviewingState(qcamera_sm_evt_enum_t evt,
+                                                    void *payload)
+{
+    int32_t rc = NO_ERROR;
+    qcamera_api_result_t result;
+    memset(&result, 0, sizeof(qcamera_api_result_t));
+
+    LOGL("event (%d)", evt);
+    // Dispatch the API / internal / server event received while preview is
+    // running. Most handlers report status back to the blocked caller via
+    // signalAPIResult(); state transitions are noted per-case below.
+    switch (evt) {
+    case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+        {
+            // Error setting preview window during previewing
+            LOGE("Error!! cannot set preview window when preview is running");
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_CALLBACKS:
+        {
+            qcamera_sm_evt_setcb_payload_t *setcbs =
+                (qcamera_sm_evt_setcb_payload_t *)payload;
+            rc = m_parent->setCallBacks(setcbs->notify_cb,
+                                        setcbs->data_cb,
+                                        setcbs->data_cb_timestamp,
+                                        setcbs->get_memory,
+                                        setcbs->user);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+        {
+            int32_t enable_msgs = *((int32_t *)payload);
+            // While preview messages are being delayed, hold back the
+            // PREVIEW_FRAME bit and remember it so applyDelayedMsgs() can
+            // enable it later.
+            if (m_bDelayPreviewMsgs &&
+                    (enable_msgs & CAMERA_MSG_PREVIEW_FRAME)) {
+                enable_msgs &= ~CAMERA_MSG_PREVIEW_FRAME;
+                m_DelayedMsgs = CAMERA_MSG_PREVIEW_FRAME;
+            }
+            rc = m_parent->enableMsgType(enable_msgs);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+        {
+            int32_t disable_msgs = *((int32_t *)payload);
+            // Also drop any pending delayed bits that are being disabled.
+            if (m_bDelayPreviewMsgs && m_DelayedMsgs) {
+                m_DelayedMsgs &= ~disable_msgs;
+                if (0 == m_DelayedMsgs) {
+                    m_bDelayPreviewMsgs = false;
+                }
+            }
+            rc = m_parent->disableMsgType(disable_msgs);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+        {
+            int32_t msgs = *((int32_t *)payload);
+            int enabled = m_parent->msgTypeEnabled(msgs);
+            // Delayed (not-yet-applied) message bits are reported as enabled.
+            if (m_bDelayPreviewMsgs && m_DelayedMsgs) {
+                enabled |= (msgs & m_DelayedMsgs);
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = enabled;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS:
+        {
+            bool needRestart = false;
+            rc = m_parent->updateParameters((char*)payload, needRestart);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS_STOP:
+        {
+            LOGD("Stopping preview...");
+            // stop preview
+            rc = m_parent->stopPreview();
+            // Clear memory pools
+            m_parent->m_memoryPool.clear();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS_COMMIT:
+        {
+            rc = m_parent->commitParameterChanges();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS_RESTART:
+        {
+            // start preview again
+            rc = m_parent->preparePreview();
+            if (rc == NO_ERROR) {
+                applyDelayedMsgs();
+                rc = m_parent->startPreview();
+                if (rc != NO_ERROR) {
+                    m_parent->unpreparePreview();
+                }
+                // NOTE(review): duplicates the check above (rc is unchanged
+                // in between); the two ifs could be merged into one body.
+                if (rc != NO_ERROR) {
+                    m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+                }
+            }
+            m_parent->setNeedRestart(false);
+            result.status            = rc;
+            result.request_api       = evt;
+            result.result_type       = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_GET_PARAMS:
+        {
+            result.params = m_parent->getParameters();
+            rc = result.params ? NO_ERROR : UNKNOWN_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PUT_PARAMS:
+        {
+            rc = m_parent->putParameters((char*)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREPARE_PREVIEW:
+        {
+            // no ops here
+            LOGW("Already in preview ready state, no ops here");
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_PREVIEW:
+    case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+        {
+            // no ops here
+            LOGW("Already in previewing, no ops here to start preview");
+            applyDelayedMsgs();
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_PREVIEW:
+        {
+            // Transition back to PREVIEW_STOPPED.
+            rc = m_parent->stopPreview();
+            applyDelayedMsgs();
+            m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+        {
+            applyDelayedMsgs();
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 1;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RECORDING_ENABLED:
+        {
+            // Recording is never active in this state.
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 0;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+        {
+            rc = m_parent->storeMetaDataInBuffers(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DUMP:
+        {
+            rc = m_parent->dump(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+        {
+            rc = m_parent->autoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+        {
+            rc = m_parent->cancelAutoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PRE_START_RECORDING:
+        {
+            rc = m_parent->preStartRecording();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_RECORDING:
+        {
+            rc = m_parent->startRecording();
+            if (rc == NO_ERROR) {
+                // move state to recording state
+                m_state = QCAMERA_SM_STATE_RECORDING;
+                applyDelayedMsgs();
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+        {
+            rc = m_parent->prepareHardwareForSnapshot(FALSE);
+            if (rc == NO_ERROR) {
+                // Do not signal API result in this case.
+                // Need to wait for snapshot done in metadata.
+                m_state = QCAMERA_SM_STATE_PREPARE_SNAPSHOT;
+                applyDelayedMsgs();
+            } else {
+                // Do not change state in this case.
+                LOGE("prepareHardwareForSnapshot failed %d",
+                     rc);
+
+                result.status = rc;
+                result.request_api = evt;
+                result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+                m_parent->signalAPIResult(&result);
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_PRE_TAKE_PICTURE:
+        {
+            rc = m_parent->preTakePicture();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_TAKE_PICTURE:
+       {
+           LOGL("QCAMERA_SM_EVT_TAKE_PICTURE ");
+           // Path 1: recording hint still set -- restart preview with the
+           // hint cleared before proceeding with the capture below.
+           if ( m_parent->mParameters.getRecordingHintValue() == true) {
+                m_parent->stopPreview();
+                m_parent->mParameters.updateRecordingHintValue(FALSE);
+                // start preview again
+                rc = m_parent->preparePreview();
+                if (rc == NO_ERROR) {
+                    rc = m_parent->startPreview();
+                    if (rc != NO_ERROR) {
+                        m_parent->unpreparePreview();
+                    }
+                }
+           }
+           // Path 2: ZSL or longshot capture. Preview normally keeps
+           // running (PREVIEW_PIC_TAKING); for manual-capture >= type 3
+           // preview is stopped and ZSL disabled, to be restored later
+           // (m_RestoreZSL).
+           if (m_parent->isZSLMode() || m_parent->isLongshotEnabled()) {
+               bool restartPreview = m_parent->isPreviewRestartEnabled();
+               if ((restartPreview) && (m_parent->mParameters.getManualCaptureMode()
+                       >= CAM_MANUAL_CAPTURE_TYPE_3)) {
+                   /* stop preview and disable ZSL now */
+                   m_parent->stopPreview();
+                   m_parent->mParameters.updateZSLModeValue(FALSE);
+                   m_RestoreZSL = TRUE;
+                   m_bDelayPreviewMsgs = true;
+                   m_state = QCAMERA_SM_STATE_PIC_TAKING;
+               } else {
+                   m_state = QCAMERA_SM_STATE_PREVIEW_PIC_TAKING;
+                   m_bDelayPreviewMsgs = true;
+               }
+
+               rc = m_parent->takePicture();
+               if (rc != NO_ERROR) {
+                   // move state to previewing state
+                   m_parent->unconfigureAdvancedCapture();
+                   m_state = QCAMERA_SM_STATE_PREVIEWING;
+               }
+               // For retro pictures the API result is signalled elsewhere;
+               // signal here only for non-retro captures or on failure.
+               if (!(m_parent->isRetroPicture()) || (rc != NO_ERROR)) {
+                   LOGD("signal API result, m_state = %d",
+                          m_state);
+                   result.status = rc;
+                   result.request_api = evt;
+                   result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+                   m_parent->signalAPIResult(&result);
+               }
+           } else {
+               // Path 3: regular (non-ZSL) capture -- preview stops; on
+               // failure attempt to bring preview back up.
+               m_state = QCAMERA_SM_STATE_PIC_TAKING;
+               rc = m_parent->takePicture();
+               if (rc != NO_ERROR) {
+                   int32_t temp_rc = NO_ERROR;
+                   // move state to preview stopped state
+                   m_parent->unconfigureAdvancedCapture();
+                   m_parent->stopPreview();
+                   // start preview again
+                   temp_rc = m_parent->preparePreview();
+                   if (temp_rc == NO_ERROR) {
+                       temp_rc = m_parent->startPreview();
+                       if (temp_rc != NO_ERROR) {
+                           m_parent->unpreparePreview();
+                           m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+                       } else {
+                           m_state = QCAMERA_SM_STATE_PREVIEWING;
+                       }
+                   } else {
+                       m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+                   }
+               }
+               result.status = rc;
+               result.request_api = evt;
+               result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+               m_parent->signalAPIResult(&result);
+           }
+        }
+        break;
+    case QCAMERA_SM_EVT_SEND_COMMAND:
+        {
+            qcamera_sm_evt_command_payload_t *cmd_payload =
+                    (qcamera_sm_evt_command_payload_t *)payload;
+            rc = m_parent->sendCommand(cmd_payload->cmd,
+                    cmd_payload->arg1,
+                    cmd_payload->arg2);
+            // Latch the restart hints carried in the command args; used by
+            // the LONGSHOT_ON handling below and by SEND_COMMAND_RESTART.
+            m_bPreviewNeedsRestart =
+                    (QCAMERA_SM_EVT_RESTART_PERVIEW == cmd_payload->arg1);
+            m_bPreviewDelayedRestart =
+                    (QCAMERA_SM_EVT_DELAYED_RESTART == cmd_payload->arg2);
+
+#ifndef VANILLA_HAL
+            if ((CAMERA_CMD_LONGSHOT_ON == cmd_payload->cmd) &&
+                    (m_bPreviewNeedsRestart)) {
+                m_parent->stopPreview();
+                // Clear memory pools
+                m_parent->m_memoryPool.clear();
+
+                if (!m_bPreviewDelayedRestart) {
+                    // start preview again
+                    rc = m_parent->preparePreview();
+                    if (rc == NO_ERROR) {
+                        applyDelayedMsgs();
+                        rc = m_parent->startPreview();
+                        if (rc != NO_ERROR) {
+                            m_parent->unpreparePreview();
+                        }
+                    }
+                }
+            }
+#endif
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SEND_COMMAND_RESTART:
+        {
+#ifndef VANILLA_HAL
+            // Second half of a delayed longshot restart requested via
+            // QCAMERA_SM_EVT_SEND_COMMAND above.
+            qcamera_sm_evt_command_payload_t *cmd_payload =
+                    (qcamera_sm_evt_command_payload_t *)payload;
+            if ((CAMERA_CMD_LONGSHOT_ON == cmd_payload->cmd) &&
+                    (m_bPreviewNeedsRestart) &&
+                    (m_bPreviewDelayedRestart)) {
+                // start preview again
+                rc = m_parent->preparePreview();
+                if (rc == NO_ERROR) {
+                    rc = m_parent->startPreview();
+                    if (rc != NO_ERROR) {
+                        m_parent->unpreparePreview();
+                    }
+                }
+            }
+#endif
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+        {
+            int32_t faceID = 0;
+            qcamera_sm_evt_reg_face_payload_t *reg_payload =
+                (qcamera_sm_evt_reg_face_payload_t *)payload;
+            rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+                                             reg_payload->config,
+                                             faceID);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+            result.handle = faceID;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_CANCEL_PICTURE:
+    case QCAMERA_SM_EVT_STOP_RECORDING:
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+    case QCAMERA_SM_EVT_RELEASE:
+        {
+            // These APIs are invalid while previewing (no capture or
+            // recording in flight).
+            LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_INTERNAL:
+        {
+            // Events generated internally (e.g. from metadata processing).
+            qcamera_sm_internal_evt_payload_t *internal_evt =
+                (qcamera_sm_internal_evt_payload_t *)payload;
+            switch (internal_evt->evt_type) {
+            case QCAMERA_INTERNAL_EVT_FOCUS_UPDATE:
+                rc = m_parent->processAutoFocusEvent(internal_evt->focus_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE:
+                break;
+            case QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT:
+                rc = m_parent->processFaceDetectionResult(&internal_evt->faces_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS:
+                rc = m_parent->processHistogramStats(internal_evt->stats_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_CROP_INFO:
+                rc = m_parent->processZoomEvent(internal_evt->crop_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_ASD_UPDATE:
+                rc = m_parent->processASDUpdate(internal_evt->asd_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE:
+                rc = m_parent->mParameters.updateFlashMode(internal_evt->led_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_AWB_UPDATE:
+                rc = m_parent->transAwbMetaToParams(internal_evt->awb_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_AE_UPDATE:
+                rc = m_parent->processAEInfo(internal_evt->ae_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_FOCUS_POS_UPDATE:
+                rc = m_parent->processFocusPositionInfo(internal_evt->focus_pos);
+                break;
+            case QCAMERA_INTERNAL_EVT_HDR_UPDATE:
+                rc = m_parent->processHDRData(internal_evt->hdr_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_RETRO_AEC_UNLOCK:
+                rc = m_parent->processRetroAECUnlock();
+                break;
+            case QCAMERA_INTERNAL_EVT_ZSL_CAPTURE_DONE:
+                rc = m_parent->processZSLCaptureDone();
+                break;
+            default:
+                LOGE("Invalid internal event %d in state(%d)",
+                             internal_evt->evt_type, m_state);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_NOTIFY:
+        {
+            // Events coming from the camera server/daemon.
+            mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+            switch (cam_evt->server_event_type) {
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                {
+                    // Daemon died: report unrecoverable error to the client.
+                    m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_SERVER_DIED,
+                                            0);
+                }
+                break;
+            default:
+                LOGW("no handling for server evt (%d) at this state",
+                       cam_evt->server_event_type);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+        {
+            rc = m_parent->updateThermalLevel(payload);
+        }
+        break;
+    case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+        {
+            // No ops, but need to notify
+            LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalEvtResult(&result);
+        }
+       break;
+    case QCAMERA_SM_EVT_RESTART_STOP_PREVIEW:
+        {
+            m_parent->stopPreview();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+       break;
+    case QCAMERA_SM_EVT_RESTART_START_PREVIEW:
+        {
+            rc = m_parent->preparePreview();
+            if (rc == NO_ERROR) {
+                rc = m_parent->startPreview();
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+       break;
+    case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+    default:
+        LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvtPrepareSnapshotState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ *              QCAMERA_SM_STATE_PREPARE_SNAPSHOT.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtPrepareSnapshotState(qcamera_sm_evt_enum_t evt,
+                                                    void *payload)
+{
+    int32_t rc = NO_ERROR;
+    qcamera_api_result_t result;
+    memset(&result, 0, sizeof(qcamera_api_result_t));
+
+    LOGL("event (%d)", evt);
+    switch (evt) {
+    // While waiting for prepare-snapshot completion, all regular API calls
+    // are rejected with INVALID_OPERATION.
+    case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+    case QCAMERA_SM_EVT_SET_CALLBACKS:
+    case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+    case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+    case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+    case QCAMERA_SM_EVT_SET_PARAMS:
+    case QCAMERA_SM_EVT_SET_PARAMS_STOP:
+    case QCAMERA_SM_EVT_SET_PARAMS_COMMIT:
+    case QCAMERA_SM_EVT_SET_PARAMS_RESTART:
+    case QCAMERA_SM_EVT_GET_PARAMS:
+    case QCAMERA_SM_EVT_PUT_PARAMS:
+    case QCAMERA_SM_EVT_PREPARE_PREVIEW:
+    case QCAMERA_SM_EVT_START_PREVIEW:
+    case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+    case QCAMERA_SM_EVT_STOP_PREVIEW:
+    case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+    case QCAMERA_SM_EVT_RECORDING_ENABLED:
+    case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+    case QCAMERA_SM_EVT_DUMP:
+    case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+    case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+    case QCAMERA_SM_EVT_PRE_START_RECORDING:
+    case QCAMERA_SM_EVT_RESTART_STOP_PREVIEW:
+    case QCAMERA_SM_EVT_RESTART_START_PREVIEW:
+    case QCAMERA_SM_EVT_START_RECORDING:
+    case QCAMERA_SM_EVT_PRE_TAKE_PICTURE:
+    case QCAMERA_SM_EVT_TAKE_PICTURE:
+    case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+    case QCAMERA_SM_EVT_SEND_COMMAND:
+    case QCAMERA_SM_EVT_CANCEL_PICTURE:
+    case QCAMERA_SM_EVT_STOP_RECORDING:
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+    case QCAMERA_SM_EVT_RELEASE:
+        {
+            LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_INTERNAL:
+        {
+            // Events generated internally (e.g. from metadata processing).
+            qcamera_sm_internal_evt_payload_t *internal_evt =
+                (qcamera_sm_internal_evt_payload_t *)payload;
+            switch (internal_evt->evt_type) {
+            case QCAMERA_INTERNAL_EVT_FOCUS_UPDATE:
+                rc = m_parent->processAutoFocusEvent(internal_evt->focus_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE:
+                m_parent->processPrepSnapshotDoneEvent(internal_evt->prep_snapshot_state);
+                // Prepare finished: return to PREVIEWING and unblock the
+                // pending PREPARE_SNAPSHOT API call with its result.
+                m_state = QCAMERA_SM_STATE_PREVIEWING;
+
+                result.status = NO_ERROR;
+                result.request_api = QCAMERA_SM_EVT_PREPARE_SNAPSHOT;
+                result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+                m_parent->signalAPIResult(&result);
+                break;
+            case QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT:
+                rc = m_parent->processFaceDetectionResult(&internal_evt->faces_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS:
+                rc = m_parent->processHistogramStats(internal_evt->stats_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_CROP_INFO:
+                rc = m_parent->processZoomEvent(internal_evt->crop_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_ASD_UPDATE:
+                rc = m_parent->processASDUpdate(internal_evt->asd_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE:
+                LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+                break;
+            case QCAMERA_INTERNAL_EVT_AWB_UPDATE:
+                rc = m_parent->transAwbMetaToParams(internal_evt->awb_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_AE_UPDATE:
+                rc = m_parent->processAEInfo(internal_evt->ae_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_FOCUS_POS_UPDATE:
+                rc = m_parent->processFocusPositionInfo(internal_evt->focus_pos);
+                break;
+            case QCAMERA_INTERNAL_EVT_HDR_UPDATE:
+                rc = m_parent->processHDRData(internal_evt->hdr_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_RETRO_AEC_UNLOCK:
+                rc = m_parent->processRetroAECUnlock();
+                break;
+            case QCAMERA_INTERNAL_EVT_ZSL_CAPTURE_DONE:
+                rc = m_parent->processZSLCaptureDone();
+                break;
+            default:
+                LOGE("Invalid internal event %d in state(%d)",
+                             internal_evt->evt_type, m_state);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_NOTIFY:
+        {
+            mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+            switch (cam_evt->server_event_type) {
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                {
+                    // Send internal events to stop indefinite wait on prepare
+                    // snapshot done event.
+                    result.status = rc;
+                    result.request_api = QCAMERA_SM_EVT_PREPARE_SNAPSHOT;
+                    result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+                    m_parent->signalAPIResult(&result);
+
+                    result.status = rc;
+                    result.request_api = QCAMERA_SM_EVT_TAKE_PICTURE;
+                    result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+                    m_parent->signalAPIResult(&result);
+
+                    // Then report the unrecoverable error to the client.
+                    m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_SERVER_DIED,
+                                            0);
+                }
+                break;
+            default:
+                LOGE("Invalid internal event %d in state(%d)",
+                             cam_evt->server_event_type, m_state);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+        {
+            // No ops, but need to notify
+            LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalEvtResult(&result);
+        }
+       break;
+    case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+        {
+            rc = m_parent->updateThermalLevel(payload);
+        }
+        break;
+    case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+    default:
+        LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvtPicTakingState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ *              QCAMERA_SM_STATE_PIC_TAKING.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtPicTakingState(qcamera_sm_evt_enum_t evt,
+                                                   void *payload)
+{
+    int32_t rc = NO_ERROR;
+    qcamera_api_result_t result;
+    memset(&result, 0, sizeof(qcamera_api_result_t));
+
+    LOGL("event (%d)", evt);
+    switch (evt) {
+    case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+        {
+            // Error setting preview window during previewing
+            LOGE("Error!! cannot set preview window when preview is running");
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_CALLBACKS:
+        {
+            qcamera_sm_evt_setcb_payload_t *setcbs =
+                (qcamera_sm_evt_setcb_payload_t *)payload;
+            rc = m_parent->setCallBacks(setcbs->notify_cb,
+                                        setcbs->data_cb,
+                                        setcbs->data_cb_timestamp,
+                                        setcbs->get_memory,
+                                        setcbs->user);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+        {
+            rc = m_parent->enableMsgType(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+        {
+            rc = m_parent->disableMsgType(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+        {
+            int enabled = m_parent->msgTypeEnabled(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = enabled;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS:
+        {
+            bool needRestart = false;
+            rc = m_parent->updateParameters((char*)payload, needRestart);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS_STOP:
+        {
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS_COMMIT:
+        {
+            rc = m_parent->commitParameterChanges();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS_RESTART:
+        {
+            m_parent->setNeedRestart(false);
+            result.status           =  rc;
+            result.request_api      =  evt;
+            result.result_type      =  QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_GET_PARAMS:
+        {
+            result.params = m_parent->getParameters();
+            rc = result.params ? NO_ERROR : UNKNOWN_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PUT_PARAMS:
+        {
+            rc = m_parent->putParameters((char*)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_PREVIEW:
+        {
+            // cancel picture first
+            rc = m_parent->cancelPicture();
+            m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 0;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RECORDING_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 0;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+        {
+            rc = m_parent->storeMetaDataInBuffers(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DUMP:
+        {
+            rc = m_parent->dump(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+        {
+            rc = m_parent->autoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+        {
+            rc = m_parent->cancelAutoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SEND_COMMAND:
+        {
+            qcamera_sm_evt_command_payload_t *cmd_payload =
+                (qcamera_sm_evt_command_payload_t *)payload;
+            rc = m_parent->sendCommand(cmd_payload->cmd,
+                                       cmd_payload->arg1,
+                                       cmd_payload->arg2);
+#ifndef VANILLA_HAL
+            if ( CAMERA_CMD_LONGSHOT_OFF == cmd_payload->cmd ) {
+                // move state to previewing state
+                m_state = QCAMERA_SM_STATE_PREVIEWING;
+            }
+#endif
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_CANCEL_PICTURE:
+        {
+            rc = m_parent->cancelPicture();
+            m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+        {
+            int32_t faceID = 0;
+            qcamera_sm_evt_reg_face_payload_t *reg_payload =
+                (qcamera_sm_evt_reg_face_payload_t *)payload;
+            rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+                                             reg_payload->config,
+                                             faceID);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+            result.handle = faceID;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PRE_TAKE_PICTURE:
+        {
+           if ( m_parent->isLongshotEnabled() ) {
+               // no ops here, need to signal NO_ERROR
+               rc = NO_ERROR;
+            } else {
+                LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+                rc = INVALID_OPERATION;
+            }
+
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_TAKE_PICTURE:
+        {
+           if ( m_parent->isLongshotEnabled() ) {
+               rc = m_parent->longShot();
+            } else {
+                LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+                rc = INVALID_OPERATION;
+            }
+
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+    case QCAMERA_SM_EVT_PRE_START_RECORDING:
+    case QCAMERA_SM_EVT_RESTART_STOP_PREVIEW:
+    case QCAMERA_SM_EVT_RESTART_START_PREVIEW:
+    case QCAMERA_SM_EVT_START_RECORDING:
+    case QCAMERA_SM_EVT_STOP_RECORDING:
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+    case QCAMERA_SM_EVT_PREPARE_PREVIEW:
+    case QCAMERA_SM_EVT_START_PREVIEW:
+    case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+    case QCAMERA_SM_EVT_RELEASE:
+        {
+            LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_INTERNAL:
+        {
+            qcamera_sm_internal_evt_payload_t *internal_evt =
+                (qcamera_sm_internal_evt_payload_t *)payload;
+            switch (internal_evt->evt_type) {
+            case QCAMERA_INTERNAL_EVT_FOCUS_UPDATE:
+                rc = m_parent->processAutoFocusEvent(internal_evt->focus_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE:
+                break;
+            case QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT:
+                break;
+            case QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS:
+                break;
+            case QCAMERA_INTERNAL_EVT_CROP_INFO:
+                rc = m_parent->processZoomEvent(internal_evt->crop_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_ASD_UPDATE:
+                rc = m_parent->processASDUpdate(internal_evt->asd_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE:
+                LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+                break;
+            case QCAMERA_INTERNAL_EVT_AWB_UPDATE:
+                rc = m_parent->transAwbMetaToParams(internal_evt->awb_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_AE_UPDATE:
+                rc = m_parent->processAEInfo(internal_evt->ae_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_FOCUS_POS_UPDATE:
+                rc = m_parent->processFocusPositionInfo(internal_evt->focus_pos);
+                break;
+            case QCAMERA_INTERNAL_EVT_HDR_UPDATE:
+                rc = m_parent->processHDRData(internal_evt->hdr_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_RETRO_AEC_UNLOCK:
+                rc = m_parent->processRetroAECUnlock();
+                break;
+            case QCAMERA_INTERNAL_EVT_ZSL_CAPTURE_DONE:
+                rc = m_parent->processZSLCaptureDone();
+                break;
+            default:
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_NOTIFY:
+        {
+            mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+            switch (cam_evt->server_event_type) {
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                {
+                    // Send internal events to stop indefinite wait on prepare
+                    // snapshot done event.
+                    result.status = rc;
+                    result.request_api = QCAMERA_SM_EVT_PREPARE_SNAPSHOT;
+                    result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+                    m_parent->signalAPIResult(&result);
+
+                    result.status = rc;
+                    result.request_api = QCAMERA_SM_EVT_TAKE_PICTURE;
+                    result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+                    m_parent->signalAPIResult(&result);
+
+                    m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_SERVER_DIED,
+                                            0);
+                }
+                break;
+            case CAM_EVENT_TYPE_CAC_DONE:
+                if (m_parent->isCACEnabled() || m_parent->mParameters.isOEMFeatEnabled()) {
+                    LOGD("[LONG_SHOT_DBG] : Received CAC Done");
+                    if (m_parent->isLongshotEnabled()
+                            && !m_parent->isCaptureShutterEnabled()) {
+                        // play shutter sound for longshot
+                        // after CAC stage is done
+                        m_parent->playShutter();
+                    }
+                    m_parent->mCACDoneReceived = TRUE;
+                }
+                break;
+            default:
+                LOGH("no handling for server evt (%d) at this state",
+                       cam_evt->server_event_type);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+        {
+            qcamera_jpeg_evt_payload_t *jpeg_job =
+                (qcamera_jpeg_evt_payload_t *)payload;
+            rc = m_parent->processJpegNotify(jpeg_job);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_CAPTURE_CHANNEL:
+        {
+            bool restartPreview = m_parent->isPreviewRestartEnabled();
+            rc = m_parent->stopCaptureChannel(restartPreview);
+
+            if (restartPreview && (NO_ERROR == rc)) {
+                rc = m_parent->preparePreview();
+                if (NO_ERROR == rc) {
+                    m_parent->m_bPreviewStarted = true;
+                    applyDelayedMsgs();
+                    rc = m_parent->startPreview();
+                }
+            }
+
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+        {
+            rc = m_parent->cancelPicture();
+
+            bool restartPreview = m_parent->isPreviewRestartEnabled();
+            if (restartPreview) {
+                if (m_parent->mParameters.getManualCaptureMode()
+                        >= CAM_MANUAL_CAPTURE_TYPE_3) {
+                    m_parent->mParameters.updateZSLModeValue(m_RestoreZSL);
+                    m_RestoreZSL = FALSE;
+                    rc = m_parent->preparePreview();
+                    if (NO_ERROR == rc) {
+                        m_parent->m_bPreviewStarted = true;
+                        applyDelayedMsgs();
+                        rc = m_parent->startPreview();
+                    }
+                }
+                m_state = QCAMERA_SM_STATE_PREVIEWING;
+            } else {
+                m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+            }
+
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalEvtResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+        {
+            rc = m_parent->updateThermalLevel(payload);
+        }
+        break;
+    default:
+        LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvtRecordingState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ *              QCAMERA_SM_STATE_RECORDING.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtRecordingState(qcamera_sm_evt_enum_t evt,
+                                                   void *payload)
+{
+    int32_t rc = NO_ERROR;
+    qcamera_api_result_t result;
+    memset(&result, 0, sizeof(qcamera_api_result_t));
+
+    LOGL("event (%d)", evt);
+    switch (evt) {
+    case QCAMERA_SM_EVT_PREPARE_PREVIEW:
+    case QCAMERA_SM_EVT_START_PREVIEW:
+    case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+        {
+            // WA: CTS test VideoSnapshot will try to
+            //     start preview during video recording.
+            LOGH("CTS video restart op");
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_CALLBACKS:
+        {
+            qcamera_sm_evt_setcb_payload_t *setcbs =
+                (qcamera_sm_evt_setcb_payload_t *)payload;
+            rc = m_parent->setCallBacks(setcbs->notify_cb,
+                                        setcbs->data_cb,
+                                        setcbs->data_cb_timestamp,
+                                        setcbs->get_memory,
+                                        setcbs->user);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+        {
+            rc = m_parent->enableMsgType(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+        {
+            rc = m_parent->disableMsgType(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+        {
+            int enabled = m_parent->msgTypeEnabled(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = enabled;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS:
+        {
+            bool needRestart = false;
+            rc = m_parent->updateParameters((char*)payload, needRestart);
+            if (rc == NO_ERROR) {
+                if (needRestart) {
+                    // cannot set parameters that requires restart during recording
+                    LOGE("Error!! cannot set parameters that requires restart during recording");
+                    rc = BAD_VALUE;
+                }
+            }
+            if (rc != NO_ERROR) {
+                m_parent->setNeedRestart(false);
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS_COMMIT:
+        {
+            rc = m_parent->commitParameterChanges();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS_STOP:
+    case QCAMERA_SM_EVT_SET_PARAMS_RESTART:
+        {
+            // Stream stop/restart for a parameter change is not allowed
+            // while encoding is active.
+            LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_GET_PARAMS:
+        {
+            result.params = m_parent->getParameters();
+            rc = result.params ? NO_ERROR : UNKNOWN_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PUT_PARAMS:
+        {
+            rc = m_parent->putParameters((char*)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 0;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RECORDING_ENABLED:
+        {
+            // In RECORDING state, recording is by definition enabled.
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 1;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+        {
+            rc = m_parent->storeMetaDataInBuffers(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DUMP:
+        {
+            rc = m_parent->dump(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+        {
+            rc = m_parent->autoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+        {
+            rc = m_parent->cancelAutoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SEND_COMMAND:
+        {
+            qcamera_sm_evt_command_payload_t *cmd_payload =
+                (qcamera_sm_evt_command_payload_t *)payload;
+            rc = m_parent->sendCommand(cmd_payload->cmd,
+                                       cmd_payload->arg1,
+                                       cmd_payload->arg2);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PRE_TAKE_PICTURE:
+        {
+            // No ops here, send NO_ERROR.
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_TAKE_PICTURE:
+        {
+            // Live snapshot: move to VIDEO_PIC_TAKING up front, and roll
+            // back to RECORDING if starting the snapshot fails.
+            m_state = QCAMERA_SM_STATE_VIDEO_PIC_TAKING;
+            rc = m_parent->takeLiveSnapshot();
+            if (rc != NO_ERROR) {
+                m_parent->unconfigureAdvancedCapture();
+                m_state = QCAMERA_SM_STATE_RECORDING;
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PRE_START_RECORDING:
+    case QCAMERA_SM_EVT_RESTART_STOP_PREVIEW:
+    case QCAMERA_SM_EVT_RESTART_START_PREVIEW:
+    case QCAMERA_SM_EVT_START_RECORDING:
+        {
+            // no ops here
+            LOGW("already in recording state, no ops for start_recording");
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_RECORDING:
+        {
+            rc = m_parent->stopRecording();
+            m_state = QCAMERA_SM_STATE_PREVIEWING;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_PREVIEW:
+        {
+            // Stopping preview while recording implies a two-step teardown:
+            // stop the recording first, then the preview.
+            rc = m_parent->stopRecording();
+            m_state = QCAMERA_SM_STATE_PREVIEWING;
+
+            rc = m_parent->stopPreview();
+            m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+        {
+            rc = m_parent->releaseRecordingFrame((const void *)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+        {
+            int32_t faceID = 0;
+            qcamera_sm_evt_reg_face_payload_t *reg_payload =
+                (qcamera_sm_evt_reg_face_payload_t *)payload;
+            rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+                                             reg_payload->config,
+                                             faceID);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+            result.handle = faceID;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+        {
+            //In Video snapshot, prepare hardware is a no-op.
+            result.status = NO_ERROR;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_CANCEL_PICTURE:
+    case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+    case QCAMERA_SM_EVT_RELEASE:
+        {
+            LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_INTERNAL:
+        {
+            qcamera_sm_internal_evt_payload_t *internal_evt =
+                (qcamera_sm_internal_evt_payload_t *)payload;
+            switch (internal_evt->evt_type) {
+            case QCAMERA_INTERNAL_EVT_FOCUS_UPDATE:
+                rc = m_parent->processAutoFocusEvent(internal_evt->focus_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE:
+                break;
+            case QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT:
+                rc = m_parent->processFaceDetectionResult(&internal_evt->faces_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS:
+                rc = m_parent->processHistogramStats(internal_evt->stats_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_CROP_INFO:
+                rc = m_parent->processZoomEvent(internal_evt->crop_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_ASD_UPDATE:
+                rc = m_parent->processASDUpdate(internal_evt->asd_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE:
+                LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+                break;
+            case QCAMERA_INTERNAL_EVT_AWB_UPDATE:
+                rc = m_parent->transAwbMetaToParams(internal_evt->awb_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_AE_UPDATE:
+                rc = m_parent->processAEInfo(internal_evt->ae_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_FOCUS_POS_UPDATE:
+                rc = m_parent->processFocusPositionInfo(internal_evt->focus_pos);
+                break;
+            case QCAMERA_INTERNAL_EVT_HDR_UPDATE:
+                rc = m_parent->processHDRData(internal_evt->hdr_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_RETRO_AEC_UNLOCK:
+                rc = m_parent->processRetroAECUnlock();
+                break;
+            case QCAMERA_INTERNAL_EVT_ZSL_CAPTURE_DONE:
+                rc = m_parent->processZSLCaptureDone();
+                break;
+            default:
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_NOTIFY:
+        {
+            mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+            switch (cam_evt->server_event_type) {
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                {
+                    m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_SERVER_DIED,
+                                            0);
+                }
+                break;
+            default:
+                LOGE("Invalid internal event %d in state(%d)",
+                             cam_evt->server_event_type, m_state);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+        {
+            rc = m_parent->updateThermalLevel(payload);
+        }
+        break;
+    case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+        {
+            // No ops, but need to notify
+            LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalEvtResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+    default:
+        LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvtVideoPicTakingState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ *              QCAMERA_SM_STATE_VIDEO_PIC_TAKING.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtVideoPicTakingState(qcamera_sm_evt_enum_t evt,
+                                                        void *payload)
+{
+    int32_t rc = NO_ERROR;
+    qcamera_api_result_t result;
+    memset(&result, 0, sizeof(qcamera_api_result_t));
+
+    LOGL("event (%d)", evt);
+    switch (evt) {
+    case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+        {
+            // Error setting preview window during previewing
+            LOGE("Error!! cannot set preview window when preview is running");
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_CALLBACKS:
+        {
+            qcamera_sm_evt_setcb_payload_t *setcbs =
+                (qcamera_sm_evt_setcb_payload_t *)payload;
+            rc = m_parent->setCallBacks(setcbs->notify_cb,
+                                        setcbs->data_cb,
+                                        setcbs->data_cb_timestamp,
+                                        setcbs->get_memory,
+                                        setcbs->user);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+        {
+            rc = m_parent->enableMsgType(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+        {
+            rc = m_parent->disableMsgType(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+        {
+            int enabled = m_parent->msgTypeEnabled(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = enabled;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS:
+        {
+            bool needRestart = false;
+            rc = m_parent->updateParameters((char*)payload, needRestart);
+            if (rc == NO_ERROR) {
+                if (needRestart) {
+                    // cannot set parameters that requires restart during recording
+                    LOGE("Error!! cannot set parameters that requires restart during recording");
+                    rc = BAD_VALUE;
+                }
+            }
+            if (rc != NO_ERROR) {
+                m_parent->setNeedRestart(false);
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS_COMMIT:
+        {
+            rc = m_parent->commitParameterChanges();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS_STOP:
+    case QCAMERA_SM_EVT_SET_PARAMS_RESTART:
+        {
+            ALOGE("%s: Error!! cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_GET_PARAMS:
+        {
+            result.params = m_parent->getParameters();
+            rc = result.params ? NO_ERROR : UNKNOWN_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PUT_PARAMS:
+        {
+            rc = m_parent->putParameters((char*)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 1;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RECORDING_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 1;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+        {
+            rc = m_parent->storeMetaDataInBuffers(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DUMP:
+        {
+            rc = m_parent->dump(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+        {
+            rc = m_parent->autoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+        {
+            rc = m_parent->cancelAutoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SEND_COMMAND:
+        {
+            qcamera_sm_evt_command_payload_t *cmd_payload =
+                (qcamera_sm_evt_command_payload_t *)payload;
+            rc = m_parent->sendCommand(cmd_payload->cmd,
+                                       cmd_payload->arg1,
+                                       cmd_payload->arg2);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_RECORDING:
+        {
+            rc = m_parent->cancelLiveSnapshot();
+            m_state = QCAMERA_SM_STATE_RECORDING;
+
+            rc = m_parent->stopRecording();
+            m_state = QCAMERA_SM_STATE_PREVIEWING;
+
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+        {
+            rc = m_parent->releaseRecordingFrame((const void *)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_CANCEL_PICTURE:
+        {
+            rc = m_parent->cancelLiveSnapshot();
+            m_state = QCAMERA_SM_STATE_RECORDING;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+        {
+            int32_t faceID = 0;
+            qcamera_sm_evt_reg_face_payload_t *reg_payload =
+                (qcamera_sm_evt_reg_face_payload_t *)payload;
+            rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+                                             reg_payload->config,
+                                             faceID);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+            result.handle = faceID;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_PREVIEW:
+        {
+            rc = m_parent->cancelLiveSnapshot();
+            m_state = QCAMERA_SM_STATE_RECORDING;
+
+            rc = m_parent->stopRecording();
+            m_state = QCAMERA_SM_STATE_PREVIEWING;
+
+            rc = m_parent->stopPreview();
+            m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PRE_START_RECORDING:
+    case QCAMERA_SM_EVT_RESTART_STOP_PREVIEW:
+    case QCAMERA_SM_EVT_RESTART_START_PREVIEW:
+    case QCAMERA_SM_EVT_START_RECORDING:
+    case QCAMERA_SM_EVT_PREPARE_PREVIEW:
+    case QCAMERA_SM_EVT_START_PREVIEW:
+    case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+    case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+    case QCAMERA_SM_EVT_PRE_TAKE_PICTURE:
+    case QCAMERA_SM_EVT_TAKE_PICTURE:
+    case QCAMERA_SM_EVT_RELEASE:
+        {
+            LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_INTERNAL:
+        {
+            qcamera_sm_internal_evt_payload_t *internal_evt =
+                (qcamera_sm_internal_evt_payload_t *)payload;
+            switch (internal_evt->evt_type) {
+            case QCAMERA_INTERNAL_EVT_FOCUS_UPDATE:
+                rc = m_parent->processAutoFocusEvent(internal_evt->focus_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE:
+                break;
+            case QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT:
+                rc = m_parent->processFaceDetectionResult(&internal_evt->faces_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS:
+                rc = m_parent->processHistogramStats(internal_evt->stats_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_CROP_INFO:
+                rc = m_parent->processZoomEvent(internal_evt->crop_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_ASD_UPDATE:
+                rc = m_parent->processASDUpdate(internal_evt->asd_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE:
+                LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+                break;
+            case QCAMERA_INTERNAL_EVT_AWB_UPDATE:
+                rc = m_parent->transAwbMetaToParams(internal_evt->awb_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_AE_UPDATE:
+                rc = m_parent->processAEInfo(internal_evt->ae_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_FOCUS_POS_UPDATE:
+                rc = m_parent->processFocusPositionInfo(internal_evt->focus_pos);
+                break;
+            case QCAMERA_INTERNAL_EVT_HDR_UPDATE:
+                rc = m_parent->processHDRData(internal_evt->hdr_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_RETRO_AEC_UNLOCK:
+                rc = m_parent->processRetroAECUnlock();
+                break;
+            case QCAMERA_INTERNAL_EVT_ZSL_CAPTURE_DONE:
+                rc = m_parent->processZSLCaptureDone();
+                break;
+            default:
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_NOTIFY:
+        {
+            mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+            switch (cam_evt->server_event_type) {
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                {
+                    m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_SERVER_DIED,
+                                            0);
+                }
+                break;
+            default:
+                LOGE("Invalid internal event %d in state(%d)",
+                             cam_evt->server_event_type, m_state);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+        {
+            qcamera_jpeg_evt_payload_t *jpeg_job =
+                (qcamera_jpeg_evt_payload_t *)payload;
+            rc = m_parent->processJpegNotify(jpeg_job);
+        }
+        break;
+    case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+        {
+            rc = m_parent->cancelLiveSnapshot();
+            m_state = QCAMERA_SM_STATE_RECORDING;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalEvtResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+        {
+            rc = m_parent->updateThermalLevel(payload);
+        }
+        break;
+    default:
+        LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvtPreviewPicTakingState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ *              QCAMERA_SM_STATE_PREVIEW_PIC_TAKING.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtPreviewPicTakingState(qcamera_sm_evt_enum_t evt,
+                                                          void *payload)
+{
+    int32_t rc = NO_ERROR;
+    qcamera_api_result_t result;
+    memset(&result, 0, sizeof(qcamera_api_result_t));
+
+    LOGL("event (%d)", evt);
+    switch (evt) {
+    case QCAMERA_SM_EVT_SET_CALLBACKS:
+        {
+            qcamera_sm_evt_setcb_payload_t *setcbs =
+                (qcamera_sm_evt_setcb_payload_t *)payload;
+            rc = m_parent->setCallBacks(setcbs->notify_cb,
+                                        setcbs->data_cb,
+                                        setcbs->data_cb_timestamp,
+                                        setcbs->get_memory,
+                                        setcbs->user);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+        {
+            rc = m_parent->enableMsgType(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+        {
+            rc = m_parent->disableMsgType(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+        {
+            int enabled = m_parent->msgTypeEnabled(*((int32_t *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = enabled;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS:
+        {
+            bool needRestart = false;
+            rc = m_parent->updateParameters((char*)payload, needRestart);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS_STOP:
+        {
+            // need restart preview for parameters to take effect
+            LOGD("Stopping preview...");
+            // stop preview
+            rc = m_parent->stopPreview();
+            // Clear memory pools
+            m_parent->m_memoryPool.clear();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS_COMMIT:
+        {
+            // commit parameter changes to server
+            rc = m_parent->commitParameterChanges();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS_RESTART:
+        {
+            // start preview again
+            rc = m_parent->preparePreview();
+            if (rc == NO_ERROR) {
+                applyDelayedMsgs();
+                rc = m_parent->startPreview();
+                if (rc != NO_ERROR) {
+                    m_parent->unpreparePreview();
+                }
+            }
+            if (rc != NO_ERROR) {
+                m_state = QCAMERA_SM_STATE_PIC_TAKING;
+            }
+            m_parent->setNeedRestart(false);
+            result.status           =  rc;
+            result.request_api      =  evt;
+            result.result_type      =  QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_GET_PARAMS:
+        {
+            result.params = m_parent->getParameters();
+            rc = result.params ? NO_ERROR : UNKNOWN_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PUT_PARAMS:
+        {
+            rc = m_parent->putParameters((char*)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 1;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RECORDING_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 0;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+        {
+            rc = m_parent->storeMetaDataInBuffers(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DUMP:
+        {
+            rc = m_parent->dump(*((int *)payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+        {
+            rc = m_parent->autoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+        {
+            rc = m_parent->cancelAutoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SEND_COMMAND:
+        {
+            qcamera_sm_evt_command_payload_t *cmd_payload =
+                (qcamera_sm_evt_command_payload_t *)payload;
+            rc = m_parent->sendCommand(cmd_payload->cmd,
+                                       cmd_payload->arg1,
+                                       cmd_payload->arg2);
+#ifndef VANILLA_HAL
+            if ( CAMERA_CMD_LONGSHOT_OFF == cmd_payload->cmd ) {
+                // move state to previewing state
+                m_state = QCAMERA_SM_STATE_PREVIEWING;
+            }
+#endif
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+        {
+            rc = m_parent->releaseRecordingFrame((const void *)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_CANCEL_PICTURE:
+        {
+            if (m_parent->isZSLMode() || m_parent->isLongshotEnabled()) {
+                rc = m_parent->cancelPicture();
+            } else {
+                rc = m_parent->cancelLiveSnapshot();
+            }
+            m_state = QCAMERA_SM_STATE_PREVIEWING;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_PREVIEW:
+        {
+            if (m_parent->isZSLMode()) {
+                // cancel picture first
+                rc = m_parent->cancelPicture();
+                m_parent->stopChannel(QCAMERA_CH_TYPE_ZSL);
+            } else if (m_parent->isLongshotEnabled()) {
+                // just cancel picture
+                rc = m_parent->cancelPicture();
+            } else {
+                rc = m_parent->cancelLiveSnapshot();
+                m_parent->stopChannel(QCAMERA_CH_TYPE_PREVIEW);
+            }
+            // unprepare preview
+            m_parent->unpreparePreview();
+            m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PRE_START_RECORDING:
+        {
+            if (m_parent->isZSLMode()) {
+                LOGE("Error!! cannot handle evt(%d) in state(%d) in ZSL mode", evt, m_state);
+                rc = INVALID_OPERATION;
+            } else if (m_parent->isLongshotEnabled()) {
+                LOGE("Error!! cannot handle evt(%d) in state(%d) in Longshot mode", evt, m_state);
+                rc = INVALID_OPERATION;
+            } else {
+                rc = m_parent->preStartRecording();
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_RECORDING:
+        {
+            if (m_parent->isZSLMode()) {
+                LOGE("Error!! cannot handle evt(%d) in state(%d) in ZSL mode",
+                       evt, m_state);
+                rc = INVALID_OPERATION;
+            } else if (m_parent->isLongshotEnabled()) {
+                LOGE("Error!! cannot handle evt(%d) in state(%d) in Longshot mode",
+                       evt, m_state);
+                rc = INVALID_OPERATION;
+            } else {
+                rc = m_parent->startRecording();
+                if (rc == NO_ERROR) {
+                    m_state = QCAMERA_SM_STATE_VIDEO_PIC_TAKING;
+                }
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+        {
+            int32_t faceID = 0;
+            qcamera_sm_evt_reg_face_payload_t *reg_payload =
+                (qcamera_sm_evt_reg_face_payload_t *)payload;
+            rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+                                             reg_payload->config,
+                                             faceID);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+            result.handle = faceID;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PRE_TAKE_PICTURE:
+        {
+           if ( m_parent->isLongshotEnabled() ) {
+               // no ops here, need to singal NO_ERROR
+               rc = NO_ERROR;
+            } else {
+                LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+                rc = INVALID_OPERATION;
+            }
+
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_TAKE_PICTURE:
+        {
+            if ( m_parent->isLongshotEnabled() ) {
+               rc = m_parent->longShot();
+            } else {
+                LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+                rc = INVALID_OPERATION;
+            }
+
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+
+    case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+        {
+          LOGD("Prepare Snapshot");
+          if (m_parent->isRetroPicture()) {
+              LOGD("Prepare Snapshot in Retro Mode");
+              rc = m_parent->prepareHardwareForSnapshot(FALSE);
+              if (rc != NO_ERROR) {
+                  LOGE("prepareHardwareForSnapshot failed %d",
+                       rc);
+                  result.status = rc;
+                  result.request_api = evt;
+                  result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+                  m_parent->signalAPIResult(&result);
+              }
+          }
+          else {
+              LOGE("Error!! cannot handle evt(%d) in state(%d)",
+                 evt, m_state);
+              rc = INVALID_OPERATION;
+              result.status = rc;
+              result.request_api = evt;
+              result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+              m_parent->signalAPIResult(&result);
+          }
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_RECORDING:
+    case QCAMERA_SM_EVT_PREPARE_PREVIEW:
+    case QCAMERA_SM_EVT_START_PREVIEW:
+    case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+    case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+    case QCAMERA_SM_EVT_RELEASE:
+        {
+            LOGE("Error!! cannot handle evt(%d) in state(%d)", evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_INTERNAL:
+        {
+            qcamera_sm_internal_evt_payload_t *internal_evt =
+                (qcamera_sm_internal_evt_payload_t *)payload;
+            switch (internal_evt->evt_type) {
+            case QCAMERA_INTERNAL_EVT_FOCUS_UPDATE:
+                rc = m_parent->processAutoFocusEvent(internal_evt->focus_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE:
+                LOGD("Received QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE event");
+                if (m_parent->isRetroPicture()) {
+                    m_parent->processPrepSnapshotDoneEvent(internal_evt->prep_snapshot_state);
+                    LOGD("Retro picture");
+                    result.status = NO_ERROR;
+                    result.request_api = QCAMERA_SM_EVT_PREPARE_SNAPSHOT;
+                    result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+                    m_parent->signalAPIResult(&result);
+                }
+                else {
+                    LOGE("Invalid Case for  "
+                            "QCAMERA_INTERNAL_EVT_READY_FOR_SNAPSHOT event");
+                }
+                break;
+            case QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT:
+                rc = m_parent->processFaceDetectionResult(&internal_evt->faces_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_READY_FOR_SNAPSHOT:
+                // This is valid only in Retro picture Mode
+                if (m_parent->isRetroPicture()) {
+                    LOGD("Received QCAMERA_INTERNAL_EVT_READY_FOR_SNAPSHOT event");
+                    result.status = NO_ERROR;
+                    result.request_api = QCAMERA_SM_EVT_TAKE_PICTURE;
+                    result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+                    m_parent->signalAPIResult(&result);
+                }
+                else {
+                    LOGD("Wrong Case for QCAMERA_INTERNAL_EVT_READY_FOR_SNAPSHOT event");
+                }
+                break;
+            case QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS:
+                rc = m_parent->processHistogramStats(internal_evt->stats_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_CROP_INFO:
+                rc = m_parent->processZoomEvent(internal_evt->crop_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_ASD_UPDATE:
+                rc = m_parent->processASDUpdate(internal_evt->asd_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE:
+                LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+                break;
+            case QCAMERA_INTERNAL_EVT_AWB_UPDATE:
+                rc = m_parent->transAwbMetaToParams(internal_evt->awb_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_AE_UPDATE:
+                rc = m_parent->processAEInfo(internal_evt->ae_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_FOCUS_POS_UPDATE:
+                rc = m_parent->processFocusPositionInfo(internal_evt->focus_pos);
+                break;
+            case QCAMERA_INTERNAL_EVT_HDR_UPDATE:
+                rc = m_parent->processHDRData(internal_evt->hdr_data);
+                break;
+            case QCAMERA_INTERNAL_EVT_RETRO_AEC_UNLOCK:
+                rc = m_parent->processRetroAECUnlock();
+                break;
+            case QCAMERA_INTERNAL_EVT_ZSL_CAPTURE_DONE:
+                rc = m_parent->processZSLCaptureDone();
+                break;
+            default:
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_NOTIFY:
+        {
+            mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+            switch (cam_evt->server_event_type) {
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                {
+                    // Send internal events to stop indefinite wait on prepare
+                    // snapshot done event.
+                    result.status = rc;
+                    result.request_api = QCAMERA_SM_EVT_PREPARE_SNAPSHOT;
+                    result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+                    m_parent->signalAPIResult(&result);
+
+                    result.status = rc;
+                    result.request_api = QCAMERA_SM_EVT_TAKE_PICTURE;
+                    result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+                    m_parent->signalAPIResult(&result);
+
+                    m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_SERVER_DIED,
+                                            0);
+                }
+                break;
+            case CAM_EVENT_TYPE_CAC_DONE:
+                if (m_parent->isCACEnabled() || m_parent->mParameters.isOEMFeatEnabled()) {
+                    LOGD("[LONG_SHOT_DBG] : Received CAC Done");
+                    if ((m_parent->isLongshotEnabled())
+                            && (!m_parent->isCaptureShutterEnabled())) {
+                        // play shutter sound for longshot
+                        // after CAC stage is done
+                        m_parent->playShutter();
+                    }
+                    m_parent->mCACDoneReceived = TRUE;
+                }
+                break;
+            default:
+                LOGE("Invalid internal event %d in state(%d)",
+                             cam_evt->server_event_type, m_state);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+        {
+            LOGL("Calling Process Jpeg Notify");
+            qcamera_jpeg_evt_payload_t *jpeg_job =
+                (qcamera_jpeg_evt_payload_t *)payload;
+            rc = m_parent->processJpegNotify(jpeg_job);
+        }
+        break;
+    case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+        {
+            LOGL("Snapshot Done");
+            if (m_parent->isZSLMode() || m_parent->isLongshotEnabled()) {
+                rc = m_parent->cancelPicture();
+            } else {
+                rc = m_parent->cancelLiveSnapshot();
+            }
+            m_state = QCAMERA_SM_STATE_PREVIEWING;
+            if (m_parent->isRetroPicture()){
+                result.status = rc;
+                result.request_api = evt;
+                result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+                LOGL("\n Signalling for JPEG snapshot done!!");
+                m_parent->signalAPIResult(&result);
+
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalEvtResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+        {
+            rc = m_parent->updateThermalLevel(payload);
+        }
+        break;
+    case QCAMERA_SM_EVT_RESTART_STOP_PREVIEW:
+        {
+            m_parent->stopPreview();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+       break;
+    case QCAMERA_SM_EVT_RESTART_START_PREVIEW:
+        {
+            rc = m_parent->preparePreview();
+            if (rc == NO_ERROR) {
+                rc = m_parent->startPreview();
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+       break;
+    default:
+        LOGW("Cannot handle evt(%d) in state(%d)", evt, m_state);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : isRecording
+ *
+ * DESCRIPTION: check if recording is in process.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : true -- recording
+ *              false -- not in recording mode
+ *==========================================================================*/
+bool QCameraStateMachine::isRecording()
+{
+    switch (m_state) {
+    case QCAMERA_SM_STATE_RECORDING:
+    case QCAMERA_SM_STATE_VIDEO_PIC_TAKING:
+        return true;
+    default:
+        return false;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : isPreviewRunning
+ *
+ * DESCRIPTION: check if preview is in process.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : true -- preview running
+ *              false -- preview stopped
+ *==========================================================================*/
+bool QCameraStateMachine::isPreviewRunning()
+{
+    switch (m_state) {
+    case QCAMERA_SM_STATE_PREVIEWING:
+    case QCAMERA_SM_STATE_RECORDING:
+    case QCAMERA_SM_STATE_VIDEO_PIC_TAKING:
+    case QCAMERA_SM_STATE_PREVIEW_PIC_TAKING:
+    case QCAMERA_SM_STATE_PREPARE_SNAPSHOT:
+    case QCAMERA_SM_STATE_PREVIEW_READY:
+        return true;
+    default:
+        return false;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : isPreviewReady
+ *
+ * DESCRIPTION: check if preview is in ready state.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : true -- preview is in ready state
+ *              false -- preview is stopped
+ *==========================================================================*/
+bool QCameraStateMachine::isPreviewReady()
+{
+    switch (m_state) {
+    case QCAMERA_SM_STATE_PREVIEW_READY:
+        return true;
+    default:
+        return false;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : isCaptureRunning
+ *
+ * DESCRIPTION: check if image capture is in process.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : true -- capture running
+ *              false -- capture stopped
+ *==========================================================================*/
+bool QCameraStateMachine::isCaptureRunning()
+{
+    switch (m_state) {
+    case QCAMERA_SM_STATE_PIC_TAKING:
+    case QCAMERA_SM_STATE_VIDEO_PIC_TAKING:
+    case QCAMERA_SM_STATE_PREVIEW_PIC_TAKING:
+        return true;
+    default:
+        return false;
+    }
+}
+/*===========================================================================
+ * FUNCTION   : isNonZSLCaptureRunning
+ *
+ * DESCRIPTION: check if image capture is in process in non ZSL mode.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : true -- capture running in non ZSL mode
+ *              false -- Either in not capture mode or captur is not in non ZSL mode
+ *==========================================================================*/
+bool QCameraStateMachine::isNonZSLCaptureRunning()
+{
+    switch (m_state) {
+    case QCAMERA_SM_STATE_PIC_TAKING:
+        return true;
+    default:
+        return false;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : dump
+ *
+ * DESCRIPTION: Composes a string based on current configuration
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : Formatted string
+ *==========================================================================*/
+String8 QCameraStateMachine::dump()
+{
+    String8 str("\n");
+    char s[128];
+
+    snprintf(s, 128, "Is Preview Running: %d\n", isPreviewRunning());
+    str += s;
+
+    snprintf(s, 128, "Is Capture Running: %d\n", isCaptureRunning());
+    str += s;
+
+    snprintf(s, 128, "Is Non ZSL Capture Running: %d\n",
+        isNonZSLCaptureRunning());
+    str += s;
+
+    snprintf(s, 128, "Current State: %d \n", m_state);
+    str += s;
+
+    switch(m_state){
+        case QCAMERA_SM_STATE_PREVIEW_STOPPED:
+        snprintf(s, 128, " QCAMERA_SM_STATE_PREVIEW_STOPPED \n");
+        break;
+
+        case QCAMERA_SM_STATE_PREVIEW_READY:
+        snprintf(s, 128, " QCAMERA_SM_STATE_PREVIEW_READY \n");
+        break;
+
+        case QCAMERA_SM_STATE_PREVIEWING:
+        snprintf(s, 128, " QCAMERA_SM_STATE_PREVIEWING \n");
+        break;
+
+        case QCAMERA_SM_STATE_PREPARE_SNAPSHOT:
+        snprintf(s, 128, " QCAMERA_SM_STATE_PREPARE_SNAPSHOT \n");
+        break;
+
+        case QCAMERA_SM_STATE_PIC_TAKING:
+        snprintf(s, 128, " QCAMERA_SM_STATE_PIC_TAKING \n");
+        break;
+
+        case QCAMERA_SM_STATE_RECORDING:
+        snprintf(s, 128, " QCAMERA_SM_STATE_RECORDING \n");
+        break;
+
+        case QCAMERA_SM_STATE_VIDEO_PIC_TAKING:
+        snprintf(s, 128, " QCAMERA_SM_STATE_VIDEO_PIC_TAKING \n");
+        break;
+
+        case QCAMERA_SM_STATE_PREVIEW_PIC_TAKING:
+        snprintf(s, 128, " QCAMERA_SM_STATE_PREVIEW_PIC_TAKING \n");
+        break;
+    }
+    str += s;
+
+    return str;
+}
+
+}; // namespace qcamera
diff --git a/msmcobalt/QCamera2/HAL/QCameraStateMachine.h b/msmcobalt/QCamera2/HAL/QCameraStateMachine.h
new file mode 100644
index 0000000..b02ba06
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/QCameraStateMachine.h
@@ -0,0 +1,263 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_STATEMACHINE_H__
+#define __QCAMERA_STATEMACHINE_H__
+
+// System dependencies
+#include <pthread.h>
+
+// Camera dependencies
+#include "QCameraQueue.h"
+#include "QCameraChannel.h"
+#include "cam_semaphore.h"
+
+extern "C" {
+#include "mm_camera_interface.h"
+}
+
+namespace qcamera {
+
+class QCamera2HardwareInterface;
+
// Events consumed by the QCameraStateMachine. The first group maps 1:1 to
// camera HAL API calls; the trailing group is internal/notification traffic.
typedef enum {
    /*******BEGIN OF: API EVT*********/
    QCAMERA_SM_EVT_SET_PREVIEW_WINDOW = 1,   // set preview window
    QCAMERA_SM_EVT_SET_CALLBACKS,            // set callbacks
    QCAMERA_SM_EVT_ENABLE_MSG_TYPE,          // enable msg type
    QCAMERA_SM_EVT_DISABLE_MSG_TYPE,         // disable msg type
    QCAMERA_SM_EVT_MSG_TYPE_ENABLED,         // query certain msg type is enabled

    QCAMERA_SM_EVT_SET_PARAMS,               // set parameters
    QCAMERA_SM_EVT_SET_PARAMS_STOP,          // stop camera after set params, if necessary
    QCAMERA_SM_EVT_SET_PARAMS_COMMIT,        // commit set params
    QCAMERA_SM_EVT_SET_PARAMS_RESTART,       // restart after set params, if necessary
    QCAMERA_SM_EVT_GET_PARAMS,               // get parameters
    QCAMERA_SM_EVT_PUT_PARAMS,               // put parameters, release param buf

    QCAMERA_SM_EVT_PREPARE_PREVIEW,          // prepare preview (zsl, camera mode, camcorder mode)
    QCAMERA_SM_EVT_START_PREVIEW,            // start preview (zsl, camera mode, camcorder mode)
    QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW,  // start no display preview (zsl, camera mode, camcorder mode)
    QCAMERA_SM_EVT_STOP_PREVIEW,             // stop preview (zsl, camera mode, camcorder mode)
    QCAMERA_SM_EVT_PREVIEW_ENABLED,          // query if preview is running

    QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS,   // request to store meta data in video buffers
    QCAMERA_SM_EVT_PRE_START_RECORDING,      // pre start recording, to prepare for recording
    QCAMERA_SM_EVT_START_RECORDING,          // start recording
    QCAMERA_SM_EVT_STOP_RECORDING,           // stop recording
    QCAMERA_SM_EVT_RECORDING_ENABLED,        // query if recording is running
    QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME,  // release recording frame
                                             // (identifier typo "RECORIDNG" is frozen API)

    QCAMERA_SM_EVT_PREPARE_SNAPSHOT,         // prepare snapshot in case LED needs to be flashed
    QCAMERA_SM_EVT_PRE_TAKE_PICTURE,         // pre take picture (to restart preview if necessary)
    QCAMERA_SM_EVT_TAKE_PICTURE,             // take picture (zsl, regular capture, live snapshot)
    QCAMERA_SM_EVT_CANCEL_PICTURE,           // cancel picture

    QCAMERA_SM_EVT_START_AUTO_FOCUS,         // start auto focus
    QCAMERA_SM_EVT_STOP_AUTO_FOCUS,          // stop auto focus
    QCAMERA_SM_EVT_SEND_COMMAND,             // send command

    QCAMERA_SM_EVT_RELEASE,                  // release camera resource
    QCAMERA_SM_EVT_DUMP,                     // dump
    QCAMERA_SM_EVT_REG_FACE_IMAGE,           // register a face image in imaging lib
    /*******END OF: API EVT*********/

    QCAMERA_SM_EVT_EVT_INTERNAL,             // internal evt notify
    QCAMERA_SM_EVT_EVT_NOTIFY,               // evt notify from server
    QCAMERA_SM_EVT_JPEG_EVT_NOTIFY,          // evt notify from jpeg
    QCAMERA_SM_EVT_SNAPSHOT_DONE,            // internal evt that snapshot is done
    QCAMERA_SM_EVT_THERMAL_NOTIFY,           // evt notify from thermal daemon
    QCAMERA_SM_EVT_STOP_CAPTURE_CHANNEL,     // stop capture channel
    QCAMERA_SM_EVT_RESTART_PERVIEW,          // internal preview restart
                                             // (identifier typo "PERVIEW" is frozen API)
    QCAMERA_SM_EVT_DELAYED_RESTART,          // preview restart needs delay (dual camera mode)
    QCAMERA_SM_EVT_SEND_COMMAND_RESTART,     // restart after send command (if necessary)
    QCAMERA_SM_EVT_RESTART_START_PREVIEW,    // preview start as part of restart (dual camera mode)
    QCAMERA_SM_EVT_RESTART_STOP_PREVIEW,     // preview stop as part of restart (dual camera mode)
    QCAMERA_SM_EVT_MAX
} qcamera_sm_evt_enum_t;
+
// Type of extra data carried with an API result; selects which member of the
// union inside qcamera_api_result_t is valid.
typedef enum {
    QCAMERA_API_RESULT_TYPE_DEF,             // default type, no additional info
    QCAMERA_API_RESULT_TYPE_ENABLE_FLAG,     // msg_enabled, preview_enabled, recording_enabled
    QCAMERA_API_RESULT_TYPE_PARAMS,          // returned parameters in string
    QCAMERA_API_RESULT_TYPE_HANDLE,          // returned handle in int
    QCAMERA_API_RESULT_TYPE_MAX
} qcamera_api_result_type_t;

// Result of a processed API event, signalled back to the blocked API caller.
typedef struct {
    int32_t status;                          // api call status
    qcamera_sm_evt_enum_t request_api;       // api evt requested
    qcamera_api_result_type_t result_type;   // result type
    union {
        int enabled;                          // result_type == QCAMERA_API_RESULT_TYPE_ENABLE_FLAG
        char *params;                         // result_type == QCAMERA_API_RESULT_TYPE_PARAMS
        int handle;                           // result_type == QCAMERA_API_RESULT_TYPE_HANDLE
    };
} qcamera_api_result_t;

// Singly-linked list node used to queue pending API results.
typedef struct api_result_list {
   qcamera_api_result_t result;
   struct api_result_list *next;
}api_result_list;

// definition for payload type of setting callback
typedef struct {
    camera_notify_callback notify_cb;
    camera_data_callback data_cb;
    camera_data_timestamp_callback data_cb_timestamp;
    camera_request_memory get_memory;
    void *user;
} qcamera_sm_evt_setcb_payload_t;

// definition for payload type of sending command
typedef struct {
    int32_t cmd;
    int32_t arg1;
    int32_t arg2;
} qcamera_sm_evt_command_payload_t;

// definition for payload type of registering a face image
// (QCAMERA_SM_EVT_REG_FACE_IMAGE)
typedef struct {
    void *img_ptr;
    cam_pp_offline_src_config_t *config;
} qcamera_sm_evt_reg_face_payload_t;

// Internal (non-API) event types delivered via QCAMERA_SM_EVT_EVT_INTERNAL.
typedef enum {
    QCAMERA_INTERNAL_EVT_FOCUS_UPDATE,       // focus updating result
    QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE, // prepare snapshot done
    QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT, // face detection result
    QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS,    // histogram
    QCAMERA_INTERNAL_EVT_CROP_INFO,          // crop info
    QCAMERA_INTERNAL_EVT_ASD_UPDATE,         // asd update result
    QCAMERA_INTERNAL_EVT_READY_FOR_SNAPSHOT, // Ready for Prepare Snapshot
    QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE, // Led mode override
    QCAMERA_INTERNAL_EVT_AWB_UPDATE,         // awb update result
    QCAMERA_INTERNAL_EVT_AE_UPDATE,          // ae update result
    QCAMERA_INTERNAL_EVT_FOCUS_POS_UPDATE,   // focus position update result
    QCAMERA_INTERNAL_EVT_HDR_UPDATE,         // HDR scene update
    QCAMERA_INTERNAL_EVT_RETRO_AEC_UNLOCK,   // retro burst AEC unlock event
    QCAMERA_INTERNAL_EVT_ZSL_CAPTURE_DONE,   // ZSL capture done event
    QCAMERA_INTERNAL_EVT_MAX
} qcamera_internal_evt_type_t;

// Payload of an internal event; evt_type selects the active union member.
typedef struct {
    qcamera_internal_evt_type_t evt_type;
    union {
        cam_auto_focus_data_t focus_data;
        cam_prep_snapshot_state_t prep_snapshot_state;
        cam_faces_data_t faces_data;
        cam_hist_stats_t stats_data;
        cam_crop_data_t crop_data;
        cam_asd_decision_t asd_data;
        cam_flash_mode_t led_data;
        cam_awb_params_t awb_data;
        cam_3a_params_t ae_data;
        cam_focus_pos_info_t focus_pos;
        cam_asd_hdr_scene_data_t hdr_data;
    };
} qcamera_sm_internal_evt_payload_t;
+
// State machine driving QCamera2HardwareInterface. API calls and camera/jpeg
// interface events are queued and processed serially on a dedicated command
// thread; results are signalled back through the parent HWI object.
class QCameraStateMachine
{
public:
    QCameraStateMachine(QCamera2HardwareInterface *ctrl);
    virtual ~QCameraStateMachine();
    // Enqueue an API-originated event; processed asynchronously on cmd thread.
    int32_t procAPI(qcamera_sm_evt_enum_t evt, void *api_payload);
    // Enqueue an event from mm-camera-interface/mm-jpeg-interface.
    int32_t procEvt(qcamera_sm_evt_enum_t evt, void *evt_payload);

    bool isPreviewRunning(); // check if preview is running
    bool isPreviewReady(); // check if preview is ready
    bool isCaptureRunning(); // check if image capture is running
    bool isNonZSLCaptureRunning(); // check if image capture is running in non ZSL mode
    String8 dump(); //returns the state information in a string
    bool isPrepSnapStateRunning();
    bool isRecording();
    void releaseThread();

    bool isPreviewCallbackNeeded() { return m_bPreviewCallbackNeeded; };
    int32_t setPreviewCallbackNeeded(bool enabled) {m_bPreviewCallbackNeeded=enabled; return 0;};
private:
    typedef enum {
        QCAMERA_SM_STATE_PREVIEW_STOPPED,          // preview is stopped
        QCAMERA_SM_STATE_PREVIEW_READY,            // preview started but preview window is not set yet
        QCAMERA_SM_STATE_PREVIEWING,               // previewing
        QCAMERA_SM_STATE_PREPARE_SNAPSHOT,         // prepare snapshot in case aec estimation is
                                                   // needed for LED flash
        QCAMERA_SM_STATE_PIC_TAKING,               // taking picture (preview stopped)
        QCAMERA_SM_STATE_RECORDING,                // recording (preview running)
        QCAMERA_SM_STATE_VIDEO_PIC_TAKING,         // taking live snapshot during recording (preview running)
        QCAMERA_SM_STATE_PREVIEW_PIC_TAKING        // taking ZSL/live snapshot (recording stopped but preview running)
    } qcamera_state_enum_t;

    typedef enum
    {
        QCAMERA_SM_CMD_TYPE_API,                   // cmd from API
        QCAMERA_SM_CMD_TYPE_EVT,                   // cmd from mm-camera-interface/mm-jpeg-interface event
        QCAMERA_SM_CMD_TYPE_EXIT,                  // cmd for exiting statemachine cmdThread
        QCAMERA_SM_CMD_TYPE_MAX
    } qcamera_sm_cmd_type_t;

    typedef struct {
        qcamera_sm_cmd_type_t cmd;                  // cmd type (where it comes from)
        qcamera_sm_evt_enum_t evt;                  // event type
        void *evt_payload;                          // ptr to payload
    } qcamera_sm_cmd_t;

    // Dispatch an event to the per-state handler below based on m_state.
    int32_t stateMachine(qcamera_sm_evt_enum_t evt, void *payload);
    int32_t procEvtPreviewStoppedState(qcamera_sm_evt_enum_t evt, void *payload);
    int32_t procEvtPreviewReadyState(qcamera_sm_evt_enum_t evt, void *payload);
    int32_t procEvtPreviewingState(qcamera_sm_evt_enum_t evt, void *payload);
    int32_t procEvtPrepareSnapshotState(qcamera_sm_evt_enum_t evt, void *payload);
    int32_t procEvtPicTakingState(qcamera_sm_evt_enum_t evt, void *payload);
    int32_t procEvtRecordingState(qcamera_sm_evt_enum_t evt, void *payload);
    int32_t procEvtVideoPicTakingState(qcamera_sm_evt_enum_t evt, void *payload);
    int32_t procEvtPreviewPicTakingState(qcamera_sm_evt_enum_t evt, void *payload);

    // main statemachine process routine
    static void *smEvtProcRoutine(void *data);

    int32_t applyDelayedMsgs();

    QCamera2HardwareInterface *m_parent;  // ptr to HWI
    qcamera_state_enum_t m_state;         // statemachine state
    QCameraQueue api_queue;               // cmd queue for APIs
    QCameraQueue evt_queue;               // cmd queue for evt from mm-camera-intf/mm-jpeg-intf
    pthread_t cmd_pid;                    // cmd thread ID
    cam_semaphore_t cmd_sem;              // semaphore for cmd thread
    bool m_bDelayPreviewMsgs;             // Delay preview callback enable during ZSL snapshot
    bool m_bPreviewNeedsRestart;          // Preview needs restart
    bool m_bPreviewDelayedRestart;        // Preview delayed restart
    int32_t m_DelayedMsgs;
    bool m_RestoreZSL;
    bool m_bPreviewCallbackNeeded;
};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_STATEMACHINE_H__ */
diff --git a/msmcobalt/QCamera2/HAL/QCameraStream.cpp b/msmcobalt/QCamera2/HAL/QCameraStream.cpp
new file mode 100644
index 0000000..9bb02b8
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/QCameraStream.cpp
@@ -0,0 +1,2662 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCameraStream"
+
+// System dependencies
+#include <utils/Errors.h>
+
+// Camera dependencies
+#include "QCameraBufferMaps.h"
+#include "QCamera2HWI.h"
+#include "QCameraStream.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+#define CAMERA_MIN_ALLOCATED_BUFFERS     3
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : get_bufs
+ *
+ * DESCRIPTION: static function entry to allocate stream buffers
+ *
+ * PARAMETERS :
+ *   @offset     : offset info of stream buffers
+ *   @num_bufs   : number of buffers allocated
+ *   @initial_reg_flag: flag to indicate if buffer needs to be registered
+ *                      at kernel initially
+ *   @bufs       : output of allocated buffers
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *   @user_data  : user data ptr of ops_tbl
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::get_bufs(
+                     cam_frame_len_offset_t *offset,
+                     uint8_t *num_bufs,
+                     uint8_t **initial_reg_flag,
+                     mm_camera_buf_def_t **bufs,
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                     void *user_data)
+{
+    QCameraStream *stream = reinterpret_cast<QCameraStream *>(user_data);
+    if (!stream) {
+        LOGE("getBufs invalid stream pointer");
+        return NO_MEMORY;
+    }
+
+    if (stream->mStreamInfo != NULL
+            && stream->mStreamInfo->streaming_mode == CAM_STREAMING_MODE_BATCH) {
+        //Batch Mode. Allocate Butch buffers
+        return stream->allocateBatchBufs(offset, num_bufs,
+                initial_reg_flag, bufs, ops_tbl);
+    } else {
+        // Plane Buffer. Allocate plane buffer
+        return stream->getBufs(offset, num_bufs,
+                initial_reg_flag, bufs, ops_tbl);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : get_bufs_deffered
+ *
+ * DESCRIPTION: static function entry to allocate deffered stream buffers
+ *
+ * PARAMETERS :
+ *   @offset     : offset info of stream buffers
+ *   @num_bufs   : number of buffers allocated
+ *   @initial_reg_flag: flag to indicate if buffer needs to be registered
+ *                      at kernel initially
+ *   @bufs       : output of allocated buffers
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *   @user_data  : user data ptr of ops_tbl
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::get_bufs_deffered(
+        cam_frame_len_offset_t * /* offset */,
+        uint8_t *num_bufs,
+        uint8_t **initial_reg_flag,
+        mm_camera_buf_def_t **bufs,
+        mm_camera_map_unmap_ops_tbl_t * ops_tbl,
+        void *user_data)
+{
+    QCameraStream *stream = reinterpret_cast<QCameraStream *>(user_data);
+
+    if (!stream) {
+        LOGE("getBufs invalid stream pointer");
+        return NO_MEMORY;
+    }
+
+    return stream->getBufsDeferred(NULL /*offset*/, num_bufs, initial_reg_flag, bufs,
+            ops_tbl);
+}
+
+/*===========================================================================
+ * FUNCTION   : put_bufs
+ *
+ * DESCRIPTION: static function entry to deallocate stream buffers
+ *
+ * PARAMETERS :
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *   @user_data  : user data ptr of ops_tbl
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::put_bufs(
+        mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+        void *user_data)
+{
+    QCameraStream *stream = reinterpret_cast<QCameraStream *>(user_data);
+    if (!stream) {
+        LOGE("putBufs invalid stream pointer");
+        return NO_MEMORY;
+    }
+
+    if (stream->mStreamInfo != NULL
+            && stream->mStreamInfo->streaming_mode == CAM_STREAMING_MODE_BATCH) {
+        //Batch Mode. release  Butch buffers
+        return stream->releaseBatchBufs(ops_tbl);
+    } else {
+        // Plane Buffer. release  plane buffer
+        return stream->putBufs(ops_tbl);
+    }
+
+}
+
+/*===========================================================================
+ * FUNCTION   : put_bufs_deffered
+ *
+ * DESCRIPTION: static function entry to deallocate deffered stream buffers
+ *
+ * PARAMETERS :
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *   @user_data  : user data ptr of ops_tbl
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::put_bufs_deffered(
+        mm_camera_map_unmap_ops_tbl_t * /*ops_tbl */,
+        void * user_data )
+{
+    QCameraStream *stream = reinterpret_cast<QCameraStream *>(user_data);
+
+    if (!stream) {
+        LOGE("put_bufs_deffered invalid stream pointer");
+        return NO_MEMORY;
+    }
+
+    return stream->putBufsDeffered();
+}
+
+/*===========================================================================
+ * FUNCTION   : invalidate_buf
+ *
+ * DESCRIPTION: static function entry to invalidate a specific stream buffer
+ *
+ * PARAMETERS :
+ *   @index      : index of the stream buffer to invalidate
+ *   @user_data  : user data ptr of ops_tbl
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::invalidate_buf(uint32_t index, void *user_data)
+{
+    QCameraStream *stream = reinterpret_cast<QCameraStream *>(user_data);
+    if (!stream) {
+        LOGE("invalid stream pointer");
+        return NO_MEMORY;
+    }
+
+    if (stream->mStreamInfo->is_secure == SECURE){
+        return 0;
+    }
+
+    if (stream->mStreamInfo->streaming_mode == CAM_STREAMING_MODE_BATCH) {
+        for (int i = 0; i < stream->mBufDefs[index].user_buf.bufs_used; i++) {
+            uint32_t buf_idx = stream->mBufDefs[index].user_buf.buf_idx[i];
+            stream->invalidateBuf(buf_idx);
+        }
+    } else {
+        return stream->invalidateBuf(index);
+    }
+
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : clean_invalidate_buf
+ *
+ * DESCRIPTION: static function entry to clean invalidate a specific stream buffer
+ *
+ * PARAMETERS :
+ *   @index      : index of the stream buffer to clean invalidate
+ *   @user_data  : user data ptr of ops_tbl
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::clean_invalidate_buf(uint32_t index, void *user_data)
+{
+    QCameraStream *stream = reinterpret_cast<QCameraStream *>(user_data);
+    if (!stream) {
+        LOGE("invalid stream pointer");
+        return NO_MEMORY;
+    }
+
+    if (stream->mStreamInfo->is_secure == SECURE){
+        return 0;
+    }
+
+    if (stream->mStreamInfo->streaming_mode == CAM_STREAMING_MODE_BATCH) {
+        for (int i = 0; i < stream->mBufDefs[index].user_buf.bufs_used; i++) {
+            uint32_t buf_idx = stream->mBufDefs[index].user_buf.buf_idx[i];
+            stream->cleanInvalidateBuf(buf_idx);
+        }
+    } else {
+        return stream->cleanInvalidateBuf(index);
+    }
+
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : set_config_ops
+ *
+ * DESCRIPTION: static entry point through which mm-camera-interface hands the
+ *              stream its buffer mapping/unmapping ops table
+ *
+ * PARAMETERS :
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *   @user_data  : user data ptr of ops_tbl (opaque QCameraStream pointer)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::set_config_ops(mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+        void *user_data)
+{
+    QCameraStream *pStream = reinterpret_cast<QCameraStream *>(user_data);
+    if (pStream == NULL) {
+        LOGE("Stream invalid");
+        return NO_MEMORY;
+    }
+
+    // Cache a copy of the ops table for later map/unmap calls.
+    pStream->m_MemOpsTbl = *ops_tbl;
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraStream
+ *
+ * DESCRIPTION: constructor of QCameraStream
+ *
+ * PARAMETERS :
+ *   @allocator  : memory allocator obj
+ *   @camHandle  : camera handle
+ *   @chId       : channel handle
+ *   @camOps     : ptr to camera ops table
+ *   @paddingInfo: ptr to padding info
+ *   @deffered   : true to defer buffer allocation/mapping to background tasks
+ *   @online_rotation: rotation applied online
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraStream::QCameraStream(QCameraAllocator &allocator,
+        uint32_t camHandle, uint32_t chId,
+        mm_camera_ops_t *camOps, cam_padding_info_t *paddingInfo,
+        bool deffered, cam_rotation_t online_rotation):
+        mDumpFrame(0),
+        mDumpMetaFrame(0),
+        mDumpSkipCnt(0),
+        mStreamTimestamp(0),
+        mCamHandle(camHandle),
+        mChannelHandle(chId),
+        mHandle(0),
+        mCamOps(camOps),
+        mStreamInfo(NULL),
+        mNumBufs(0),
+        mNumPlaneBufs(0),
+        mNumBufsNeedAlloc(0),
+        mRegFlags(NULL),
+        mDataCB(NULL),
+        mSYNCDataCB(NULL),
+        mUserData(NULL),
+        mDataQ(releaseFrameData, this),
+        mStreamInfoBuf(NULL),
+        mMiscBuf(NULL),
+        mStreamBufs(NULL),
+        mStreamBatchBufs(NULL),
+        mAllocator(allocator),
+        mBufDefs(NULL),
+        mPlaneBufDefs(NULL),
+        mOnlineRotation(online_rotation),
+        mStreamBufsAcquired(false),
+        m_bActive(false),
+        mDynBufAlloc(false),
+        mBufAllocPid(0),
+        mDefferedAllocation(deffered),
+        wait_for_cond(false),
+        mAllocTaskId(0),
+        mMapTaskId(0),
+        mSyncCBEnabled(false)
+{
+    // Register this object as the context for the buffer-memory vtable
+    // callbacks invoked by mm-camera-interface.
+    mMemVtbl.user_data = this;
+    // Deferred streams allocate and map their buffers later via
+    // background tasks, so they get the *_deffered callback variants.
+    if ( !deffered ) {
+        mMemVtbl.get_bufs = get_bufs;
+        mMemVtbl.put_bufs = put_bufs;
+    } else {
+        mMemVtbl.get_bufs = get_bufs_deffered;
+        mMemVtbl.put_bufs = put_bufs_deffered;
+    }
+    mMemVtbl.invalidate_buf = invalidate_buf;
+    mMemVtbl.clean_invalidate_buf = clean_invalidate_buf;
+    mMemVtbl.set_config_ops = set_config_ops;
+    // Zero all cached offset/crop/ops/task state before first use.
+    memset(&mFrameLenOffset, 0, sizeof(mFrameLenOffset));
+    memcpy(&mPaddingInfo, paddingInfo, sizeof(cam_padding_info_t));
+    memset(&mCropInfo, 0, sizeof(cam_rect_t));
+    memset(&m_MemOpsTbl, 0, sizeof(mm_camera_map_unmap_ops_tbl_t));
+    memset(&m_OutputCrop, 0, sizeof(cam_stream_parm_buffer_t));
+    memset(&m_ImgProp, 0, sizeof(cam_stream_parm_buffer_t));
+    memset(&mAllocTask, 0, sizeof(mAllocTask));
+    memset(&mMapTask, 0, sizeof(mMapTask));
+    pthread_mutex_init(&mCropLock, NULL);
+    pthread_mutex_init(&mParameterLock, NULL);
+    // Video batch-metadata bookkeeping starts empty.
+    mCurMetaMemory = NULL;
+    mCurBufIndex = -1;
+    mCurMetaIndex = -1;
+    mFirstTimeStamp = 0;
+    memset (&mStreamMetaMemory, 0,
+            (sizeof(MetaMemory) * CAMERA_MIN_VIDEO_BATCH_BUFFERS));
+    pthread_mutex_init(&m_lock, NULL);
+    pthread_cond_init(&m_cond, NULL);
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraStream
+ *
+ * DESCRIPTION: destructor of QCameraStream; waits for background work,
+ *              releases buffers/mappings and deletes the stream handle
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraStream::~QCameraStream()
+{
+    // NOTE(review): crop/parameter mutexes are destroyed before the
+    // background alloc/map tasks are awaited -- confirm those tasks never
+    // take these locks.
+    pthread_mutex_destroy(&mCropLock);
+    pthread_mutex_destroy(&mParameterLock);
+
+    // Make sure any in-flight background allocation/mapping has finished
+    // before tearing down the resources it touches.
+    mAllocator.waitForBackgroundTask(mAllocTaskId);
+    mAllocator.waitForBackgroundTask(mMapTaskId);
+    if (mBufAllocPid != 0) {
+        cond_signal(true);
+        LOGL("Wait for buf allocation thread dead");
+        // Wait for the allocation of additional stream buffers
+        pthread_join(mBufAllocPid, NULL);
+        mBufAllocPid = 0;
+    }
+
+    if (mDefferedAllocation) {
+        // Force release even if buffers were handed out earlier.
+        mStreamBufsAcquired = false;
+        releaseBuffs();
+    }
+
+    // Unmap then free the stream info buffer.
+    unmapStreamInfoBuf();
+    releaseStreamInfoBuf();
+
+    if (mMiscBuf) {
+        unMapBuf(mMiscBuf, CAM_MAPPING_BUF_TYPE_MISC_BUF, NULL);
+        releaseMiscBuf();
+    }
+
+    // delete stream
+    if (mHandle > 0) {
+        mCamOps->delete_stream(mCamHandle, mChannelHandle, mHandle);
+        mHandle = 0;
+    }
+    pthread_mutex_destroy(&m_lock);
+    pthread_cond_destroy(&m_cond);
+}
+
+/*===========================================================================
+ * FUNCTION   : unmapStreamInfoBuf
+ *
+ * DESCRIPTION: unmap the stream info buffer from mm-camera-interface,
+ *              if it is currently mapped
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::unmapStreamInfoBuf()
+{
+    int rc = NO_ERROR;
+
+    // Nothing to do if the stream info buffer was never assigned.
+    if (NULL == mStreamInfoBuf) {
+        return rc;
+    }
+
+    rc = mCamOps->unmap_stream_buf(mCamHandle, mChannelHandle, mHandle,
+            CAM_MAPPING_BUF_TYPE_STREAM_INFO, 0, -1);
+    if (rc < 0) {
+        LOGE("Failed to unmap stream info buffer");
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseMiscBuf
+ *
+ * DESCRIPTION: deallocate and delete the misc buffer, if present
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::releaseMiscBuf()
+{
+    if (NULL != mMiscBuf) {
+        mMiscBuf->deallocate();
+        delete mMiscBuf;
+        mMiscBuf = NULL;
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseStreamInfoBuf
+ *
+ * DESCRIPTION: deallocate and delete the stream info buffer, if present
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::releaseStreamInfoBuf()
+{
+    if (NULL != mStreamInfoBuf) {
+        mStreamInfoBuf->deallocate();
+        delete mStreamInfoBuf;
+        mStreamInfoBuf = NULL;
+        // mStreamInfo pointed into the freed buffer; clear it too.
+        mStreamInfo = NULL;
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : deleteStream
+ *
+ * DESCRIPTION: Deletes a camera stream: reclaims and releases its buffers,
+ *              unmaps the stream info and deletes the interface handle
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraStream::deleteStream()
+{
+    if (mHandle > 0) {
+        // Reclaim the buffers from the interface before releasing them,
+        // then drop the stream info mapping and the stream itself.
+        acquireStreamBufs();
+        releaseBuffs();
+        unmapStreamInfoBuf();
+        mCamOps->delete_stream(mCamHandle, mChannelHandle, mHandle);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : unMapBuf
+ *
+ * DESCRIPTION: unmaps all buffers of a memory object, either through the
+ *              cached mem-ops table or directly via camera ops
+ *
+ * PARAMETERS :
+ *   @Buf        : memory object whose buffers should be unmapped
+ *   @bufType    : buffer type
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops (currently unused)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::unMapBuf(QCameraMemory *Buf,
+        cam_mapping_buf_type bufType, __unused mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+    int32_t rc = NO_ERROR;
+    uint8_t cnt;
+    ssize_t bufSize = BAD_INDEX;
+    uint32_t i;
+
+    cnt = Buf->getCnt();
+    for (i = 0; i < cnt; i++) {
+        // getSize doubles as an index-validity check here.
+        bufSize = Buf->getSize(i);
+        if (BAD_INDEX != bufSize) {
+            // Prefer the interface-provided unmap op when one was
+            // registered via set_config_ops; fall back to camera ops.
+            if (m_MemOpsTbl.unmap_ops == NULL ) {
+                rc = mCamOps->unmap_stream_buf(mCamHandle, mChannelHandle, mHandle,
+                        bufType, i, -1);
+            } else {
+                rc = m_MemOpsTbl.unmap_ops(i, -1, bufType, m_MemOpsTbl.userdata);
+            }
+            if (rc < 0) {
+                LOGE("Failed to unmap buffer");
+                break;
+            }
+        } else {
+            LOGE("Failed to retrieve buffer size (bad index)");
+            rc = BAD_INDEX;
+            break;
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mapBufs
+ *
+ * DESCRIPTION: maps all buffers of a memory object in one bundled call,
+ *              either through the cached mem-ops table or camera ops
+ *
+ * PARAMETERS :
+ *   @Buf        : memory object whose buffers should be mapped
+ *   @bufType    : buffer type
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops (currently unused)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::mapBufs(QCameraMemory *Buf,
+        cam_mapping_buf_type bufType, __unused mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+    int32_t rc = NO_ERROR;
+    uint32_t i = 0;
+
+    // Stage every buffer into a single map list so the interface can
+    // map them in one bundled operation.
+    QCameraBufferMaps bufferMaps;
+    for (i = 0; i < Buf->getCnt(); i++) {
+        ssize_t bufSize = Buf->getSize(i);
+        if (BAD_INDEX == bufSize) {
+            LOGE("Failed to retrieve buffer size (bad index)");
+            return BAD_INDEX;
+        }
+
+        rc = bufferMaps.enqueue(bufType, mHandle, i /*buf index*/, -1 /*plane index*/,
+                0 /*cookie*/, Buf->getFd(i), bufSize, Buf->getPtr(i));
+
+        if (rc < 0) {
+            LOGE("Failed to map buffers");
+            return BAD_INDEX;
+        }
+    }
+
+    cam_buf_map_type_list bufMapList;
+    rc = bufferMaps.getCamBufMapList(bufMapList);
+    if (rc < 0) {
+        LOGE("Failed to map buffers");
+        return BAD_INDEX;
+    }
+
+    // Prefer the interface-provided bundled map op when registered via
+    // set_config_ops; fall back to the camera ops table otherwise.
+    if (m_MemOpsTbl.bundled_map_ops == NULL) {
+        rc = mCamOps->map_stream_bufs(mCamHandle, mChannelHandle, &bufMapList);
+    } else {
+        rc = m_MemOpsTbl.bundled_map_ops(&bufMapList, m_MemOpsTbl.userdata);
+    }
+
+    if (rc < 0) {
+        LOGE("Failed to map buffer");
+        rc = BAD_INDEX;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : backgroundAllocate
+ *
+ * DESCRIPTION: background-task trampoline that allocates stream buffers
+ *
+ * PARAMETERS :
+ *   @data    : opaque QCameraStream pointer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::backgroundAllocate(void *data) {
+    QCameraStream *pme = (QCameraStream *)data;
+    int32_t ret = pme->allocateBuffers();
+    if (NO_ERROR != ret) {
+        LOGE("Error allocating buffers !!!");
+    }
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : backgroundMap
+ *
+ * DESCRIPTION: background-task trampoline that maps stream buffers
+ *
+ * PARAMETERS :
+ *   @data    : opaque QCameraStream pointer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::backgroundMap(void *data) {
+    QCameraStream *pme = (QCameraStream *)data;
+    int32_t ret = pme->mapBuffers();
+    if (NO_ERROR != ret) {
+        LOGE("Error mapping buffers !!!");
+    }
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : init
+ *
+ * DESCRIPTION: initialize stream obj
+ *
+ * PARAMETERS :
+ *   @streamInfoBuf: ptr to buf that contains stream info
+ *   @miscBuf      : ptr to buf that contains misc bufs
+ *   @minNumBuffers: minimum number of stream buffers
+ *   @stream_cb    : stream data notify callback. Can be NULL if not needed
+ *   @userdata     : user data ptr
+ *   @bDynallocBuf : flag to indicate if buffer allocation can be in 2 steps
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::init(QCameraHeapMemory *streamInfoBuf,
+        QCameraHeapMemory *miscBuf,
+        uint8_t minNumBuffers,
+        stream_cb_routine stream_cb,
+        void *userdata,
+        bool bDynallocBuf)
+{
+    int32_t rc = OK;
+
+    // assign and map stream info memory
+    mStreamInfoBuf = streamInfoBuf;
+    mStreamInfo = reinterpret_cast<cam_stream_info_t *>(mStreamInfoBuf->getPtr(0));
+    mNumBufs = minNumBuffers;
+    mDynBufAlloc = bDynallocBuf;
+
+    // Calculate buffer size for deffered allocation
+    if (mDefferedAllocation) {
+        rc = calcOffset(mStreamInfo);
+        if (rc < 0) {
+            LOGE("Failed to calculate stream offset");
+            goto done;
+        }
+
+        // Kick off buffer allocation in the background so it overlaps
+        // with stream configuration below.
+        mAllocTask.bgFunction = backgroundAllocate;
+        mAllocTask.bgArgs = this;
+        mAllocTaskId = mAllocator.scheduleBackgroundTask(&mAllocTask);
+        if (mAllocTaskId == 0) {
+            LOGE("Failed to schedule buffer alloction");
+            rc = -ENOMEM;
+            goto done;
+        }
+    }
+
+    mHandle = mCamOps->add_stream(mCamHandle, mChannelHandle);
+    if (!mHandle) {
+        LOGE("add_stream failed");
+        rc = UNKNOWN_ERROR;
+        goto done;
+    }
+
+    rc = mapBufs(mStreamInfoBuf, CAM_MAPPING_BUF_TYPE_STREAM_INFO, NULL);
+    if (rc < 0) {
+        LOGE("Failed to map stream info buffer");
+        goto err1;
+    }
+
+    mMiscBuf = miscBuf;
+    if (miscBuf) {
+        rc = mapBufs(mMiscBuf, CAM_MAPPING_BUF_TYPE_MISC_BUF, NULL);
+        if (rc < 0) {
+            LOGE("Failed to map miscellaneous buffer");
+            releaseMiscBuf();
+            goto err1;
+        }
+    }
+
+    rc = configStream();
+    if (rc < 0) {
+        LOGE("Failed to config stream ");
+        goto err1;
+    }
+
+    // Buffer mapping also runs in the background for deferred streams;
+    // it is awaited in stop()/the destructor.
+    if (mDefferedAllocation) {
+        mMapTask.bgFunction = backgroundMap;
+        mMapTask.bgArgs = this;
+        mMapTaskId = mAllocator.scheduleBackgroundTask(&mMapTask);
+        if (mMapTaskId == 0) {
+            LOGE("Failed to schedule buffer alloction");
+            rc = -ENOMEM;
+            goto err1;
+        }
+    }
+
+    mDataCB = stream_cb;
+    mUserData = userdata;
+    return 0;
+
+err1:
+    // Undo add_stream; earlier failures (before the handle existed) skip this.
+    mCamOps->delete_stream(mCamHandle, mChannelHandle, mHandle);
+    mHandle = 0;
+done:
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : calcOffset
+ *
+ * DESCRIPTION: calculate frame offset based on format and padding information
+ *
+ * PARAMETERS :
+ *   @streamInfo  : stream information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t QCameraStream::calcOffset(cam_stream_info_t *streamInfo)
+{
+    int32_t rc = 0;
+
+    cam_dimension_t dim = streamInfo->dim;
+    // For non-video streams with online rotation, the output buffer is
+    // laid out in the rotated orientation.
+    if (streamInfo->pp_config.feature_mask & CAM_QCOM_FEATURE_ROTATION &&
+            streamInfo->stream_type != CAM_STREAM_TYPE_VIDEO) {
+        if (streamInfo->pp_config.rotation == ROTATE_90 ||
+                streamInfo->pp_config.rotation == ROTATE_270) {
+            // rotated by 90 or 270, need to switch width and height
+            dim.width = streamInfo->dim.height;
+            dim.height = streamInfo->dim.width;
+        }
+    }
+
+    // Dispatch to the per-stream-type offset calculator; results are
+    // written into streamInfo->buf_planes.
+    switch (streamInfo->stream_type) {
+    case CAM_STREAM_TYPE_PREVIEW:
+    case CAM_STREAM_TYPE_CALLBACK:
+        rc = mm_stream_calc_offset_preview(streamInfo,
+                &dim,
+                &mPaddingInfo,
+                &streamInfo->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_POSTVIEW:
+        rc = mm_stream_calc_offset_post_view(streamInfo->fmt,
+                &dim,
+                &streamInfo->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_SNAPSHOT:
+        rc = mm_stream_calc_offset_snapshot(streamInfo->fmt,
+                &dim,
+                &mPaddingInfo,
+                &streamInfo->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_OFFLINE_PROC:
+        rc = mm_stream_calc_offset_postproc(streamInfo,
+                &mPaddingInfo,
+                &streamInfo->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        rc = mm_stream_calc_offset_video(streamInfo->fmt,
+                &dim, &streamInfo->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_RAW:
+        rc = mm_stream_calc_offset_raw(streamInfo->fmt,
+                &dim,
+                &mPaddingInfo,
+                &streamInfo->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_ANALYSIS:
+        rc = mm_stream_calc_offset_analysis(streamInfo->fmt,
+                &dim,
+                &mPaddingInfo,
+                &streamInfo->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_METADATA:
+        rc = mm_stream_calc_offset_metadata(&dim,
+                &mPaddingInfo,
+                &streamInfo->buf_planes);
+        break;
+    default:
+        LOGE("not supported for stream type %d",
+                 streamInfo->stream_type);
+        rc = -1;
+        break;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : start
+ *
+ * DESCRIPTION: start stream. Will start main stream thread to handle stream
+ *              related ops.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::start()
+{
+    mDataQ.init();
+    int32_t ret = mProcTh.launch(dataProcRoutine, this);
+    if (NO_ERROR == ret) {
+        m_bActive = true;
+    }
+
+    // Reset the video batch-metadata bookkeeping for the new run.
+    mCurMetaMemory = NULL;
+    mCurBufIndex = -1;
+    mCurMetaIndex = -1;
+    mFirstTimeStamp = 0;
+    memset(&mStreamMetaMemory, 0,
+            (sizeof(MetaMemory) * CAMERA_MIN_VIDEO_BATCH_BUFFERS));
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop
+ *
+ * DESCRIPTION: stop stream. Will stop main stream thread
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::stop()
+{
+    m_bActive = false;
+    // Let any pending background allocation/mapping finish before the
+    // processing thread is torn down.
+    mAllocator.waitForBackgroundTask(mAllocTaskId);
+    mAllocator.waitForBackgroundTask(mMapTaskId);
+    return mProcTh.exit();
+}
+
+/*===========================================================================
+ * FUNCTION   : syncRuntimeParams
+ *
+ * DESCRIPTION: query and sync runtime parameters like output crop
+ *              buffer info etc.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::syncRuntimeParams()
+{
+    int32_t rc;
+
+    // Fetch the current output crop from the stream.
+    memset(&m_OutputCrop, 0, sizeof(cam_stream_parm_buffer_t));
+    m_OutputCrop.type = CAM_STREAM_PARAM_TYPE_GET_OUTPUT_CROP;
+    rc = getParameter(m_OutputCrop);
+    if (NO_ERROR != rc) {
+        LOGE("stream getParameter for output crop failed");
+        return rc;
+    }
+
+    // Fetch the current image properties.
+    memset(&m_ImgProp, 0, sizeof(cam_stream_parm_buffer_t));
+    m_ImgProp.type = CAM_STREAM_PARAM_TYPE_GET_IMG_PROP;
+    rc = getParameter(m_ImgProp);
+    if (NO_ERROR != rc) {
+        LOGE("stream getParameter for image prop failed");
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : processZoomDone
+ *
+ * DESCRIPTION: process zoom done event
+ *
+ * PARAMETERS :
+ *   @previewWindow : preview window ops table to set preview crop window
+ *   @crop_info     : crop info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::processZoomDone(preview_stream_ops_t *previewWindow,
+                                       cam_crop_data_t &crop_info)
+{
+    int32_t rc = 0;
+
+    if (!m_bActive) {
+        LOGL("Stream not active");
+        return NO_ERROR;
+    }
+
+    // get stream param for crop info
+    for (int i = 0; i < crop_info.num_of_streams; i++) {
+        // Only the entry matching this stream's server id applies here.
+        if (crop_info.crop_info[i].stream_id == mStreamInfo->stream_svr_id) {
+            // mCropInfo is read by other threads; update it under the lock.
+            pthread_mutex_lock(&mCropLock);
+            mCropInfo = crop_info.crop_info[i].crop;
+            pthread_mutex_unlock(&mCropLock);
+
+            // update preview window crop if it's preview/postview stream
+            if ( (previewWindow != NULL) &&
+                 (mStreamInfo->stream_type == CAM_STREAM_TYPE_PREVIEW ||
+                  mStreamInfo->stream_type == CAM_STREAM_TYPE_POSTVIEW) ) {
+                rc = previewWindow->set_crop(previewWindow,
+                                             mCropInfo.left,
+                                             mCropInfo.top,
+                                             mCropInfo.width,
+                                             mCropInfo.height);
+            }
+            break;
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : processDataNotify
+ *
+ * DESCRIPTION: queue a received frame for the stream thread to process
+ *
+ * PARAMETERS :
+ *   @frame   : stream frame received (heap copy, owned by this call)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::processDataNotify(mm_camera_super_buf_t *frame)
+{
+    LOGD("\n");
+
+    if (!mDataQ.enqueue((void *)frame)) {
+        // Queue refused the frame (e.g. stream stopping). Return the
+        // buffer to the interface if still active, then free the copy.
+        if (m_bActive) {
+            bufDone(frame->bufs[0]->buf_idx);
+        } else {
+            LOGW("Stream thread is not active, no ops here %d", getMyType());
+        }
+        free(frame);
+        return NO_ERROR;
+    }
+
+    return mProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+}
+
+/*===========================================================================
+ * FUNCTION   : dataNotifySYNCCB
+ *
+ * DESCRIPTION: This function registered with interface for
+ *                        SYNC callback if SYNC callback registered.
+ *
+ * PARAMETERS :
+ *   @recvd_frame   : stream frame received
+ *   @userdata      : user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraStream::dataNotifySYNCCB(mm_camera_super_buf_t *recvd_frame,
+        void *userdata)
+{
+    LOGD("\n");
+    QCameraStream* pme = (QCameraStream *)userdata;
+    // Validate the frame really belongs to this stream before dispatching.
+    bool valid = (pme != NULL) &&
+            (recvd_frame != NULL) &&
+            (recvd_frame->bufs[0] != NULL) &&
+            (recvd_frame->bufs[0]->stream_id == pme->getMyHandle());
+    if (!valid) {
+        LOGE("Not a valid stream to handle buf");
+        return;
+    }
+    if (pme->mSyncCBEnabled && (pme->mSYNCDataCB != NULL)) {
+        pme->mSYNCDataCB(recvd_frame, pme, pme->mUserData);
+    }
+}
+
+
+/*===========================================================================
+ * FUNCTION   : dataNotifyCB
+ *
+ * DESCRIPTION: callback for data notify. This function is registered with
+ *              mm-camera-interface to handle data notify
+ *
+ * PARAMETERS :
+ *   @recvd_frame   : stream frame received
+ *   @userdata      : user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraStream::dataNotifyCB(mm_camera_super_buf_t *recvd_frame,
+                                 void *userdata)
+{
+    LOGD("\n");
+    QCameraStream* stream = (QCameraStream *)userdata;
+    if (stream == NULL ||
+        recvd_frame == NULL ||
+        recvd_frame->bufs[0] == NULL ||
+        recvd_frame->bufs[0]->stream_id != stream->getMyHandle()) {
+        LOGE("Not a valid stream to handle buf");
+        return;
+    }
+
+    // Copy the super buffer so it can be processed asynchronously on the
+    // stream thread; recvd_frame belongs to the interface after we return.
+    mm_camera_super_buf_t *frame =
+        (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+    if (frame == NULL) {
+        // Fixed log: this allocation is a super buf, not a buf def.
+        LOGE("No mem for mm_camera_super_buf_t");
+        stream->bufDone(recvd_frame->bufs[0]->buf_idx);
+        return;
+    }
+    *frame = *recvd_frame;
+    stream->processDataNotify(frame);
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : dataProcRoutine
+ *
+ * DESCRIPTION: function to process data in the main stream thread
+ *
+ * PARAMETERS :
+ *   @data    : user data ptr (QCameraStream pointer)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void *QCameraStream::dataProcRoutine(void *data)
+{
+    int running = 1;
+    int ret;
+    QCameraStream *pme = (QCameraStream *)data;
+    QCameraCmdThread *cmdThread = &pme->mProcTh;
+    cmdThread->setName("CAM_strmDatProc");
+
+    LOGD("E");
+    do {
+        do {
+            // Block until a command is posted; retry on EINTR-style wakeups
+            // (only EINVAL is treated as fatal).
+            ret = cam_sem_wait(&cmdThread->cmd_sem);
+            if (ret != 0 && errno != EINVAL) {
+                LOGE("cam_sem_wait error (%s)",
+                       strerror(errno));
+                return NULL;
+            }
+        } while (ret != 0);
+
+        // we got notified about new cmd avail in cmd queue
+        camera_cmd_type_t cmd = cmdThread->getCmd();
+        switch (cmd) {
+        case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+            {
+                LOGH("Do next job");
+                mm_camera_super_buf_t *frame =
+                    (mm_camera_super_buf_t *)pme->mDataQ.dequeue();
+                if (NULL != frame) {
+                    if (pme->mDataCB != NULL) {
+                        // Callback takes ownership of the frame copy.
+                        pme->mDataCB(frame, pme, pme->mUserData);
+                    } else {
+                        // no data cb routine, return buf here
+                        pme->bufDone(frame->bufs[0]->buf_idx);
+                        free(frame);
+                    }
+                }
+            }
+            break;
+        case CAMERA_CMD_TYPE_EXIT:
+            LOGH("Exit");
+            /* flush data buf queue */
+            pme->mDataQ.flush();
+            running = 0;
+            break;
+        default:
+            break;
+        }
+    } while (running);
+    LOGH("X");
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : bufDone
+ *
+ * DESCRIPTION: return stream buffer to kernel
+ *
+ * PARAMETERS :
+ *   @index   : index of buffer to be returned
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::bufDone(uint32_t index)
+{
+    // Reject out-of-range indices and un-initialized buffer defs.
+    if ((index >= mNumBufs) || (mBufDefs == NULL)) {
+        return BAD_INDEX;
+    }
+
+    // qbuf's status (success or failure) is returned to the caller as-is.
+    return mCamOps->qbuf(mCamHandle, mChannelHandle, &mBufDefs[index]);
+}
+
+/*===========================================================================
+ * FUNCTION   : bufDone
+ *
+ * DESCRIPTION: return stream buffer to kernel
+ *
+ * PARAMETERS :
+ *   @opaque    : stream frame/metadata buf to be returned
+ *   @isMetaData: flag if returned opaque is a metadatabuf or the real frame ptr
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::bufDone(const void *opaque, bool isMetaData)
+{
+    int32_t rc = NO_ERROR;
+    int index = -1;
+
+    // Resolve the opaque pointer to a buffer index, using the batch
+    // container memory for batch-mode streams and the regular stream
+    // buffers otherwise.
+    if ((mStreamInfo != NULL)
+            && (mStreamInfo->streaming_mode == CAM_STREAMING_MODE_BATCH)
+            && (mStreamBatchBufs != NULL)) {
+        index = mStreamBatchBufs->getMatchBufIndex(opaque, isMetaData);
+    } else if (mStreamBufs != NULL){
+        index = mStreamBufs->getMatchBufIndex(opaque, isMetaData);
+    }
+
+    if (index == -1 || index >= mNumBufs || mBufDefs == NULL) {
+        LOGE("Cannot find buf for opaque data = %p", opaque);
+        return BAD_INDEX;
+    }
+
+    // If this index carries batched video metadata, return every buffer
+    // recorded in the batch and reset its bookkeeping; otherwise return
+    // the single buffer directly.
+    if ((CAMERA_MIN_VIDEO_BATCH_BUFFERS > index)
+            && mStreamMetaMemory[index].numBuffers > 0) {
+        for (int i= 0; i < mStreamMetaMemory[index].numBuffers; i++) {
+            uint8_t buf_idx = mStreamMetaMemory[index].buf_index[i];
+            bufDone((uint32_t)buf_idx);
+        }
+        mStreamMetaMemory[index].consumerOwned = FALSE;
+        mStreamMetaMemory[index].numBuffers = 0;
+    } else {
+        LOGH("Buffer Index = %d, Frame Idx = %d", index,
+                mBufDefs[index].frame_idx);
+        rc = bufDone((uint32_t)index);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumQueuedBuf
+ *
+ * DESCRIPTION: return queued buffer count
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : queued buffer count, or -1 when the stream is inactive
+ *==========================================================================*/
+int32_t QCameraStream::getNumQueuedBuf()
+{
+    int32_t count = -1;
+    if (mHandle > 0) {
+        count = mCamOps->get_queued_buf_count(mCamHandle, mChannelHandle, mHandle);
+    }
+    if (-1 == count) {
+        LOGE("stream is not in active state. Invalid operation");
+    }
+    return count;
+}
+
+/*===========================================================================
+ * FUNCTION   : getBufs
+ *
+ * DESCRIPTION: allocate stream buffers
+ *
+ * PARAMETERS :
+ *   @offset     : offset info of stream buffers
+ *   @num_bufs   : number of buffers allocated
+ *   @initial_reg_flag: flag to indicate if buffer needs to be registered
+ *                      at kernel initially
+ *   @bufs       : output of allocated buffers
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getBufs(cam_frame_len_offset_t *offset,
+        uint8_t *num_bufs,
+        uint8_t **initial_reg_flag,
+        mm_camera_buf_def_t **bufs,
+        mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+    int rc = NO_ERROR;
+    uint8_t *regFlags;
+
+    if (!ops_tbl) {
+        LOGE("ops_tbl is NULL");
+        return INVALID_OPERATION;
+    }
+
+    mFrameLenOffset = *offset;
+
+    // When dynamic allocation is enabled, allocate only the minimum set of
+    // buffers up front; BufAllocRoutine allocates the remainder in the
+    // background.
+    uint8_t numBufAlloc = mNumBufs;
+    mNumBufsNeedAlloc = 0;
+    if (mDynBufAlloc) {
+        numBufAlloc = CAMERA_MIN_ALLOCATED_BUFFERS;
+        if (numBufAlloc > mNumBufs) {
+            mDynBufAlloc = false;
+            numBufAlloc = mNumBufs;
+        } else {
+            mNumBufsNeedAlloc = (uint8_t)(mNumBufs - numBufAlloc);
+        }
+    }
+
+    /* For some stream types, buffer allocation may have already begun
+     * preemptively. If this is the case, we need to wait for the
+     * preemptive allocation to complete before proceeding. */
+    mAllocator.waitForDeferredAlloc(mStreamInfo->stream_type);
+
+    //Allocate stream buffer
+    mStreamBufs = mAllocator.allocateStreamBuf(mStreamInfo->stream_type,
+            mFrameLenOffset.frame_len, mFrameLenOffset.mp[0].stride,
+            mFrameLenOffset.mp[0].scanline, numBufAlloc);
+    if (!mStreamBufs) {
+        LOGE("Failed to allocate stream buffers");
+        return NO_MEMORY;
+    }
+
+    mNumBufs = (uint8_t)(numBufAlloc + mNumBufsNeedAlloc);
+    uint8_t numBufsToMap = mStreamBufs->getMappable();
+
+    QCameraBufferMaps bufferMaps;
+    for (uint32_t i = 0; i < numBufsToMap; i++) {
+        ssize_t bufSize = mStreamBufs->getSize(i);
+        if (BAD_INDEX == bufSize) {
+            LOGE("Failed to retrieve buffer size (bad index)");
+            // Fix: release the freshly allocated stream buffers instead of
+            // leaking them on this early-error path.
+            mStreamBufs->deallocate();
+            delete mStreamBufs;
+            mStreamBufs = NULL;
+            return INVALID_OPERATION;
+        }
+
+        rc = bufferMaps.enqueue(CAM_MAPPING_BUF_TYPE_STREAM_BUF,
+                0 /*stream id*/, i /*buf index*/, -1 /*plane index*/,
+                0 /*cookie*/, mStreamBufs->getFd(i), bufSize,
+                mStreamBufs->getPtr(i));
+
+        if (rc < 0) {
+            LOGE("Failed to map buffers");
+            // Fix: also release stream buffers here; the original leaked
+            // mStreamBufs when enqueue failed.
+            mStreamBufs->deallocate();
+            delete mStreamBufs;
+            mStreamBufs = NULL;
+            return BAD_INDEX;
+        }
+    }
+
+    cam_buf_map_type_list bufMapList;
+    rc = bufferMaps.getCamBufMapList(bufMapList);
+    if (rc == NO_ERROR) {
+        rc = ops_tbl->bundled_map_ops(&bufMapList, ops_tbl->userdata);
+    }
+    if (rc < 0) {
+        LOGE("map_stream_buf failed: %d", rc);
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+        mStreamBufs = NULL;
+        return INVALID_OPERATION;
+    }
+
+    //regFlags array is allocated by us, but consumed and freed by mm-camera-interface
+    regFlags = (uint8_t *)malloc(sizeof(uint8_t) * mNumBufs);
+    if (!regFlags) {
+        LOGE("Out of memory");
+        for (uint32_t i = 0; i < numBufsToMap; i++) {
+            ops_tbl->unmap_ops(i, -1, CAM_MAPPING_BUF_TYPE_STREAM_BUF, ops_tbl->userdata);
+        }
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+        mStreamBufs = NULL;
+        return NO_MEMORY;
+    }
+    memset(regFlags, 0, sizeof(uint8_t) * mNumBufs);
+
+    mBufDefs = (mm_camera_buf_def_t *)malloc(mNumBufs * sizeof(mm_camera_buf_def_t));
+    if (mBufDefs == NULL) {
+        // Fix: this path is a malloc failure, not a getRegFlags failure.
+        LOGE("Out of memory");
+        for (uint32_t i = 0; i < numBufsToMap; i++) {
+            ops_tbl->unmap_ops(i, -1, CAM_MAPPING_BUF_TYPE_STREAM_BUF, ops_tbl->userdata);
+        }
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+        mStreamBufs = NULL;
+        free(regFlags);
+        regFlags = NULL;
+        return INVALID_OPERATION;
+    }
+    memset(mBufDefs, 0, mNumBufs * sizeof(mm_camera_buf_def_t));
+    for (uint32_t i = 0; i < numBufsToMap; i++) {
+        mStreamBufs->getBufDef(mFrameLenOffset, mBufDefs[i], i);
+    }
+
+    rc = mStreamBufs->getRegFlags(regFlags);
+    if (rc < 0) {
+        LOGE("getRegFlags failed %d", rc);
+        for (uint32_t i = 0; i < numBufsToMap; i++) {
+            ops_tbl->unmap_ops(i, -1, CAM_MAPPING_BUF_TYPE_STREAM_BUF, ops_tbl->userdata);
+        }
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+        mStreamBufs = NULL;
+        free(mBufDefs);
+        mBufDefs = NULL;
+        free(regFlags);
+        regFlags = NULL;
+        return INVALID_OPERATION;
+    }
+
+    *num_bufs = mNumBufs;
+    *initial_reg_flag = regFlags;
+    *bufs = mBufDefs;
+    // Fix: %p is the correct conversion for a pointer; 0x%x with a pointer
+    // argument is undefined behavior on LP64.
+    LOGH("stream type: %d, mRegFlags: %p, numBufs: %d",
+             mStreamInfo->stream_type, regFlags, mNumBufs);
+
+    if (mNumBufsNeedAlloc > 0) {
+        pthread_mutex_lock(&m_lock);
+        wait_for_cond = TRUE;
+        pthread_mutex_unlock(&m_lock);
+        LOGH("Still need to allocate %d buffers",
+               mNumBufsNeedAlloc);
+        // start another thread to allocate the rest of buffers
+        pthread_create(&mBufAllocPid,
+                       NULL,
+                       BufAllocRoutine,
+                       this);
+        pthread_setname_np(mBufAllocPid, "CAM_strmBuf");
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getBufsDeferred
+ *
+ * DESCRIPTION: allocate deferred stream buffers
+ *
+ * PARAMETERS :
+ *   @offset     : offset info of stream buffers
+ *   @num_bufs   : number of buffers allocated
+ *   @initial_reg_flag: flag to indicate if buffer needs to be registered
+ *                      at kernel initially
+ *   @bufs       : output of allocated buffers
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getBufsDeferred(__unused cam_frame_len_offset_t *offset,
+        uint8_t *num_bufs,
+        uint8_t **initial_reg_flag,
+        mm_camera_buf_def_t **bufs,
+        __unused mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+    int32_t rc = NO_ERROR;
+    // wait for allocation
+    rc = mAllocator.waitForBackgroundTask(mAllocTaskId);
+    if (rc != NO_ERROR) {
+        LOGE("Allocation Failed");
+        return NO_MEMORY;
+    }
+
+    if (!mRegFlags || !mBufDefs) {
+        LOGE("reg flags or buf defs uninitialized");
+        return NO_MEMORY;
+    }
+
+    *initial_reg_flag   = mRegFlags;
+    *num_bufs           = mNumBufs;
+    *bufs               = mBufDefs;
+
+    // Fix: %p is the correct conversion for a pointer; 0x%x with a pointer
+    // argument is a format/argument mismatch (undefined behavior on LP64).
+    LOGH("stream type: %d, mRegFlags: %p, numBufs: %d",
+             getMyType(), mRegFlags, mNumBufs);
+
+    return NO_ERROR;
+}
+/*===========================================================================
+ * FUNCTION   : mapNewBuffer
+ *
+ * DESCRIPTION: map a single, newly added stream buffer into the backend
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer to map
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::mapNewBuffer(uint32_t index)
+{
+    LOGH("E - index = %d", index);
+
+    int rc = NO_ERROR;
+
+    // Nothing to map if the stream buffers were never allocated.
+    if (NULL == mStreamBufs) {
+        LOGE("Invalid Operation");
+        return INVALID_OPERATION;
+    }
+
+    const ssize_t bufSize = mStreamBufs->getSize(index);
+    if (bufSize == BAD_INDEX) {
+        LOGE("Failed to retrieve buffer size (bad index)");
+        return INVALID_OPERATION;
+    }
+
+    // Build a one-entry map list and hand it to the bundled map op.
+    cam_buf_map_type_list bufMapList;
+    rc = QCameraBufferMaps::makeSingletonBufMapList(
+            CAM_MAPPING_BUF_TYPE_STREAM_BUF, 0 /*stream id*/, index,
+            -1 /*plane index*/, 0 /*cookie*/, mStreamBufs->getFd(index),
+            bufSize, bufMapList, mStreamBufs->getPtr(index));
+    if (NO_ERROR == rc) {
+        rc = m_MemOpsTbl.bundled_map_ops(&bufMapList, m_MemOpsTbl.userdata);
+    }
+
+    if (rc < 0) {
+        LOGE("map_stream_buf failed: %d", rc);
+        rc = INVALID_OPERATION;
+    } else {
+        // Refresh the buffer definition now that the mapping succeeded.
+        mStreamBufs->getBufDef(mFrameLenOffset, mBufDefs[index], index);
+    }
+
+    LOGH("X - rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : allocateBuffers
+ *
+ * DESCRIPTION: allocate stream buffers in the deferred context; batch
+ *              streams are delegated to allocateBatchBufs()
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::allocateBuffers()
+{
+    int32_t rc = NO_ERROR;
+
+    mFrameLenOffset = mStreamInfo->buf_planes.plane_info;
+
+    if (mStreamInfo->streaming_mode == CAM_STREAMING_MODE_BATCH) {
+        return allocateBatchBufs(&mFrameLenOffset,
+                &mNumBufs, &mRegFlags,
+                &mBufDefs, NULL);
+    }
+
+    /* This allocation is running in the deferred context, so it
+     * is safe (and necessary) to assume any preemptive allocation
+     * is already complete. Therefore, no need to wait here. */
+
+    // With dynamic allocation, allocate only the minimum set now and let
+    // BufAllocRoutine allocate the rest in the background.
+    uint8_t numBufAlloc = mNumBufs;
+    mNumBufsNeedAlloc = 0;
+    if (mDynBufAlloc) {
+        numBufAlloc = CAMERA_MIN_ALLOCATED_BUFFERS;
+        if (numBufAlloc > mNumBufs) {
+            mDynBufAlloc = false;
+            numBufAlloc = mNumBufs;
+        } else {
+            mNumBufsNeedAlloc = (uint8_t)(mNumBufs - numBufAlloc);
+        }
+    }
+
+    //Allocate and map stream info buffer
+    mStreamBufs = mAllocator.allocateStreamBuf(mStreamInfo->stream_type,
+            mFrameLenOffset.frame_len,
+            mFrameLenOffset.mp[0].stride,
+            mFrameLenOffset.mp[0].scanline,
+            numBufAlloc);
+
+    if (!mStreamBufs) {
+        LOGE("Failed to allocate stream buffers");
+        return NO_MEMORY;
+    }
+
+    mNumBufs = (uint8_t)(numBufAlloc + mNumBufsNeedAlloc);
+    uint8_t numBufsToMap = mStreamBufs->getMappable();
+
+    //regFlags array is allocated by us,
+    // but consumed and freed by mm-camera-interface
+    mRegFlags = (uint8_t *)malloc(sizeof(uint8_t) * mNumBufs);
+    if (!mRegFlags) {
+        LOGE("Out of memory");
+        for (uint32_t i = 0; i < numBufsToMap; i++) {
+            unmapBuf(CAM_MAPPING_BUF_TYPE_STREAM_BUF, i, -1, NULL);
+        }
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+        mStreamBufs = NULL;
+        return NO_MEMORY;
+    }
+    memset(mRegFlags, 0, sizeof(uint8_t) * mNumBufs);
+
+    size_t bufDefsSize = mNumBufs * sizeof(mm_camera_buf_def_t);
+    mBufDefs = (mm_camera_buf_def_t *)malloc(bufDefsSize);
+    if (mBufDefs == NULL) {
+        // Fix: this path is a malloc failure, not a getRegFlags failure.
+        LOGE("Out of memory");
+        for (uint32_t i = 0; i < numBufsToMap; i++) {
+            unmapBuf(CAM_MAPPING_BUF_TYPE_STREAM_BUF, i, -1, NULL);
+        }
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+        mStreamBufs = NULL;
+        free(mRegFlags);
+        mRegFlags = NULL;
+        return INVALID_OPERATION;
+    }
+    memset(mBufDefs, 0, bufDefsSize);
+    for (uint32_t i = 0; i < numBufsToMap; i++) {
+        mStreamBufs->getBufDef(mFrameLenOffset, mBufDefs[i], i);
+    }
+
+    rc = mStreamBufs->getRegFlags(mRegFlags);
+    if (rc < 0) {
+        LOGE("getRegFlags failed %d", rc);
+        for (uint32_t i = 0; i < numBufsToMap; i++) {
+            unmapBuf(CAM_MAPPING_BUF_TYPE_STREAM_BUF, i, -1, NULL);
+        }
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+        mStreamBufs = NULL;
+        free(mBufDefs);
+        mBufDefs = NULL;
+        free(mRegFlags);
+        mRegFlags = NULL;
+        return INVALID_OPERATION;
+    }
+
+    if (mNumBufsNeedAlloc > 0) {
+        pthread_mutex_lock(&m_lock);
+        wait_for_cond = TRUE;
+        pthread_mutex_unlock(&m_lock);
+        LOGH("Still need to allocate %d buffers",
+               mNumBufsNeedAlloc);
+        // start another thread to allocate the rest of buffers
+        pthread_create(&mBufAllocPid,
+                       NULL,
+                       BufAllocRoutine,
+                       this);
+        // Fix: "CAM_strmBufAlloc" is 16 chars; Linux limits thread names to
+        // 15 chars + NUL, so pthread_setname_np failed with ERANGE. Use the
+        // same short name as getBufs().
+        pthread_setname_np(mBufAllocPid, "CAM_strmBuf");
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mapBuffers
+ *
+ * DESCRIPTION: map the previously allocated stream buffers to the backend
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::mapBuffers()
+{
+    QCameraBufferMaps bufferMaps;
+
+    // The background allocation task must finish before we can map.
+    int32_t rc = mAllocator.waitForBackgroundTask(mAllocTaskId);
+    if (rc != NO_ERROR) {
+        LOGE("Allocation Failed");
+        return NO_MEMORY;
+    }
+
+    if (NULL == mStreamBufs) {
+        LOGE("Stream buffers not allocated");
+        return UNKNOWN_ERROR;
+    }
+
+    const uint8_t mappableCnt = mStreamBufs->getMappable();
+    for (uint32_t idx = 0; idx < mappableCnt; idx++) {
+        const ssize_t bufSize = mStreamBufs->getSize(idx);
+        if (bufSize == BAD_INDEX) {
+            LOGE("Bad index %u", idx);
+            rc = BAD_INDEX;
+            break;
+        }
+
+        rc = bufferMaps.enqueue(CAM_MAPPING_BUF_TYPE_STREAM_BUF, mHandle,
+                idx /*buf index*/, -1 /*plane index*/, 0 /*cookie*/,
+                mStreamBufs->getFd(idx), bufSize,
+                mStreamBufs->getPtr(idx));
+        if (rc < 0) {
+            LOGE("Failed to map buffers");
+            rc = BAD_INDEX;
+            break;
+        }
+    }
+
+    // Hand the accumulated list to the bundled map operation in one call.
+    cam_buf_map_type_list bufMapList;
+    if (NO_ERROR == rc) {
+        rc = bufferMaps.getCamBufMapList(bufMapList);
+    }
+    if (NO_ERROR == rc) {
+        rc = mapBufs(bufMapList, NULL);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : allocateBatchBufs
+ *
+ * DESCRIPTION: allocate stream batch buffers and stream buffers
+ *
+ * PARAMETERS :
+ *   @offset     : offset info of stream buffers
+ *   @num_bufs   : number of buffers allocated
+ *   @initial_reg_flag: flag to indicate if buffer needs to be registered
+ *                      at kernel initially
+ *   @bufs       : output of allocated buffers
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::allocateBatchBufs(cam_frame_len_offset_t *offset,
+        uint8_t *num_bufs, uint8_t **initial_reg_flag,
+        mm_camera_buf_def_t **bufs, mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+    int rc = NO_ERROR;
+    uint8_t *regFlags;
+    QCameraBufferMaps bufferMaps;
+    QCameraBufferMaps planeBufferMaps;
+
+    mFrameLenOffset = *offset;
+
+    LOGH("Batch Buffer allocation stream type = %d", getMyType());
+
+    //Allocate stream batch buffer
+    mStreamBatchBufs = mAllocator.allocateStreamUserBuf (mStreamInfo);
+    if (!mStreamBatchBufs) {
+        LOGE("Failed to allocate stream batch buffers");
+        return NO_MEMORY;
+    }
+
+    uint8_t numBufsToMap = mStreamBatchBufs->getMappable();
+
+    //map batch buffers
+    for (uint32_t i = 0; i < numBufsToMap; i++) {
+        rc = bufferMaps.enqueue(CAM_MAPPING_BUF_TYPE_STREAM_USER_BUF,
+                0 /*stream id*/, i /*buf index*/, -1 /*plane index*/,
+                0 /*cookie*/, mStreamBatchBufs->getFd(i),
+                mNumBufs, mStreamBatchBufs->getPtr(i));
+
+        if (rc < 0) {
+            LOGE("Failed to map buffers");
+            rc = BAD_INDEX;
+            break;
+        }
+    }
+
+    cam_buf_map_type_list bufMapList;
+    if (rc == NO_ERROR) {
+        rc = bufferMaps.getCamBufMapList(bufMapList);
+    }
+    if (rc == NO_ERROR) {
+        rc = mapBufs(bufMapList, ops_tbl);
+    }
+    if (rc < 0) {
+        LOGE("Failed to map stream batch buffers");
+        mStreamBatchBufs->deallocate();
+        delete mStreamBatchBufs;
+        mStreamBatchBufs = NULL;
+        return NO_MEMORY;
+    }
+
+    /*calculate stream Buffer count*/
+    mNumPlaneBufs =
+            (mNumBufs * mStreamInfo->user_buf_info.frame_buf_cnt);
+
+    /* For some stream types, buffer allocation may have already begun
+     * preemptively. If this is the case, we need to wait for the
+     * preemptive allocation to complete before proceeding. */
+    mAllocator.waitForDeferredAlloc(mStreamInfo->stream_type);
+
+    //Allocate stream buffer
+    mStreamBufs = mAllocator.allocateStreamBuf(mStreamInfo->stream_type,
+            mFrameLenOffset.frame_len,mFrameLenOffset.mp[0].stride,
+            mFrameLenOffset.mp[0].scanline,mNumPlaneBufs);
+    if (!mStreamBufs) {
+        LOGE("Failed to allocate stream buffers");
+        rc = NO_MEMORY;
+        goto err1;
+    }
+
+    //Map plane stream buffers
+    for (uint32_t i = 0; i < mNumPlaneBufs; i++) {
+        ssize_t bufSize = mStreamBufs->getSize(i);
+        if (BAD_INDEX != bufSize) {
+            rc = planeBufferMaps.enqueue(CAM_MAPPING_BUF_TYPE_STREAM_BUF,
+                    0 /*stream id*/, i /*buf index*/, -1 /*plane index*/,
+                    0 /*cookie*/, mStreamBufs->getFd(i), bufSize,
+                    mStreamBufs->getPtr(i));
+
+            if (rc < 0) {
+                LOGE("Failed to map buffers");
+                mStreamBufs->deallocate();
+                delete mStreamBufs;
+                mStreamBufs = NULL;
+                rc = INVALID_OPERATION;
+                goto err1;
+            }
+        } else {
+            LOGE("Failed to retrieve buffer size (bad index)");
+            mStreamBufs->deallocate();
+            delete mStreamBufs;
+            mStreamBufs = NULL;
+            rc = INVALID_OPERATION;
+            goto err1;
+        }
+    }
+
+    cam_buf_map_type_list planeBufMapList;
+    rc = planeBufferMaps.getCamBufMapList(planeBufMapList);
+    if (rc == NO_ERROR) {
+        rc = mapBufs(planeBufMapList, ops_tbl);
+    }
+
+    if (rc < 0) {
+        LOGE("map_stream_buf failed: %d", rc);
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+        mStreamBufs = NULL;
+        rc = INVALID_OPERATION;
+        goto err1;
+    }
+
+    LOGD("BATCH Buf Count = %d, Plane Buf Cnt = %d",
+            mNumBufs, mNumPlaneBufs);
+
+    //regFlags array is allocated by us, but consumed and freed by mm-camera-interface
+    regFlags = (uint8_t *)malloc(sizeof(uint8_t) * mNumBufs);
+    if (!regFlags) {
+        LOGE("Out of memory");
+        for (uint32_t i = 0; i < mNumPlaneBufs; i++) {
+            unmapBuf(CAM_MAPPING_BUF_TYPE_STREAM_BUF, i, -1, ops_tbl);
+        }
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+        mStreamBufs = NULL;
+        rc = NO_MEMORY;
+        goto err1;
+    }
+    memset(regFlags, 0, sizeof(uint8_t) * mNumBufs);
+    // Batch buffers are always registered with the kernel up front.
+    for (uint32_t i = 0; i < mNumBufs; i++) {
+        regFlags[i] = 1;
+    }
+
+    mBufDefs = (mm_camera_buf_def_t *)malloc(mNumBufs * sizeof(mm_camera_buf_def_t));
+    if (mBufDefs == NULL) {
+        // Fix: this path is a malloc failure, not a getRegFlags failure.
+        LOGE("Out of memory");
+        for (uint32_t i = 0; i < mNumPlaneBufs; i++) {
+            unmapBuf(CAM_MAPPING_BUF_TYPE_STREAM_BUF, i, -1, ops_tbl);
+        }
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+        mStreamBufs = NULL;
+        free(regFlags);
+        regFlags = NULL;
+        rc = INVALID_OPERATION;
+        goto err1;
+    }
+    memset(mBufDefs, 0, mNumBufs * sizeof(mm_camera_buf_def_t));
+
+    mPlaneBufDefs = (mm_camera_buf_def_t *)
+            malloc(mNumPlaneBufs * (sizeof(mm_camera_buf_def_t)));
+    if (mPlaneBufDefs == NULL) {
+        LOGE("No Memory");
+        // Fix: the original freed regFlags twice on this path; free each
+        // resource exactly once.
+        free(regFlags);
+        regFlags = NULL;
+        free(mBufDefs);
+        mBufDefs = NULL;
+        for (uint32_t i = 0; i < mNumPlaneBufs; i++) {
+            unmapBuf(CAM_MAPPING_BUF_TYPE_STREAM_BUF, i, -1, ops_tbl);
+        }
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+        mStreamBufs = NULL;
+        rc = INVALID_OPERATION;
+        goto err1;
+    }
+    memset(mPlaneBufDefs, 0,
+             mNumPlaneBufs * (sizeof(mm_camera_buf_def_t)));
+
+    for (uint32_t i = 0; i < mStreamInfo->num_bufs; i++) {
+        mStreamBatchBufs->getUserBufDef(mStreamInfo->user_buf_info,
+                mBufDefs[i], i, mFrameLenOffset, mPlaneBufDefs,
+                mStreamBufs);
+    }
+
+    *num_bufs = mNumBufs;
+    *initial_reg_flag = regFlags;
+    *bufs = mBufDefs;
+    LOGH("stream type: %d, numBufs: %d mNumPlaneBufs: %d",
+             mStreamInfo->stream_type, mNumBufs, mNumPlaneBufs);
+
+    return NO_ERROR;
+
+err1:
+    // Common unwind: release the batch buffers allocated at the top.
+    mStreamBatchBufs->deallocate();
+    delete mStreamBatchBufs;
+    mStreamBatchBufs = NULL;
+    return rc;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : releaseBuffs
+ *
+ * DESCRIPTION: unmap and deallocate the stream buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::releaseBuffs()
+{
+    int rc = NO_ERROR;
+
+    // Batch streams have their own teardown path.
+    if (CAM_STREAMING_MODE_BATCH == mStreamInfo->streaming_mode) {
+        return releaseBatchBufs(NULL);
+    }
+
+    if ((mStreamBufs != NULL) && (mBufDefs != NULL)) {
+        const uint8_t mappedCnt = mStreamBufs->getMappable();
+        for (uint32_t idx = 0; idx < mappedCnt; idx++) {
+            rc = unmapBuf(CAM_MAPPING_BUF_TYPE_STREAM_BUF, idx, -1, NULL);
+            if (rc < 0) {
+                LOGE("map_stream_buf failed: %d", rc);
+            }
+        }
+
+        // mBufDefs just keep a ptr to the buffer
+        // mm-camera-interface own the buffer, so no need to free
+        mBufDefs = NULL;
+        memset(&mFrameLenOffset, 0, sizeof(mFrameLenOffset));
+    }
+
+    // Only deallocate buffers we own; acquired buffers stay alive.
+    if ((mStreamBufs != NULL) && !mStreamBufsAcquired) {
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+        mStreamBufs = NULL;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseBatchBufs
+ *
+ * DESCRIPTION: method to deallocate stream buffers and batch buffers
+ *
+ * PARAMETERS :
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::releaseBatchBufs(mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+    int rc = NO_ERROR;
+
+    if (NULL != mPlaneBufDefs) {
+        for (uint32_t i = 0; i < mNumPlaneBufs; i++) {
+            rc = unmapBuf(CAM_MAPPING_BUF_TYPE_STREAM_BUF, i, -1, ops_tbl);
+            if (rc < 0) {
+                LOGE("map_stream_buf failed: %d", rc);
+            }
+        }
+
+        // mBufDefs just keep a ptr to the buffer
+        // mm-camera-interface own the buffer, so no need to free
+        mPlaneBufDefs = NULL;
+        memset(&mFrameLenOffset, 0, sizeof(mFrameLenOffset));
+        mNumPlaneBufs = 0;
+    }
+
+    if (mStreamBufs != NULL) {
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+        // Fix: the original left mStreamBufs dangling after delete; other
+        // methods NULL-check it, so a stale pointer risks use-after-free.
+        mStreamBufs = NULL;
+    }
+
+    mBufDefs = NULL;
+
+    if (mStreamBatchBufs != NULL) {
+        for (uint8_t i = 0; i < mStreamBatchBufs->getCnt(); i++) {
+            unmapBuf(CAM_MAPPING_BUF_TYPE_STREAM_USER_BUF, i, -1, ops_tbl);
+        }
+        mStreamBatchBufs->deallocate();
+        delete mStreamBatchBufs;
+        mStreamBatchBufs = NULL;
+    }
+    return rc;
+
+}
+
+/*===========================================================================
+ * FUNCTION   : BufAllocRoutine
+ *
+ * DESCRIPTION: function to allocate additional stream buffers
+ *
+ * PARAMETERS :
+ *   @data    : user data ptr (the owning QCameraStream instance)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void *QCameraStream::BufAllocRoutine(void *data)
+{
+    QCameraStream *pme = (QCameraStream *)data;
+    int32_t rc = NO_ERROR;
+
+    LOGH("E");
+    // Block until cond_signal() clears wait_for_cond; a forced exit sets
+    // mNumBufsNeedAlloc to 0, making the body below a no-op.
+    pme->cond_wait();
+    if (pme->mNumBufsNeedAlloc > 0) {
+        // Buffers [numBufAlloc, mNumBufs) are the ones still missing.
+        uint8_t numBufAlloc = (uint8_t)(pme->mNumBufs - pme->mNumBufsNeedAlloc);
+        rc = pme->mAllocator.allocateMoreStreamBuf(pme->mStreamBufs,
+                                                   pme->mFrameLenOffset.frame_len,
+                                                   pme->mNumBufsNeedAlloc);
+        if (rc != NO_ERROR) {
+            LOGE("Failed to allocate buffers");
+            pme->mNumBufsNeedAlloc = 0;
+            return NULL;
+        }
+
+        pme->mNumBufsNeedAlloc = 0;
+        // Queue up map requests for each newly allocated buffer, then issue
+        // them to the backend in a single bundled call below.
+        QCameraBufferMaps bufferMaps;
+        for (uint32_t i = numBufAlloc; i < pme->mNumBufs; i++) {
+            ssize_t bufSize = pme->mStreamBufs->getSize(i);
+            if (BAD_INDEX == bufSize) {
+                LOGE("Failed to retrieve buffer size (bad index)");
+                return NULL;
+            }
+
+            rc = bufferMaps.enqueue(CAM_MAPPING_BUF_TYPE_STREAM_BUF,
+                    pme->mHandle, i /*buf index*/, -1 /*plane index*/,
+                    0 /*cookie*/, pme->mStreamBufs->getFd(i), bufSize,
+                    pme->mStreamBufs->getPtr(i));
+
+            if (rc < 0) {
+                LOGE("Failed to map buffers");
+                return NULL;
+            }
+        }
+
+        cam_buf_map_type_list bufMapList;
+        rc = bufferMaps.getCamBufMapList(bufMapList);
+        if (rc == NO_ERROR) {
+            rc = pme->m_MemOpsTbl.bundled_map_ops(&bufMapList, pme->m_MemOpsTbl.userdata);
+        }
+        if (rc != 0) {
+            LOGE("Failed to map buffers with return code %d", rc);
+            return NULL;
+        }
+
+        // Fill in the buffer definitions and immediately enqueue the new
+        // buffers to the channel so the pipeline can use them.
+        for (uint32_t i = numBufAlloc; i < pme->mNumBufs; i++) {
+            pme->mStreamBufs->getBufDef(pme->mFrameLenOffset, pme->mBufDefs[i], i);
+            pme->mCamOps->qbuf(pme->mCamHandle, pme->mChannelHandle,
+                    &pme->mBufDefs[i]);
+        }
+    }
+    LOGH("X");
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : cond_signal
+ *
+ * DESCRIPTION: wake the buffer-allocation thread if it is waiting
+ *
+ * PARAMETERS :
+ *   @forceExit : when true, also cancel any pending background allocation
+ *==========================================================================*/
+void QCameraStream::cond_signal(bool forceExit)
+{
+    pthread_mutex_lock(&m_lock);
+    if (TRUE == wait_for_cond) {
+        wait_for_cond = FALSE;
+        if (forceExit) {
+            // Drop the remaining allocation work so the thread exits early.
+            mNumBufsNeedAlloc = 0;
+        }
+        pthread_cond_signal(&m_cond);
+    }
+    pthread_mutex_unlock(&m_lock);
+}
+
+
+/*===========================================================================
+ * FUNCTION   : cond_wait
+ *
+ * DESCRIPTION: block while flag "wait_for_cond" is set
+ *
+ *==========================================================================*/
+void QCameraStream::cond_wait()
+{
+    pthread_mutex_lock(&m_lock);
+    // Loop (not just a single wait) to tolerate spurious wakeups.
+    while (TRUE == wait_for_cond) {
+        pthread_cond_wait(&m_cond, &m_lock);
+    }
+    pthread_mutex_unlock(&m_lock);
+}
+
+/*===========================================================================
+ * FUNCTION   : putBufs
+ *
+ * DESCRIPTION: deallocate stream buffers
+ *
+ * PARAMETERS :
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::putBufs(mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+    int rc = NO_ERROR;
+
+    if (mBufAllocPid != 0) {
+        cond_signal(true);
+        LOGL("wait for buf allocation thread dead");
+        pthread_join(mBufAllocPid, NULL);
+        mBufAllocPid = 0;
+        LOGL("return from buf allocation thread");
+    }
+
+    // Fix: guard against a prior getBufs() failure or a double putBufs();
+    // the original dereferenced mStreamBufs and ops_tbl unconditionally.
+    if ((mStreamBufs == NULL) || (ops_tbl == NULL)) {
+        LOGE("Invalid Operation");
+        return INVALID_OPERATION;
+    }
+
+    uint8_t numBufsToUnmap = mStreamBufs->getMappable();
+    for (uint32_t i = 0; i < numBufsToUnmap; i++) {
+        rc = ops_tbl->unmap_ops(i, -1, CAM_MAPPING_BUF_TYPE_STREAM_BUF, ops_tbl->userdata);
+        if (rc < 0) {
+            LOGE("map_stream_buf failed: %d", rc);
+        }
+    }
+    mBufDefs = NULL; // mBufDefs just keep a ptr to the buffer
+                     // mm-camera-interface own the buffer, so no need to free
+    memset(&mFrameLenOffset, 0, sizeof(mFrameLenOffset));
+    if ( !mStreamBufsAcquired ) {
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+        mStreamBufs = NULL;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : putBufsDeffered
+ *
+ * DESCRIPTION: function to deallocate deffered stream buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::putBufsDeffered()
+{
+    // Join the background allocation thread if one is still running; a
+    // forced cond_signal makes it bail out of any pending work first.
+    if (0 != mBufAllocPid) {
+        cond_signal(true);
+        LOGH("%s: wait for buf allocation thread dead", __func__);
+        // Wait for the allocation of additional stream buffers
+        pthread_join(mBufAllocPid, NULL);
+        mBufAllocPid = 0;
+        LOGH("%s: return from buf allocation thread", __func__);
+    }
+    // Deallocation of the deffered stream buffers handled separately
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : invalidateBuf
+ *
+ * DESCRIPTION: invalidate the cache for a specific stream buffer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer to invalidate
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::invalidateBuf(uint32_t index)
+{
+    // Cannot invalidate before the buffers exist.
+    if (NULL == mStreamBufs) {
+        LOGE("Invalid Operation");
+        return INVALID_OPERATION;
+    }
+    return mStreamBufs->invalidateCache(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : cleanInvalidateBuf
+ *
+ * DESCRIPTION: clean and invalidate the cache for a specific stream buffer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer to clean invalidate
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::cleanInvalidateBuf(uint32_t index)
+{
+    // Cannot flush before the buffers exist.
+    if (NULL == mStreamBufs) {
+        LOGE("Invalid Operation");
+        return INVALID_OPERATION;
+    }
+    return mStreamBufs->cleanInvalidateCache(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : isTypeOf
+ *
+ * DESCRIPTION: helper function to determine if the stream is of the queried type
+ *
+ * PARAMETERS :
+ *   @type    : stream type as of queried
+ *
+ * RETURN     : true/false
+ *==========================================================================*/
+bool QCameraStream::isTypeOf(cam_stream_type_t type)
+{
+    return (mStreamInfo != NULL) && (mStreamInfo->stream_type == type);
+}
+
+/*===========================================================================
+ * FUNCTION   : isOrignalTypeOf
+ *
+ * DESCRIPTION: helper function to determine if the original stream is of the
+ *              queried type if it's reproc stream
+ *
+ * PARAMETERS :
+ *   @type    : stream type as of queried
+ *
+ * RETURN     : true/false
+ *==========================================================================*/
+bool QCameraStream::isOrignalTypeOf(cam_stream_type_t type)
+{
+    // Only reprocess streams carry an "original" input type.
+    if ((mStreamInfo == NULL) ||
+            (mStreamInfo->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC)) {
+        return false;
+    }
+    if (mStreamInfo->reprocess_config.pp_type == CAM_ONLINE_REPROCESS_TYPE) {
+        return (mStreamInfo->reprocess_config.online.input_stream_type == type);
+    }
+    if (mStreamInfo->reprocess_config.pp_type == CAM_OFFLINE_REPROCESS_TYPE) {
+        return (mStreamInfo->reprocess_config.offline.input_type == type);
+    }
+    return false;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMyType
+ *
+ * DESCRIPTION: return stream type
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : stream type (CAM_STREAM_TYPE_DEFAULT when uninitialized)
+ *==========================================================================*/
+cam_stream_type_t QCameraStream::getMyType()
+{
+    return (mStreamInfo != NULL) ? mStreamInfo->stream_type
+                                 : CAM_STREAM_TYPE_DEFAULT;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMyOriginalType
+ *
+ * DESCRIPTION: return the input stream type for reprocess streams, or the
+ *              stream's own type otherwise
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : stream type (CAM_STREAM_TYPE_DEFAULT when uninitialized)
+ *==========================================================================*/
+cam_stream_type_t QCameraStream::getMyOriginalType()
+{
+    if (NULL == mStreamInfo) {
+        return CAM_STREAM_TYPE_DEFAULT;
+    }
+    if (mStreamInfo->stream_type == CAM_STREAM_TYPE_OFFLINE_PROC) {
+        // For reprocess streams, report the type of the stream feeding them.
+        if (mStreamInfo->reprocess_config.pp_type == CAM_ONLINE_REPROCESS_TYPE) {
+            return mStreamInfo->reprocess_config.online.input_stream_type;
+        }
+        if (mStreamInfo->reprocess_config.pp_type == CAM_OFFLINE_REPROCESS_TYPE) {
+            return mStreamInfo->reprocess_config.offline.input_type;
+        }
+    }
+    return mStreamInfo->stream_type;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFrameOffset
+ *
+ * DESCRIPTION: query stream buffer frame offset info
+ *
+ * PARAMETERS :
+ *   @offset  : reference to struct to store the queried frame offset info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getFrameOffset(cam_frame_len_offset_t &offset)
+{
+    if (NULL == mStreamInfo) {
+        return NO_INIT;
+    }
+
+    offset = mFrameLenOffset;
+    if ((ROTATE_90 == mOnlineRotation) || (ROTATE_270 == mOnlineRotation)
+            || (offset.frame_len == 0) || (offset.num_planes == 0)) {
+        // Re-calculate frame offset in case of online rotation, or when
+        // the cached offset has not been filled in yet.  Work on a local
+        // copy of the stream info so cached state is not disturbed.
+        cam_stream_info_t streamInfo = *mStreamInfo;
+        getFrameDimension(streamInfo.dim);
+        calcOffset(&streamInfo);
+        offset = streamInfo.buf_planes.plane_info;
+    }
+
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCropInfo
+ *
+ * DESCRIPTION: query crop info of the stream
+ *
+ * PARAMETERS :
+ *   @crop    : reference to struct to store the queried crop info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getCropInfo(cam_rect_t &crop)
+{
+    // mCropInfo may be updated concurrently (see setCropInfo); guard
+    // the copy with mCropLock.
+    pthread_mutex_lock(&mCropLock);
+    crop = mCropInfo;
+    pthread_mutex_unlock(&mCropLock);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setCropInfo
+ *
+ * DESCRIPTION: set crop info of the stream
+ *
+ * PARAMETERS :
+ *   @crop    : struct to store new crop info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::setCropInfo(cam_rect_t crop)
+{
+    // Serialize against readers in getCropInfo().
+    pthread_mutex_lock(&mCropLock);
+    mCropInfo = crop;
+    pthread_mutex_unlock(&mCropLock);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFrameDimension
+ *
+ * DESCRIPTION: query stream frame dimension info
+ *
+ * PARAMETERS :
+ *   @dim     : reference to struct to store the queried frame dimension
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getFrameDimension(cam_dimension_t &dim)
+{
+    if (mStreamInfo != NULL) {
+        // For 90/270 degree online rotation the output width/height are
+        // swapped relative to the configured stream dimension.
+        if ((ROTATE_90 == mOnlineRotation) || (ROTATE_270 == mOnlineRotation)) {
+            dim.width = mStreamInfo->dim.height;
+            dim.height = mStreamInfo->dim.width;
+        } else {
+            dim = mStreamInfo->dim;
+        }
+        return 0;
+    }
+    return -1;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFormat
+ *
+ * DESCRIPTION: query stream format
+ *
+ * PARAMETERS :
+ *   @fmt     : reference to stream format
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getFormat(cam_format_t &fmt)
+{
+    // Cannot report a format before the stream info buffer exists.
+    if (NULL == mStreamInfo) {
+        return -1;
+    }
+    fmt = mStreamInfo->fmt;
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMyServerID
+ *
+ * DESCRIPTION: query server stream ID
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : stream ID from server, or 0 when stream info is not
+ *              available
+ *==========================================================================*/
+uint32_t QCameraStream::getMyServerID() {
+    return (mStreamInfo != NULL) ? mStreamInfo->stream_svr_id : 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : acquireStreamBufs
+ *
+ * DESCRIPTION: acquire stream buffers and postpone their release.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::acquireStreamBufs()
+{
+    // Only marks the buffers as acquired; the flag defers their release
+    // until the owner explicitly releases them.
+    mStreamBufsAcquired = true;
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : mapBuf
+ *
+ * DESCRIPTION: map stream related buffer to backend server
+ *
+ * PARAMETERS :
+ *   @buf_type : mapping type of buffer
+ *   @buf_idx  : index of buffer
+ *   @plane_idx: plane index
+ *   @fd       : fd of the buffer
+ *   @buffer   : buffer address
+ *   @size     : length of the buffer
+ *   @ops_tbl  : ptr to buf mapping/unmapping ops
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::mapBuf(uint8_t buf_type, uint32_t buf_idx,
+        int32_t plane_idx, int fd, void *buffer, size_t size,
+        mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+    // Wrap the single buffer in a one-entry map list and delegate to
+    // the list-based mapping path.
+    cam_buf_map_type_list bufMapList;
+    int32_t rc = QCameraBufferMaps::makeSingletonBufMapList(
+           (cam_mapping_buf_type)buf_type, mHandle, buf_idx, plane_idx,
+           0 /*cookie*/, fd, size, bufMapList, buffer);
+
+    if (rc != NO_ERROR) {
+        return rc;
+    }
+
+    return mapBufs(bufMapList, ops_tbl);
+}
+
+/*===========================================================================
+ * FUNCTION   : mapBufs
+ *
+ * DESCRIPTION: map stream related buffers to backend server
+ *
+ * PARAMETERS :
+ *   @bufMapList : buffer mapping information
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops (unused; bundled ops
+ *                 or the camera ops table are used instead)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+
+int32_t QCameraStream::mapBufs(cam_buf_map_type_list bufMapList,
+        __unused mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+    // Prefer the bundled mapping ops when they have been registered;
+    // otherwise fall back to the per-stream mapping entry point.
+    if (m_MemOpsTbl.bundled_map_ops != NULL) {
+        return m_MemOpsTbl.bundled_map_ops(&bufMapList, m_MemOpsTbl.userdata);
+    } else {
+        return mCamOps->map_stream_bufs(mCamHandle, mChannelHandle,
+                &bufMapList);
+    }
+
+}
+
+/*===========================================================================
+ * FUNCTION   : unmapBuf
+ *
+ * DESCRIPTION: unmap stream related buffer to backend server
+ *
+ * PARAMETERS :
+ *   @buf_type : mapping type of buffer
+ *   @buf_idx  : index of buffer
+ *   @plane_idx: plane index
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::unmapBuf(uint8_t buf_type, uint32_t buf_idx, int32_t plane_idx,
+        mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+    // Use the caller-provided ops table when available, otherwise the
+    // generic per-stream unmap entry point.
+    if (ops_tbl != NULL) {
+        return ops_tbl->unmap_ops(buf_idx, plane_idx,
+                (cam_mapping_buf_type)buf_type, ops_tbl->userdata);
+    } else {
+        return mCamOps->unmap_stream_buf(mCamHandle, mChannelHandle,
+                mHandle, buf_type, buf_idx, plane_idx);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setParameter
+ *
+ * DESCRIPTION: set stream based parameters
+ *
+ * PARAMETERS :
+ *   @param   : parameters to be set; on success updated with the value
+ *              written back by the backend
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              NO_INIT   -- stream info buffer not available yet
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::setParameter(cam_stream_parm_buffer_t &param)
+{
+    int32_t rc = NO_ERROR;
+
+    // Guard against use before the stream info buffer is set up;
+    // consistent with getFrameOffset().
+    if (NULL == mStreamInfo) {
+        return NO_INIT;
+    }
+
+    pthread_mutex_lock(&mParameterLock);
+    mStreamInfo->parm_buf = param;
+    rc = mCamOps->set_stream_parms(mCamHandle,
+                                   mChannelHandle,
+                                   mHandle,
+                                   &mStreamInfo->parm_buf);
+    if (rc == NO_ERROR) {
+        // Backend may update parm_buf in place; reflect it to the caller.
+        param = mStreamInfo->parm_buf;
+    }
+    pthread_mutex_unlock(&mParameterLock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getParameter
+ *
+ * DESCRIPTION: get stream based parameters
+ *
+ * PARAMETERS :
+ *   @param   : parameters to be read; specifies the query and receives
+ *              the result from the backend on success
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              NO_INIT   -- stream info buffer not available yet
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getParameter(cam_stream_parm_buffer_t &param)
+{
+    int32_t rc = NO_ERROR;
+
+    // Guard against use before the stream info buffer is set up;
+    // consistent with getFrameOffset().
+    if (NULL == mStreamInfo) {
+        return NO_INIT;
+    }
+
+    pthread_mutex_lock(&mParameterLock);
+    mStreamInfo->parm_buf = param;
+    rc = mCamOps->get_stream_parms(mCamHandle,
+                                   mChannelHandle,
+                                   mHandle,
+                                   &mStreamInfo->parm_buf);
+    if (rc == NO_ERROR) {
+        // Backend fills parm_buf with the queried value.
+        param = mStreamInfo->parm_buf;
+    }
+    pthread_mutex_unlock(&mParameterLock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseFrameData
+ *
+ * DESCRIPTION: callback function to release frame data node
+ *
+ * PARAMETERS :
+ *   @data      : ptr to post process input data (mm_camera_super_buf_t)
+ *   @user_data : user data ptr (QCameraStream)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraStream::releaseFrameData(void *data, void *user_data)
+{
+    QCameraStream *pme = (QCameraStream *)user_data;
+    mm_camera_super_buf_t *frame = (mm_camera_super_buf_t *)data;
+    // Validate all pointers before dereferencing; frame arrives as an
+    // opaque void* from the caller and was previously dereferenced
+    // unconditionally.
+    if ((NULL != pme) && (NULL != frame) && (NULL != frame->bufs[0])) {
+        pme->bufDone(frame->bufs[0]->buf_idx);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : configStream
+ *
+ * DESCRIPTION: send stream configuration to back end
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::configStream()
+{
+    int rc = NO_ERROR;
+
+    // Configure the stream
+    // NOTE(review): fields are assigned individually; confirm
+    // mm_camera_stream_config_t has no additional members that would be
+    // left uninitialized here.
+    mm_camera_stream_config_t stream_config;
+    stream_config.stream_info = mStreamInfo;
+    stream_config.mem_vtbl = mMemVtbl;
+    stream_config.stream_cb_sync = NULL;
+    stream_config.stream_cb = dataNotifyCB;
+    stream_config.padding_info = mPaddingInfo;
+    stream_config.userdata = this;
+    rc = mCamOps->config_stream(mCamHandle,
+                mChannelHandle, mHandle, &stream_config);
+    if (rc < 0) {
+        LOGE("Failed to config stream, rc = %d", rc);
+        // Roll back the stream-info buffer mapping done earlier so the
+        // backend does not keep a stale mapping for this stream.
+        mCamOps->unmap_stream_buf(mCamHandle,
+                mChannelHandle,
+                mHandle,
+                CAM_MAPPING_BUF_TYPE_STREAM_INFO,
+                0,
+                -1);
+        return UNKNOWN_ERROR;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSyncDataCB
+ *
+ * DESCRIPTION: register callback with mm-interface for this stream
+ *
+ * PARAMETERS :
+ *   @data_cb : callback function to invoke synchronously per buffer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::setSyncDataCB(stream_cb_routine data_cb)
+{
+    int32_t rc = NO_ERROR;
+
+    // Report the correct failure cause: previously "Interface handle is
+    // NULL" was logged even when registration itself failed.
+    if (mCamOps == NULL) {
+        LOGE("Interface handle is NULL");
+        return UNKNOWN_ERROR;
+    }
+
+    mSYNCDataCB = data_cb;
+    rc = mCamOps->register_stream_buf_cb(mCamHandle,
+            mChannelHandle, mHandle, dataNotifySYNCCB, MM_CAMERA_STREAM_CB_TYPE_SYNC,
+            this);
+    if (rc == NO_ERROR) {
+        mSyncCBEnabled = TRUE;
+        return rc;
+    }
+    LOGE("Failed to register sync data callback, rc = %d", rc);
+    return UNKNOWN_ERROR;
+}
+
+}; // namespace qcamera
diff --git a/msmcobalt/QCamera2/HAL/QCameraStream.h b/msmcobalt/QCamera2/HAL/QCameraStream.h
new file mode 100644
index 0000000..4200f5d
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/QCameraStream.h
@@ -0,0 +1,272 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_STREAM_H__
+#define __QCAMERA_STREAM_H__
+
+// Camera dependencies
+#include "camera.h"
+#include "QCameraCmdThread.h"
+#include "QCameraMem.h"
+#include "QCameraAllocator.h"
+
+extern "C" {
+#include "mm_camera_interface.h"
+}
+
+namespace qcamera {
+
+class QCameraStream;
+// Per-frame data callback: invoked with a received super buffer for the
+// given stream.
+typedef void (*stream_cb_routine)(mm_camera_super_buf_t *frame,
+                                  QCameraStream *stream,
+                                  void *userdata);
+
+#define CAMERA_MAX_CONSUMER_BATCH_BUFFER_SIZE   16
+#define CAMERA_MIN_VIDEO_BATCH_BUFFERS          3
+
+
+/* QCameraStream: wrapper around a single mm-camera-interface stream.
+ * Holds the stream-info/misc heap buffers and the stream frame buffers,
+ * and runs a processing thread (mProcTh) that services data callbacks.
+ * Buffer allocation may be deferred (mDefferedAllocation) and can run on
+ * background tasks (mAllocTask / mMapTask). */
+class QCameraStream
+{
+public:
+    QCameraStream(QCameraAllocator &allocator,
+            uint32_t camHandle, uint32_t chId,
+            mm_camera_ops_t *camOps, cam_padding_info_t *paddingInfo,
+            bool deffered = false, cam_rotation_t online_rotation = ROTATE_0);
+    virtual ~QCameraStream();
+    virtual int32_t init(QCameraHeapMemory *streamInfoBuf,
+            QCameraHeapMemory *miscBuf,
+            uint8_t minStreamBufNum,
+            stream_cb_routine stream_cb,
+            void *userdata,
+            bool bDynallocBuf);
+    virtual int32_t processZoomDone(preview_stream_ops_t *previewWindow,
+                                    cam_crop_data_t &crop_info);
+    virtual int32_t bufDone(uint32_t index);
+    virtual int32_t bufDone(const void *opaque, bool isMetaData);
+    virtual int32_t processDataNotify(mm_camera_super_buf_t *bufs);
+    virtual int32_t start();
+    virtual int32_t stop();
+
+    /* Used for deffered allocation of buffers */
+    virtual int32_t allocateBuffers();
+    virtual int32_t mapBuffers();
+    virtual int32_t releaseBuffs();
+
+    static void dataNotifyCB(mm_camera_super_buf_t *recvd_frame, void *userdata);
+    static void dataNotifySYNCCB(mm_camera_super_buf_t *recvd_frame,
+            void *userdata);
+    static void *dataProcRoutine(void *data);
+    static void *BufAllocRoutine(void *data);
+    uint32_t getMyHandle() const {return mHandle;}
+    bool isTypeOf(cam_stream_type_t type);
+    bool isOrignalTypeOf(cam_stream_type_t type);
+    int32_t getFrameOffset(cam_frame_len_offset_t &offset);
+    int32_t getCropInfo(cam_rect_t &crop);
+    int32_t setCropInfo(cam_rect_t crop);
+    int32_t getFrameDimension(cam_dimension_t &dim);
+    int32_t getFormat(cam_format_t &fmt);
+    QCameraMemory *getStreamBufs() {return mStreamBufs;};
+    QCameraHeapMemory *getStreamInfoBuf() {return mStreamInfoBuf;};
+    QCameraHeapMemory *getMiscBuf() {return mMiscBuf;};
+    uint32_t getMyServerID();
+    cam_stream_type_t getMyType();
+    cam_stream_type_t getMyOriginalType();
+    int32_t acquireStreamBufs();
+
+    int32_t mapBuf(uint8_t buf_type, uint32_t buf_idx,
+            int32_t plane_idx, int fd, void *buffer, size_t size,
+            mm_camera_map_unmap_ops_tbl_t *ops_tbl = NULL);
+    int32_t mapBufs(cam_buf_map_type_list bufMapList,
+            mm_camera_map_unmap_ops_tbl_t *ops_tbl = NULL);
+    int32_t mapNewBuffer(uint32_t index);
+    int32_t unmapBuf(uint8_t buf_type, uint32_t buf_idx, int32_t plane_idx,
+            mm_camera_map_unmap_ops_tbl_t *ops_tbl = NULL);
+    int32_t setParameter(cam_stream_parm_buffer_t &param);
+    int32_t getParameter(cam_stream_parm_buffer_t &param);
+    int32_t syncRuntimeParams();
+    cam_stream_parm_buffer_t getOutputCrop() { return m_OutputCrop;};
+    cam_stream_parm_buffer_t getImgProp() { return m_ImgProp;};
+
+    static void releaseFrameData(void *data, void *user_data);
+    int32_t configStream();
+    bool isDeffered() const { return mDefferedAllocation; }
+    bool isSyncCBEnabled() {return mSyncCBEnabled;};
+    void deleteStream();
+
+    uint8_t getBufferCount() { return mNumBufs; }
+    uint32_t getChannelHandle() { return mChannelHandle; }
+    int32_t getNumQueuedBuf();
+
+    uint32_t mDumpFrame;
+    uint32_t mDumpMetaFrame;
+    uint32_t mDumpSkipCnt;
+
+    void cond_wait();
+    void cond_signal(bool forceExit = false);
+
+    int32_t setSyncDataCB(stream_cb_routine data_cb);
+    //Stream time stamp. We need this for preview stream to update display
+    nsecs_t mStreamTimestamp;
+
+    //Frame Buffer will be stored here in case framework batch mode.
+    camera_memory_t *mCurMetaMemory; // Current metadata buffer ptr
+    int8_t mCurBufIndex;             // Buffer count filled in current metadata
+    int8_t mCurMetaIndex;            // Active metadata buffer index
+
+    nsecs_t mFirstTimeStamp;         // Timestamp of first frame in Metadata.
+
+    // Buffer storage structure.
+    typedef struct {
+        bool consumerOwned; // Metadata is with Consumer if TRUE
+        uint8_t numBuffers; // Num of buffer need to released
+        uint8_t buf_index[CAMERA_MAX_CONSUMER_BATCH_BUFFER_SIZE];
+    } MetaMemory;
+    MetaMemory mStreamMetaMemory[CAMERA_MIN_VIDEO_BATCH_BUFFERS];
+
+private:
+    uint32_t mCamHandle;
+    uint32_t mChannelHandle;
+    uint32_t mHandle; // stream handle from mm-camera-interface
+    mm_camera_ops_t *mCamOps;
+    cam_stream_info_t *mStreamInfo; // ptr to stream info buf
+    mm_camera_stream_mem_vtbl_t mMemVtbl;
+    uint8_t mNumBufs;
+    uint8_t mNumPlaneBufs;
+    uint8_t mNumBufsNeedAlloc;
+    uint8_t *mRegFlags;
+    stream_cb_routine mDataCB;
+    stream_cb_routine mSYNCDataCB;
+    void *mUserData;
+
+    QCameraQueue     mDataQ;
+    QCameraCmdThread mProcTh; // thread for dataCB
+
+    QCameraHeapMemory *mStreamInfoBuf;
+    QCameraHeapMemory *mMiscBuf;
+    QCameraMemory *mStreamBufs;
+    QCameraMemory *mStreamBatchBufs;
+    QCameraAllocator &mAllocator;
+    mm_camera_buf_def_t *mBufDefs;
+    mm_camera_buf_def_t *mPlaneBufDefs;
+    cam_frame_len_offset_t mFrameLenOffset;
+    cam_padding_info_t mPaddingInfo;
+    cam_rect_t mCropInfo;
+    cam_rotation_t mOnlineRotation;
+    pthread_mutex_t mCropLock; // lock to protect crop info
+    pthread_mutex_t mParameterLock; // lock to sync access to parameters
+    bool mStreamBufsAcquired;
+    bool m_bActive; // if stream mProcTh is active
+    bool mDynBufAlloc; // allow buf allocation in 2 steps
+    pthread_t mBufAllocPid;
+    mm_camera_map_unmap_ops_tbl_t m_MemOpsTbl;
+    cam_stream_parm_buffer_t m_OutputCrop;
+    cam_stream_parm_buffer_t m_ImgProp;
+
+    static int32_t get_bufs(
+                     cam_frame_len_offset_t *offset,
+                     uint8_t *num_bufs,
+                     uint8_t **initial_reg_flag,
+                     mm_camera_buf_def_t **bufs,
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                     void *user_data);
+
+    static int32_t get_bufs_deffered(
+            cam_frame_len_offset_t *offset,
+            uint8_t *num_bufs,
+            uint8_t **initial_reg_flag,
+            mm_camera_buf_def_t **bufs,
+            mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+            void *user_data);
+
+    static int32_t put_bufs(
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                     void *user_data);
+
+    static int32_t put_bufs_deffered(
+            mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+            void *user_data);
+
+    static int32_t set_config_ops(
+            mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+            void *user_data);
+
+    static int32_t invalidate_buf(uint32_t index, void *user_data);
+    static int32_t clean_invalidate_buf(uint32_t index, void *user_data);
+
+    static int32_t backgroundAllocate(void* data);
+    static int32_t backgroundMap(void* data);
+
+    int32_t getBufs(cam_frame_len_offset_t *offset,
+                     uint8_t *num_bufs,
+                     uint8_t **initial_reg_flag,
+                     mm_camera_buf_def_t **bufs,
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl);
+    int32_t getBufsDeferred(cam_frame_len_offset_t *offset,
+            uint8_t *num_bufs,
+            uint8_t **initial_reg_flag,
+            mm_camera_buf_def_t **bufs,
+            mm_camera_map_unmap_ops_tbl_t *ops_tbl);
+    int32_t putBufs(mm_camera_map_unmap_ops_tbl_t *ops_tbl);
+    int32_t putBufsDeffered();
+
+    /* Used for deffered allocation of buffers */
+    int32_t allocateBatchBufs(cam_frame_len_offset_t *offset,
+            uint8_t *num_bufs, uint8_t **initial_reg_flag,
+            mm_camera_buf_def_t **bufs, mm_camera_map_unmap_ops_tbl_t *ops_tbl);
+
+    int32_t releaseBatchBufs(mm_camera_map_unmap_ops_tbl_t *ops_tbl);
+
+    int32_t invalidateBuf(uint32_t index);
+    int32_t cleanInvalidateBuf(uint32_t index);
+    int32_t calcOffset(cam_stream_info_t *streamInfo);
+    int32_t unmapStreamInfoBuf();
+    int32_t releaseStreamInfoBuf();
+    int32_t releaseMiscBuf();
+    int32_t mapBufs(QCameraMemory *heapBuf, cam_mapping_buf_type bufType,
+            mm_camera_map_unmap_ops_tbl_t *ops_tbl = NULL);
+    int32_t unMapBuf(QCameraMemory *heapBuf, cam_mapping_buf_type bufType,
+            mm_camera_map_unmap_ops_tbl_t *ops_tbl = NULL);
+
+    bool mDefferedAllocation;
+
+    bool wait_for_cond;
+    pthread_mutex_t m_lock;
+    pthread_cond_t m_cond;
+
+    BackgroundTask mAllocTask;
+    uint32_t mAllocTaskId;
+    BackgroundTask mMapTask;
+    uint32_t mMapTaskId;
+
+    bool mSyncCBEnabled;
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_STREAM_H__ */
diff --git a/msmcobalt/QCamera2/HAL/QCameraThermalAdapter.cpp b/msmcobalt/QCamera2/HAL/QCameraThermalAdapter.cpp
new file mode 100644
index 0000000..7579f9a
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/QCameraThermalAdapter.cpp
@@ -0,0 +1,177 @@
+/* Copyright (c) 2013-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#define LOG_TAG "QCameraThermalAdapter"
+
+// System dependencies
+#include <dlfcn.h>
+#include <utils/Errors.h>
+
+// Camera dependencies
+#include "QCamera2HWI.h"
+#include "QCameraThermalAdapter.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+using namespace android;
+
+namespace qcamera {
+
+
+/*
+ * getInstance: singleton accessor; the adapter instance is created on
+ * first use (function-local static).
+ */
+QCameraThermalAdapter& QCameraThermalAdapter::getInstance()
+{
+    static QCameraThermalAdapter instance;
+    return instance;
+}
+
+/*
+ * Constructor: clear all handles and callbacks; the real setup (dlopen
+ * of the thermal client library and callback registration) happens in
+ * init().
+ */
+QCameraThermalAdapter::QCameraThermalAdapter() :
+                                        mCallback(NULL),
+                                        mHandle(NULL),
+                                        mRegister(NULL),
+                                        mUnregister(NULL),
+                                        mCameraHandle(0),
+                                        mCamcorderHandle(0)
+{
+}
+
+/*
+ * init: load the thermal client library, resolve its register/unregister
+ * entry points and register "camera" and "camcorder" callbacks.
+ *
+ * @thermalCb: receiver for thermal level events (stored in mCallback)
+ *
+ * Returns NO_ERROR on success; UNKNOWN_ERROR on any failure, with all
+ * partially-acquired resources rolled back via the goto cleanup chain.
+ */
+int QCameraThermalAdapter::init(QCameraThermalCallback *thermalCb)
+{
+    const char *error = NULL;
+    int rc = NO_ERROR;
+
+    LOGD("E");
+    // Load the thermal client library lazily; absence of the library is
+    // reported but not fatal to the camera as a whole.
+    mHandle = dlopen("/vendor/lib/libthermalclient.so", RTLD_NOW);
+    if (!mHandle) {
+        error = dlerror();
+        LOGE("dlopen failed with error %s",
+                     error ? error : "");
+        rc = UNKNOWN_ERROR;
+        goto error;
+    }
+    *(void **)&mRegister = dlsym(mHandle, "thermal_client_register_callback");
+    if (!mRegister) {
+        error = dlerror();
+        LOGE("dlsym failed with error code %s",
+                     error ? error: "");
+        rc = UNKNOWN_ERROR;
+        goto error2;
+    }
+    *(void **)&mUnregister = dlsym(mHandle, "thermal_client_unregister_callback");
+    if (!mUnregister) {
+        error = dlerror();
+        LOGE("dlsym failed with error code %s",
+                     error ? error: "");
+        rc = UNKNOWN_ERROR;
+        goto error2;
+    }
+
+    mCallback = thermalCb;
+
+    // Register camera and camcorder callbacks
+    mCameraHandle = mRegister(mStrCamera, thermalCallback, NULL);
+    if (mCameraHandle < 0) {
+        LOGE("thermal_client_register_callback failed %d",
+                         mCameraHandle);
+        rc = UNKNOWN_ERROR;
+        goto error2;
+    }
+    mCamcorderHandle = mRegister(mStrCamcorder, thermalCallback, NULL);
+    if (mCamcorderHandle < 0) {
+        LOGE("thermal_client_register_callback failed %d",
+                         mCamcorderHandle);
+        rc = UNKNOWN_ERROR;
+        goto error3;
+    }
+
+    LOGD("X");
+    return rc;
+
+// Cleanup chain: each label unwinds the resources acquired before the
+// failure point, in reverse order of acquisition.
+error3:
+    mCamcorderHandle = 0;
+    mUnregister(mCameraHandle);
+error2:
+    mCameraHandle = 0;
+    dlclose(mHandle);
+    mHandle = NULL;
+error:
+    LOGD("X");
+    return rc;
+}
+
+/*
+ * deinit: unregister both thermal callbacks (if registered), unload the
+ * thermal client library and reset all state.  Safe to call even if
+ * init() failed or was never called.
+ */
+void QCameraThermalAdapter::deinit()
+{
+    LOGD("E");
+    if (mUnregister) {
+        if (mCameraHandle) {
+            mUnregister(mCameraHandle);
+            mCameraHandle = 0;
+        }
+        if (mCamcorderHandle) {
+            mUnregister(mCamcorderHandle);
+            mCamcorderHandle = 0;
+        }
+    }
+    if (mHandle)
+        dlclose(mHandle);
+
+    mHandle = NULL;
+    mRegister = NULL;
+    mUnregister = NULL;
+    mCallback = NULL;
+    LOGD("X");
+}
+
+// Client names passed to thermal_client_register_callback in init().
+char QCameraThermalAdapter::mStrCamera[] = "camera";
+char QCameraThermalAdapter::mStrCamcorder[] = "camcorder";
+
+/*
+ * thermalCallback: entry point invoked by the thermal client library
+ * with a new mitigation level.  Stores the level and forwards the event
+ * to the registered QCameraThermalCallback, if any.
+ */
+int QCameraThermalAdapter::thermalCallback(int level,
+                void *userdata, void *data)
+{
+    int rc = 0;
+    LOGD("E");
+    QCameraThermalCallback *mcb = getInstance().mCallback;
+
+    if (mcb) {
+        mcb->setThermalLevel((qcamera_thermal_level_enum_t) level);
+        rc = mcb->thermalEvtHandle(mcb->getThermalLevel(), userdata, data);
+    }
+    LOGD("X");
+    return rc;
+}
+
+// Returns a pointer to the most recently stored thermal level.
+qcamera_thermal_level_enum_t *QCameraThermalCallback::getThermalLevel() {
+    return &mLevel;
+}
+
+// Records the latest thermal level reported via thermalCallback().
+void QCameraThermalCallback::setThermalLevel(qcamera_thermal_level_enum_t level) {
+    mLevel = level;
+}
+}; //namespace qcamera
diff --git a/msmcobalt/QCamera2/HAL/QCameraThermalAdapter.h b/msmcobalt/QCamera2/HAL/QCameraThermalAdapter.h
new file mode 100644
index 0000000..9afc90f
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/QCameraThermalAdapter.h
@@ -0,0 +1,91 @@
+/* Copyright (c) 2013, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_THERMAL_ADAPTER__
+#define __QCAMERA_THERMAL_ADAPTER__
+
+namespace qcamera {
+
+typedef enum {
+    QCAMERA_THERMAL_NO_ADJUSTMENT = 0,
+    QCAMERA_THERMAL_SLIGHT_ADJUSTMENT,
+    QCAMERA_THERMAL_BIG_ADJUSTMENT,
+    QCAMERA_THERMAL_MAX_ADJUSTMENT,
+    QCAMERA_THERMAL_SHUTDOWN = 10
+} qcamera_thermal_level_enum_t;
+
+typedef enum {
+    QCAMERA_THERMAL_ADJUST_FPS,
+    QCAMERA_THERMAL_ADJUST_FRAMESKIP,
+} qcamera_thermal_mode;
+
+class QCameraThermalCallback
+{
+public:
+    virtual int thermalEvtHandle(qcamera_thermal_level_enum_t *level,
+            void *userdata, void *data) = 0;
+    virtual ~QCameraThermalCallback() {}
+    qcamera_thermal_level_enum_t *getThermalLevel();
+    void setThermalLevel(qcamera_thermal_level_enum_t level);
+
+private:
+    qcamera_thermal_level_enum_t mLevel;
+};
+
+class QCameraThermalAdapter
+{
+public:
+    static QCameraThermalAdapter& getInstance();
+
+    int init(QCameraThermalCallback *thermalCb);
+    void deinit();
+
+private:
+    static char mStrCamera[];
+    static char mStrCamcorder[];
+
+    static int thermalCallback(int level, void *userdata, void *data);
+
+    QCameraThermalCallback *mCallback;
+    void *mHandle;
+    int (*mRegister)(char *name,
+            int (*callback)(int, void *userdata, void *data), void *data);
+    int (*mUnregister)(int handle);
+    int mCameraHandle;
+    int mCamcorderHandle;
+
+    QCameraThermalAdapter();
+    QCameraThermalAdapter(QCameraThermalAdapter const& copy); // not implemented
+    QCameraThermalAdapter& operator=(QCameraThermalAdapter const& copy); // not implemented
+
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_THERMAL_ADAPTER__ */
diff --git a/msmcobalt/QCamera2/HAL/android/QCamera2External.h b/msmcobalt/QCamera2/HAL/android/QCamera2External.h
new file mode 100644
index 0000000..37e8f56
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/android/QCamera2External.h
@@ -0,0 +1,47 @@
+/* Copyright (c) 2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#ifndef __QCAMERA2EXTERNAL_H__
+#define __QCAMERA2EXTERNAL_H__
+
+// System dependencies
+#include <utils/Errors.h>
+
+// Display dependencies
+#include "QServiceUtils.h"
+
+namespace qcamera {
+
+inline android::status_t setCameraLaunchStatus(uint32_t on) {
+    return ::setCameraLaunchStatus(on);
+}
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA2EXTERNAL_H__ */
diff --git a/msmcobalt/QCamera2/HAL/test/Android.mk b/msmcobalt/QCamera2/HAL/test/Android.mk
new file mode 100644
index 0000000..50a186e
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/test/Android.mk
@@ -0,0 +1,64 @@
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+    qcamera_test.cpp \
+
+LOCAL_SHARED_LIBRARIES:= \
+    libdl \
+    libui \
+    libutils \
+    libcutils \
+    libbinder \
+    libmedia \
+    libui \
+    libgui \
+    libcamera_client \
+    libskia \
+    libstagefright \
+    libstagefright_foundation \
+
+ifneq (1,$(filter 1,$(shell echo "$$(( $(PLATFORM_SDK_VERSION) >= 18 ))" )))
+
+LOCAL_SHARED_LIBRARIES += \
+    libmedia_native \
+
+LOCAL_32_BIT_ONLY := $(BOARD_QTI_CAMERA_32BIT_ONLY)
+LOCAL_CFLAGS += -DUSE_JB_MR1
+
+endif
+
+LOCAL_C_INCLUDES += \
+    frameworks/base/include/ui \
+    frameworks/base/include/surfaceflinger \
+    frameworks/base/include/camera \
+    frameworks/base/include/media \
+    external/skia/include/core \
+    external/skia/include/images \
+    $(TARGET_OUT_HEADERS)/qcom/display \
+    hardware/qcom/camera/QCamera2/stack/common \
+    hardware/qcom/camera/QCamera2/stack/mm-camera-interface/inc \
+    frameworks/av/include/media/stagefright \
+    frameworks/native/include/media/openmax \
+    $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr/include
+
+LOCAL_ADDITIONAL_DEPENDENCIES := $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr
+
+LOCAL_MODULE:= camera_test
+LOCAL_MODULE_TAGS:= tests
+
+LOCAL_CFLAGS += -Wall -Wextra -Werror -Wno-unused-parameter
+LOCAL_CFLAGS += -O0
+
+ifeq (1,$(filter 1,$(shell echo "$$(( $(PLATFORM_SDK_VERSION) >= 20 ))" )))
+
+LOCAL_CFLAGS += -DUSE_SDK_20_OR_HIGHER
+
+ifeq ($(TARGET_USES_AOSP),true)
+LOCAL_CFLAGS += -DVANILLA_HAL
+endif
+
+endif
+
+#include $(BUILD_EXECUTABLE)
diff --git a/msmcobalt/QCamera2/HAL/test/qcamera_test.cpp b/msmcobalt/QCamera2/HAL/test/qcamera_test.cpp
new file mode 100644
index 0000000..dd06c67
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/test/qcamera_test.cpp
@@ -0,0 +1,3710 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <stdlib.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <time.h>
+#include <semaphore.h>
+#include <pthread.h>
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/wait.h>
+
+#include <ui/DisplayInfo.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include <gui/ISurfaceComposer.h>
+
+#include <system/camera.h>
+
+#include <camera/Camera.h>
+#include <camera/ICamera.h>
+#include <camera/CameraParameters.h>
+#include <media/mediarecorder.h>
+
+#include <utils/RefBase.h>
+#include <utils/Mutex.h>
+#include <utils/Condition.h>
+#include <binder/IPCThreadState.h>
+#include <binder/ProcessState.h>
+#include <binder/IServiceManager.h>
+#include <cutils/properties.h>
+#include <cutils/memory.h>
+#include <SkImageDecoder.h>
+#include <SkImageEncoder.h>
+#include <MediaCodec.h>
+#include <OMX_IVCommon.h>
+#include <foundation/AMessage.h>
+#include <media/ICrypto.h>
+#include <MediaMuxer.h>
+#include <foundation/ABuffer.h>
+#include <MediaErrors.h>
+#include <gralloc_priv.h>
+#include <math.h>
+
+#include "qcamera_test.h"
+#include "cam_types.h"
+#include "mm_camera_dbg.h"
+
+#define VIDEO_BUF_ALLIGN(size, allign) \
+  (((size) + (allign-1)) & (typeof(size))(~(allign-1)))
+
+namespace qcamera {
+
+using namespace android;
+
+int CameraContext::JpegIdx = 0;
+int CameraContext::mPiPIdx = 0;
+const char CameraContext::KEY_ZSL[] = "zsl";
+
+/*===========================================================================
+ * FUNCTION   : previewCallback
+ *
+ * DESCRIPTION: preview callback; invoked when preview messages are enabled
+ *
+ * PARAMETERS :
+ *   @mem : preview buffer
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void CameraContext::previewCallback(const sp<IMemory>& mem)
+{
+    printf("PREVIEW Callback %p", mem->pointer());
+    uint8_t *ptr = (uint8_t*) mem->pointer();
+    if (NULL != ptr) {
+        printf("PRV_CB: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x",
+                ptr[0],
+                ptr[1],
+                ptr[2],
+                ptr[3],
+                ptr[4],
+                ptr[5],
+                ptr[6],
+                ptr[7],
+                ptr[8],
+                ptr[9]);
+    } else {
+        ALOGE(" no preview for NULL CB\n");
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : useLock
+ *
+ * DESCRIPTION: Mutex lock for CameraContext
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void CameraContext::useLock()
+{
+    Mutex::Autolock l(mLock);
+    while (mInUse) {
+        mCond.wait(mLock);
+    }
+    mInUse = true;
+}
+
+/*===========================================================================
+ * FUNCTION   : signalFinished
+ *
+ * DESCRIPTION: Mutex unlock CameraContext
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void CameraContext::signalFinished()
+{
+    Mutex::Autolock l(mLock);
+    mInUse = false;
+    mCond.signal();
+}
+
+/*===========================================================================
+ * FUNCTION   : saveFile
+ *
+ * DESCRIPTION: helper function for saving buffers on filesystem
+ *
+ * PARAMETERS :
+ *   @mem : buffer to save to filesystem
+ *   @path: File path
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::saveFile(const sp<IMemory>& mem, String8 path)
+{
+    unsigned char *buff = NULL;
+    ssize_t size;
+    int fd = -1;
+
+    if (mem == NULL) {
+        return BAD_VALUE;
+    }
+
+    fd = open(path, O_CREAT | O_WRONLY | O_TRUNC, 0655);
+    if(fd < 0) {
+        printf("Unable to open file %s %s\n", path.string(), strerror(fd));
+        return -errno;
+    }
+
+    size = (ssize_t)mem->size();
+    if (size <= 0) {
+        printf("IMemory object is of zero size\n");
+        close(fd);
+        return BAD_VALUE;
+    }
+
+    buff = (unsigned char *)mem->pointer();
+    if (!buff) {
+        printf("Buffer pointer is invalid\n");
+        close(fd);
+        return BAD_VALUE;
+    }
+
+    if (size != write(fd, buff, (size_t)size)) {
+        printf("Bad Write error (%d)%s\n", errno, strerror(errno));
+        close(fd);
+        return INVALID_OPERATION;
+    }
+
+    printf("%s: buffer=%p, size=%lld stored at %s\n",
+            __FUNCTION__, buff, (long long int) size, path.string());
+
+    if (fd >= 0)
+        close(fd);
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : PiPCopyToOneFile
+ *
+ * DESCRIPTION: Copy the smaller picture to the bigger one
+ *
+ * PARAMETERS :
+ *   @bitmap0 : Decoded image buffer 0
+ *   @bitmap1 : Decoded image buffer 1
+ *
+ * RETURN     : decoded picture in picture in SkBitmap
+ *==========================================================================*/
+SkBitmap * CameraContext::PiPCopyToOneFile(
+    SkBitmap *bitmap0, SkBitmap *bitmap1)
+{
+    size_t size0;
+    size_t size1;
+    SkBitmap *src;
+    SkBitmap *dst;
+    unsigned int dstOffset;
+    unsigned int srcOffset;
+
+    if (bitmap0 == NULL || bitmap1 == NULL) {
+        ALOGE(" bitmap0 : %p, bitmap1 : %p\n",  bitmap0, bitmap1);
+        return NULL;
+    }
+
+    size0 = bitmap0->getSize();
+    if (size0 <= 0) {
+        printf("Decoded image 0 is of zero size\n");
+        return NULL;
+    }
+
+    size1 = bitmap1->getSize();
+        if (size1 <= 0) {
+            printf("Decoded image 1 is of zero size\n");
+            return NULL;
+        }
+
+    if (size0 > size1) {
+        dst = bitmap0;
+        src = bitmap1;
+    } else if (size1 > size0){
+        dst = bitmap1;
+        src = bitmap0;
+    } else {
+        printf("Picture size should be with different size!\n");
+        return NULL;
+    }
+
+    for (unsigned int i = 0; i < (unsigned int)src->height(); i++) {
+        dstOffset = i * (unsigned int)dst->width() * mfmtMultiplier;
+        srcOffset = i * (unsigned int)src->width() * mfmtMultiplier;
+        memcpy(((unsigned char *)dst->getPixels()) + dstOffset,
+                ((unsigned char *)src->getPixels()) + srcOffset,
+                (unsigned int)src->width() * mfmtMultiplier);
+    }
+
+    return dst;
+}
+
+/*===========================================================================
+ * FUNCTION   : decodeJPEG
+ *
+ * DESCRIPTION: decode jpeg input buffer.
+ *
+ * PARAMETERS :
+ *   @mem     : buffer to decode
+ *   @skBM    : decoded buffer
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+
+ *==========================================================================*/
+status_t CameraContext::decodeJPEG(const sp<IMemory>& mem, SkBitmap *skBM)
+{
+#ifndef USE_SDK_20_OR_HIGHER
+    SkBitmap::Config prefConfig = SkBitmap::kARGB_8888_Config;
+    const void *buff = NULL;
+    size_t size;
+
+    buff = (const void *)mem->pointer();
+    size= mem->size();
+
+    switch(prefConfig) {
+        case SkBitmap::kARGB_8888_Config:
+        {
+            mfmtMultiplier = 4;
+        }
+            break;
+
+        case SkBitmap::kARGB_4444_Config:
+        {
+            mfmtMultiplier = 2;
+        }
+        break;
+
+        case SkBitmap::kRGB_565_Config:
+        {
+            mfmtMultiplier = 2;
+        }
+        break;
+
+        case SkBitmap::kIndex8_Config:
+        {
+            mfmtMultiplier = 4;
+        }
+        break;
+
+        case SkBitmap::kA8_Config:
+        {
+            mfmtMultiplier = 4;
+        }
+        break;
+
+        default:
+        {
+            mfmtMultiplier = 0;
+            printf("Decode format is not correct!\n");
+        }
+        break;
+    }
+
+    if (SkImageDecoder::DecodeMemory(buff, size, skBM, prefConfig,
+            SkImageDecoder::kDecodePixels_Mode) == false) {
+        printf("%s():%d:: Failed during jpeg decode\n",__FUNCTION__,__LINE__);
+        return BAD_VALUE;
+    }
+#else
+    SkColorType prefConfig = kRGBA_8888_SkColorType;
+    const void *buff = NULL;
+    size_t size;
+
+    buff = (const void *)mem->pointer();
+    size= mem->size();
+
+    switch(prefConfig) {
+        case kRGBA_8888_SkColorType:
+        {
+            mfmtMultiplier = 4;
+        }
+        break;
+
+        case kBGRA_8888_SkColorType:
+        {
+            mfmtMultiplier = 4;
+        }
+        break;
+
+        case kARGB_4444_SkColorType:
+        {
+            mfmtMultiplier = 2;
+        }
+        break;
+
+        case kRGB_565_SkColorType:
+        {
+            mfmtMultiplier = 2;
+        }
+        break;
+
+        case kIndex_8_SkColorType:
+        {
+            mfmtMultiplier = 4;
+        }
+        break;
+
+        case kAlpha_8_SkColorType:
+        {
+            mfmtMultiplier = 4;
+        }
+        break;
+
+        default:
+        {
+            mfmtMultiplier = 0;
+            printf("Decode format is not correct!\n");
+        }
+        break;
+    }
+
+    if (SkImageDecoder::DecodeMemory(buff, size, skBM, prefConfig,
+            SkImageDecoder::kDecodePixels_Mode) == false) {
+        printf("%s():%d:: Failed during jpeg decode\n",__FUNCTION__,__LINE__);
+        return BAD_VALUE;
+    }
+
+#endif
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : encodeJPEG
+ *
+ * DESCRIPTION: encode the decoded input buffer.
+ *
+ * PARAMETERS :
+ *   @stream  : SkWStream
+ *   @bitmap  : SkBitmap decoded image to encode
+ *   @path    : File path
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+
+ *==========================================================================*/
+status_t CameraContext::encodeJPEG(SkWStream * stream,
+    const SkBitmap *bitmap, String8 path)
+{
+    int qFactor = 100;
+
+    skJpegEnc = SkImageEncoder::Create(SkImageEncoder::kJPEG_Type);
+    if (!skJpegEnc) {
+        ALOGE(" skJpegEnc is NULL\n");
+        return BAD_VALUE;
+    }
+
+    if (skJpegEnc->encodeStream(stream, *bitmap, qFactor) == false) {
+        return BAD_VALUE;
+    }
+
+    FILE *fh = fopen(path.string(), "r+");
+    if ( !fh ) {
+        printf("Could not open file %s\n", path.string());
+        return BAD_VALUE;
+    }
+
+    fseek(fh, 0, SEEK_END);
+    size_t len = (size_t)ftell(fh);
+    rewind(fh);
+
+    if( !len ) {
+        printf("File %s is empty !\n", path.string());
+        fclose(fh);
+        return BAD_VALUE;
+    }
+
+    unsigned char *buff = (unsigned char*)malloc(len);
+    if (!buff) {
+        printf("Cannot allocate memory for buffer reading!\n");
+        return BAD_VALUE;
+    }
+
+    size_t readSize = fread(buff, 1, len, fh);
+    if (readSize != len) {
+        printf("Reading error\n");
+        return BAD_VALUE;
+    }
+
+    status_t ret = ReadSectionsFromBuffer(buff, len, READ_ALL);
+    if (ret != NO_ERROR) {
+        printf("Cannot read sections from buffer\n");
+        DiscardData();
+        DiscardSections();
+        return BAD_VALUE;
+    }
+    free(buff);
+    rewind(fh);
+
+    unsigned char temp = 0xff;
+    size_t writeSize = fwrite(&temp, sizeof(unsigned char), 1, fh);
+    if (1 != writeSize) {
+        printf("Writing error\n");
+    }
+    temp = 0xd8;
+    fwrite(&temp, sizeof(unsigned char), 1, fh);
+
+    for (size_t i = 0; i < mSectionsRead; i++) {
+        switch((mSections[i].Type)) {
+
+        case 0x123:
+            fwrite(mSections[i].Data, sizeof(unsigned char),
+                mSections[i].Size, fh);
+            break;
+
+        case 0xe0:
+            temp = 0xff;
+            fwrite(&temp, sizeof(unsigned char), 1, fh);
+            temp = 0xe1;
+            fwrite(&temp, sizeof(unsigned char), 1, fh);
+            fwrite(mJEXIFSection.Data, sizeof(unsigned char),
+                mJEXIFSection.Size, fh);
+            break;
+
+        default:
+            temp = 0xff;
+            fwrite(&temp, sizeof(unsigned char), 1, fh);
+            fwrite(&mSections[i].Type, sizeof(unsigned char), 1, fh);
+            fwrite(mSections[i].Data, sizeof(unsigned char),
+                mSections[i].Size, fh);
+            break;
+        }
+    }
+    fseek(fh, 0, SEEK_END);
+    len = (size_t)ftell(fh);
+    rewind(fh);
+    printf("%s: buffer=%p, size=%zu stored at %s\n",
+            __FUNCTION__, bitmap->getPixels(), len, path.string());
+
+    free(mJEXIFSection.Data);
+    DiscardData();
+    DiscardSections();
+    fclose(fh);
+    ret = NO_ERROR;
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : ReadSectionsFromBuffer
+ *
+ * DESCRIPTION: read all jpeg sections of input buffer.
+ *
+ * PARAMETERS :
+ *   @mem : buffer to read from Metadata Sections
+ *   @buffer_size: buffer size
+ *   @ReadMode: Read mode - all, jpeg or exif
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::ReadSectionsFromBuffer (unsigned char *buffer,
+        size_t buffer_size, ReadMode_t ReadMode)
+{
+    int a;
+    size_t pos = 0;
+    int HaveCom = 0;
+    mSectionsAllocated = 10;
+
+    mSections = (Sections_t *)malloc(sizeof(Sections_t) * mSectionsAllocated);
+    if (!mSections) {
+        printf(" not enough memory\n");
+        return BAD_VALUE;
+    }
+
+    if (!buffer) {
+        printf("Input buffer is null\n");
+        return BAD_VALUE;
+    }
+
+    if (buffer_size < 1) {
+        printf("Input size is 0\n");
+        return BAD_VALUE;
+    }
+
+    a = (int) buffer[pos++];
+
+    if (a != 0xff || buffer[pos++] != M_SOI){
+        printf("No valid image\n");
+        return BAD_VALUE;
+    }
+
+    for(;;){
+        size_t itemlen;
+        int marker = 0;
+        size_t ll,lh;
+        unsigned char * Data;
+
+        CheckSectionsAllocated();
+
+        // The call to CheckSectionsAllocated() may reallocate mSections
+        // so need to check for NULL again.
+        if (mSections == NULL) {
+            printf(" not enough memory\n");
+            return BAD_VALUE;
+        }
+
+        for (a = 0; a <= 16; a++){
+            marker = buffer[pos++];
+            if (marker != 0xff) break;
+
+            if (a >= 16){
+                fprintf(stderr,"too many padding bytes\n");
+                return BAD_VALUE;
+            }
+        }
+
+        mSections[mSectionsRead].Type = marker;
+
+        // Read the length of the section.
+        lh = buffer[pos++];
+        ll = buffer[pos++];
+
+        itemlen = (lh << 8) | ll;
+
+        if (itemlen < 2) {
+            ALOGE("invalid marker");
+            return BAD_VALUE;
+        }
+
+        mSections[mSectionsRead].Size = itemlen;
+
+        Data = (unsigned char *)malloc(itemlen);
+        if (Data == NULL) {
+            ALOGE("Could not allocate memory");
+            return NO_MEMORY;
+        }
+        mSections[mSectionsRead].Data = Data;
+
+        // Store first two pre-read bytes.
+        Data[0] = (unsigned char)lh;
+        Data[1] = (unsigned char)ll;
+
+        if (pos+itemlen-2 > buffer_size) {
+            ALOGE("Premature end of file?");
+            return BAD_VALUE;
+        }
+
+        memcpy(Data+2, buffer+pos, itemlen-2); // Read the whole section.
+        pos += itemlen-2;
+
+        mSectionsRead += 1;
+
+        switch(marker){
+
+            case M_SOS:   // stop before hitting compressed data
+                // If reading entire image is requested, read the rest of the
+                // data.
+                if (ReadMode & READ_IMAGE){
+                    size_t size;
+                    // Determine how much file is left.
+                    size = buffer_size - pos;
+
+                    if (size < 1) {
+                        ALOGE("could not read the rest of the image");
+                        return BAD_VALUE;
+                    }
+                    Data = (unsigned char *)malloc(size);
+                    if (Data == NULL) {
+                        ALOGE("%d: could not allocate data for entire "
+                                "image size: %d", __LINE__, size);
+                        return BAD_VALUE;
+                    }
+
+                    memcpy(Data, buffer+pos, size);
+
+                    CheckSectionsAllocated();
+
+                    // The call to CheckSectionsAllocated()
+                    // may reallocate mSections
+                    // so need to check for NULL again.
+                    if (mSections == NULL) {
+                        printf(" not enough memory\n");
+                        return BAD_VALUE;
+                    }
+
+                    mSections[mSectionsRead].Data = Data;
+                    mSections[mSectionsRead].Size = size;
+                    mSections[mSectionsRead].Type = PSEUDO_IMAGE_MARKER;
+                    mSectionsRead ++;
+                    mHaveAll = 1;
+                }
+                return NO_ERROR;
+
+            case M_EOI:   // in case it's a tables-only JPEG stream
+                ALOGE("No image in jpeg!\n");
+                return BAD_VALUE;
+
+            case M_COM: // Comment section
+                if (HaveCom || ((ReadMode & READ_METADATA) == 0)){
+                    // Discard this section.
+                    free(mSections[--mSectionsRead].Data);
+                }
+                break;
+
+            case M_JFIF:
+                // Regular jpegs always have this tag, exif images have the
+                // exif marker instead, although ACDsee will write images
+                // with both markers.
+                // this program will re-create this marker on absence of exif
+                // marker.
+                // hence no need to keep the copy from the file.
+                if (ReadMode & READ_METADATA){
+                    if (memcmp(Data+2, "JFIF", 4) == 0) {
+                        break;
+                    }
+                    free(mSections[--mSectionsRead].Data);
+                }
+                break;
+
+            case M_EXIF:
+                // There can be different section using the same marker.
+                if (ReadMode & READ_METADATA){
+                    if (memcmp(Data+2, "Exif", 4) == 0){
+                        break;
+                    }else if (memcmp(Data+2, "http:", 5) == 0){
+                        // Change tag for internal purposes.
+                        mSections[mSectionsRead-1].Type = M_XMP;
+                        break;
+                    }
+                }
+                // Otherwise, discard this section.
+                free(mSections[--mSectionsRead].Data);
+                break;
+
+            case M_IPTC:
+                if (ReadMode & READ_METADATA){
+                    // Note: We just store the IPTC section.
+                    // Its relatively straightforward
+                    // and we don't act on any part of it,
+                    // so just display it at parse time.
+                }else{
+                    free(mSections[--mSectionsRead].Data);
+                }
+                break;
+
+            case M_SOF0:
+            case M_SOF1:
+            case M_SOF2:
+            case M_SOF3:
+            case M_SOF5:
+            case M_SOF6:
+            case M_SOF7:
+            case M_SOF9:
+            case M_SOF10:
+            case M_SOF11:
+            case M_SOF13:
+            case M_SOF14:
+            case M_SOF15:
+                break;
+            default:
+                // Skip any other sections.
+                break;
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : CheckSectionsAllocated
+ *
+ * DESCRIPTION: Check allocated jpeg sections.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+
+ *==========================================================================*/
+void CameraContext::CheckSectionsAllocated(void)
+{
+    if (mSectionsRead > mSectionsAllocated){
+        ALOGE("allocation screw up");
+    }
+    if (mSectionsRead >= mSectionsAllocated){
+        mSectionsAllocated += mSectionsAllocated +1;
+        mSections = (Sections_t *)realloc(mSections,
+            sizeof(Sections_t) * mSectionsAllocated);
+        if (mSections == NULL){
+            ALOGE("could not allocate data for entire image");
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : findSection
+ *
+ * DESCRIPTION: find the desired Section of the JPEG buffer.
+ *
+ * PARAMETERS :
+ *  @SectionType: Section type
+ *
+ * RETURN     : return the found section
+
+ *==========================================================================*/
+CameraContext::Sections_t *CameraContext::FindSection(int SectionType)
+{
+    for (unsigned int a = 0; a < mSectionsRead; a++) {
+        if (mSections[a].Type == SectionType){
+            return &mSections[a];
+        }
+    }
+    // Could not be found.
+    return NULL;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : DiscardData
+ *
+ * DESCRIPTION: free the data buffers of all sections read so far and reset state
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+
+ *==========================================================================*/
+void CameraContext::DiscardData()
+{
+    for (unsigned int a = 0; a < mSectionsRead; a++) {
+        free(mSections[a].Data);
+    }
+
+    mSectionsRead = 0;
+    mHaveAll = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : DiscardSections
+ *
+ * DESCRIPTION: Discard allocated sections
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+
+ *==========================================================================*/
+void CameraContext::DiscardSections()
+{
+    free(mSections);
+    mSectionsAllocated = 0;
+    mHaveAll = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : notify
+ *
+ * DESCRIPTION: notification callback from the camera service; logs the
+ *              event and reacts to preview-frame fds, focus, shutter and
+ *              error notifications.
+ *
+ * PARAMETERS :
+ *   @msgType : type of callback
+ *   @ext1: extended parameters
+ *   @ext2: extended parameters
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void CameraContext::notify(int32_t msgType, int32_t ext1, int32_t ext2)
+{
+    printf("Notify cb: %d %d %d\n", msgType, ext1, ext2);
+
+    bool previewFd = (msgType & CAMERA_MSG_PREVIEW_FRAME) != 0;
+#ifndef VANILLA_HAL
+    previewFd = previewFd && (ext1 == CAMERA_FRAME_DATA_FD);
+#endif
+    if (previewFd) {
+        // ext2 carries the frame fd; dup/close exercises fd passing only.
+        int fd = dup(ext2);
+        printf("notify Preview Frame fd: %d dup fd: %d\n", ext2, fd);
+        close(fd);
+    }
+
+    if (msgType & CAMERA_MSG_FOCUS) {
+        printf("AutoFocus %s \n",
+               (ext1) ? "OK" : "FAIL");
+    }
+
+    if (msgType & CAMERA_MSG_SHUTTER) {
+        printf("Shutter done \n");
+    }
+
+    if (msgType & CAMERA_MSG_ERROR) {
+        printf("Camera Test CAMERA_MSG_ERROR\n");
+        stopPreview();
+        closeCamera();
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : postData
+ *
+ * DESCRIPTION: handles data callbacks: preview frames, raw/postview
+ *              notifications, compressed (JPEG) images — including the
+ *              two-buffer picture-in-picture (PiP) stitch path — and
+ *              face-detection metadata.
+ *
+ * PARAMETERS :
+ *   @msgType : type of callback
+ *   @dataPtr: buffer data
+ *   @metadata: additional metadata where available
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void CameraContext::postData(int32_t msgType,
+                             const sp<IMemory>& dataPtr,
+                             camera_frame_metadata_t *metadata)
+{
+    mInterpr->PiPLock();
+    Size currentPictureSize = mSupportedPictureSizes.itemAt(
+        mCurrentPictureSizeIdx);
+    unsigned char *buff = NULL;
+    size_t size;
+    status_t ret = 0;
+
+    // Terminate with ';' — the original comma operator silently chained
+    // this into the printf below.
+    memset(&mJEXIFSection, 0, sizeof(mJEXIFSection));
+
+    printf("Data cb: %d\n", msgType);
+
+    if ( msgType & CAMERA_MSG_PREVIEW_FRAME ) {
+        previewCallback(dataPtr);
+    }
+
+    if ( msgType & CAMERA_MSG_RAW_IMAGE ) {
+        printf("RAW done \n");
+    }
+
+    if (msgType & CAMERA_MSG_POSTVIEW_FRAME) {
+        printf("Postview frame \n");
+    }
+
+    if (msgType & CAMERA_MSG_COMPRESSED_IMAGE ) {
+        String8 jpegPath;
+        jpegPath = jpegPath.format(QCAMERA_DUMP_FRM_LOCATION"img_%d.jpg",
+                JpegIdx);
+        if (!mPiPCapture) {
+            // Normal capture case
+            printf("JPEG done\n");
+            saveFile(dataPtr, jpegPath);
+            JpegIdx++;
+        } else {
+            // PiP capture case
+            SkFILEWStream *wStream;
+            ret = decodeJPEG(dataPtr, &skBMtmp);
+            if (NO_ERROR != ret) {
+                printf("Error in decoding JPEG!\n");
+                mInterpr->PiPUnlock();
+                return;
+            }
+
+            mWidthTmp = currentPictureSize.width;
+            mHeightTmp = currentPictureSize.height;
+            PiPPtrTmp = dataPtr;
+            // If there are two jpeg buffers
+            if (mPiPIdx == 1) {
+                printf("PiP done\n");
+
+                // Find the capture with higher width and height and read
+                // its jpeg sections
+                // NOTE(review): both branches below read PiPPtrTmp of the
+                // *current* camera; confirm whether the larger capture's
+                // buffer was meant to be selected from camera[0]/camera[1].
+                if ((mInterpr->camera[0]->mWidthTmp * mInterpr->camera[0]->mHeightTmp) >
+                        (mInterpr->camera[1]->mWidthTmp * mInterpr->camera[1]->mHeightTmp)) {
+                    buff = (unsigned char *)PiPPtrTmp->pointer();
+                    size= PiPPtrTmp->size();
+                } else if ((mInterpr->camera[0]->mWidthTmp * mInterpr->camera[0]->mHeightTmp) <
+                        (mInterpr->camera[1]->mWidthTmp * mInterpr->camera[1]->mHeightTmp)) {
+                    buff = (unsigned char *)PiPPtrTmp->pointer();
+                    size= PiPPtrTmp->size();
+                } else {
+                    printf("Cannot take PiP. Images are with the same width"
+                            " and height size!!!\n");
+                    mInterpr->PiPUnlock();
+                    return;
+                }
+
+                if (buff != NULL && size != 0) {
+                    ret = ReadSectionsFromBuffer(buff, size, READ_ALL);
+                    if (ret != NO_ERROR) {
+                        printf("Cannot read sections from buffer\n");
+                        DiscardData();
+                        DiscardSections();
+                        mInterpr->PiPUnlock();
+                        return;
+                    }
+
+                    mJEXIFTmp = FindSection(M_EXIF);
+                    if (!mJEXIFTmp) {
+                        // BUGFIX: correct the copy-pasted log message and
+                        // release the PiP lock before bailing out.
+                        ALOGE("Cannot find EXIF section\n");
+                        DiscardData();
+                        DiscardSections();
+                        mInterpr->PiPUnlock();
+                        return;
+                    }
+                    mJEXIFSection = *mJEXIFTmp;
+                    mJEXIFSection.Data = (unsigned char*)malloc(mJEXIFTmp->Size);
+                    if (!mJEXIFSection.Data) {
+                        ALOGE(" Not enough memory\n");
+                        DiscardData();
+                        DiscardSections();
+                        mInterpr->PiPUnlock();  // BUGFIX: was returning with lock held
+                        return;
+                    }
+                    memcpy(mJEXIFSection.Data,
+                        mJEXIFTmp->Data, mJEXIFTmp->Size);
+                    DiscardData();
+                    DiscardSections();
+
+                    wStream = new SkFILEWStream(jpegPath.string());
+                    skBMDec = PiPCopyToOneFile(&mInterpr->camera[0]->skBMtmp,
+                            &mInterpr->camera[1]->skBMtmp);
+                    if (!skBMDec) {
+                        ALOGE("skBMDec is null\n");
+                        delete wStream;
+                        mInterpr->PiPUnlock();  // BUGFIX: was returning with lock held
+                        return;
+                    }
+
+                    if (encodeJPEG(wStream, skBMDec, jpegPath) != false) {
+                        printf("%s():%d:: Failed during jpeg encode\n",
+                                __FUNCTION__,__LINE__);
+                        delete wStream;  // BUGFIX: stream leaked on this path
+                        mInterpr->PiPUnlock();
+                        return;
+                    }
+                    mPiPIdx = 0;
+                    JpegIdx++;
+                    delete wStream;
+                }
+            } else {
+                mPiPIdx++;
+            }
+            disablePiPCapture();
+        }
+    }
+
+    if ((msgType & CAMERA_MSG_PREVIEW_METADATA) && (NULL != metadata)) {
+        printf("Face detected %d \n", metadata->number_of_faces);
+    }
+    mInterpr->PiPUnlock();
+
+}
+
+/*===========================================================================
+ * FUNCTION   : postDataTimestamp
+ *
+ * DESCRIPTION: recording data callback; this test harness only logs the
+ *              frame's message type, timestamp and buffer pointer.
+ *
+ * PARAMETERS :
+ *   @timestamp : timestamp of buffer
+ *   @msgType : type of buffer
+ *   @dataPtr : buffer data
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void CameraContext::postDataTimestamp(nsecs_t timestamp,
+                                      int32_t msgType,
+                                      const sp<IMemory>& dataPtr)
+{
+    long long int ts = (long long int)timestamp;
+    printf("Recording cb: %d %lld %p\n", msgType, ts, dataPtr.get());
+}
+
+/*===========================================================================
+ * FUNCTION   : dataCallbackTimestamp
+ *
+ * DESCRIPTION: handles recording callbacks. Used for ViV recording: the
+ *              source camera's frame is cached; the destination camera's
+ *              frame gets the cached frame composited in, and the result
+ *              is queued to the recorder's ANativeWindow.
+ *
+ * PARAMETERS :
+ *   @timestamp : timestamp of buffer
+ *   @msgType : type of buffer
+ *   @dataPtr : buffer data
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void CameraContext::dataCallbackTimestamp(nsecs_t timestamp,
+        int32_t msgType,
+        const sp<IMemory>& dataPtr)
+{
+    mInterpr->ViVLock();
+    // Not needed check. Just avoiding warnings of not used variables.
+    if (timestamp > 0)
+        timestamp = 0;
+    // Not needed check. Just avoiding warnings of not used variables.
+    if (msgType > 0)
+        msgType = 0;
+    size_t i = 0;
+    void * srcBuff = NULL;
+    void * dstBuff = NULL;
+
+    size_t srcYStride = 0, dstYStride = 0;
+    size_t srcUVStride = 0, dstUVStride = 0;
+    size_t srcYScanLines = 0, dstYScanLines = 0;
+    size_t srcUVScanLines = 0, dstUVScanLines = 0;
+    size_t srcOffset = 0, dstOffset = 0;
+    size_t srcBaseOffset = 0;
+    size_t dstBaseOffset = 0;
+    Size currentVideoSize = mSupportedVideoSizes.itemAt(mCurrentVideoSizeIdx);
+    status_t err = NO_ERROR;
+    ANativeWindowBuffer* anb = NULL;
+
+    dstBuff = (void *) dataPtr->pointer();
+    if (NULL == dstBuff) {
+        printf("Cannot access destination buffer!!!\n");
+        // BUGFIX: return the frame to the camera even on error, otherwise
+        // the recording pipeline eventually runs out of buffers.
+        mCamera->releaseRecordingFrame(dataPtr);
+        mInterpr->ViVUnlock();
+        return;
+    }
+
+    if (mCameraIndex == mInterpr->mViVVid.sourceCameraID) {
+        // Source camera: cache this frame (and its geometry) for the
+        // destination camera to composite.
+        srcYStride = calcStride(currentVideoSize.width);
+        srcUVStride = calcStride(currentVideoSize.width);
+        srcYScanLines = calcYScanLines(currentVideoSize.height);
+        srcUVScanLines = calcUVScanLines(currentVideoSize.height);
+        mInterpr->mViVBuff.srcWidth = (size_t)currentVideoSize.width;
+        mInterpr->mViVBuff.srcHeight = (size_t)currentVideoSize.height;
+
+
+        mInterpr->mViVBuff.YStride = srcYStride;
+        mInterpr->mViVBuff.UVStride = srcUVStride;
+        mInterpr->mViVBuff.YScanLines = srcYScanLines;
+        mInterpr->mViVBuff.UVScanLines = srcUVScanLines;
+
+        memcpy( mInterpr->mViVBuff.buff, dstBuff,
+            mInterpr->mViVBuff.buffSize);
+
+        mInterpr->mViVVid.isBuffValid = true;
+    } else if (mCameraIndex == mInterpr->mViVVid.destinationCameraID) {
+        if(mInterpr->mViVVid.isBuffValid == true) {
+            dstYStride = calcStride(currentVideoSize.width);
+            dstUVStride = calcStride(currentVideoSize.width);
+            dstYScanLines = calcYScanLines(currentVideoSize.height);
+            dstUVScanLines = calcUVScanLines(currentVideoSize.height);
+
+            srcYStride = mInterpr->mViVBuff.YStride;
+            srcUVStride = mInterpr->mViVBuff.UVStride;
+            srcYScanLines = mInterpr->mViVBuff.YScanLines;
+            srcUVScanLines = mInterpr->mViVBuff.UVScanLines;
+
+
+            // Copy the cached source frame into this frame: Y plane rows,
+            // then the interleaved UV plane rows.
+            for (i = 0; i < mInterpr->mViVBuff.srcHeight; i++) {
+                srcOffset = i*srcYStride;
+                dstOffset = i*dstYStride;
+                memcpy((unsigned char *) dstBuff + dstOffset,
+                    (unsigned char *) mInterpr->mViVBuff.buff +
+                    srcOffset, mInterpr->mViVBuff.srcWidth);
+            }
+            srcBaseOffset = srcYStride * srcYScanLines;
+            dstBaseOffset = dstYStride * dstYScanLines;
+            for (i = 0; i < mInterpr->mViVBuff.srcHeight / 2; i++) {
+                srcOffset = i*srcUVStride + srcBaseOffset;
+                dstOffset = i*dstUVStride + dstBaseOffset;
+                memcpy((unsigned char *) dstBuff + dstOffset,
+                    (unsigned char *) mInterpr->mViVBuff.buff +
+                    srcOffset, mInterpr->mViVBuff.srcWidth);
+            }
+
+            err = native_window_dequeue_buffer_and_wait(
+                mInterpr->mViVVid.ANW.get(),&anb);
+            if (err != NO_ERROR) {
+                printf("Cannot dequeue anb for sensor %d!!!\n", mCameraIndex);
+                mCamera->releaseRecordingFrame(dataPtr);  // BUGFIX: frame leak
+                mInterpr->ViVUnlock();
+                return;
+            }
+            mInterpr->mViVVid.graphBuf = new GraphicBuffer(anb, false);
+            if(NULL == mInterpr->mViVVid.graphBuf.get()) {
+                printf("Invalid Graphic buffer\n");
+                mCamera->releaseRecordingFrame(dataPtr);  // BUGFIX: frame leak
+                mInterpr->ViVUnlock();
+                return;
+            }
+            err = mInterpr->mViVVid.graphBuf->lock(
+                GRALLOC_USAGE_SW_WRITE_OFTEN,
+                (void**)(&mInterpr->mViVVid.mappedBuff));
+            if (err != NO_ERROR) {
+                printf("Graphic buffer could not be locked %d!!!\n", err);
+                mCamera->releaseRecordingFrame(dataPtr);  // BUGFIX: frame leak
+                mInterpr->ViVUnlock();
+                return;
+            }
+
+            // The composited frame becomes the source for the copy into
+            // the recorder's graphic buffer.
+            srcYStride = dstYStride;
+            srcUVStride = dstUVStride;
+            srcYScanLines = dstYScanLines;
+            srcUVScanLines = dstUVScanLines;
+            srcBuff = dstBuff;
+
+            for (i = 0; i < (size_t)currentVideoSize.height; i++) {
+                srcOffset = i*srcYStride;
+                dstOffset = i*dstYStride;
+                memcpy((unsigned char *) mInterpr->mViVVid.mappedBuff +
+                    dstOffset, (unsigned char *) srcBuff +
+                    srcOffset, (size_t)currentVideoSize.width);
+            }
+
+            srcBaseOffset = srcYStride * srcYScanLines;
+            dstBaseOffset = dstUVStride * (size_t)currentVideoSize.height;
+
+            for (i = 0; i < (size_t)currentVideoSize.height / 2; i++) {
+                srcOffset = i*srcUVStride + srcBaseOffset;
+                dstOffset = i*dstUVStride + dstBaseOffset;
+                memcpy((unsigned char *) mInterpr->mViVVid.mappedBuff +
+                    dstOffset, (unsigned char *) srcBuff +
+                    srcOffset, (size_t)currentVideoSize.width);
+            }
+
+
+            mInterpr->mViVVid.graphBuf->unlock();
+
+            err = mInterpr->mViVVid.ANW->queueBuffer(
+                mInterpr->mViVVid.ANW.get(), anb, -1);
+            if(err)
+                printf("Failed to enqueue buffer to recorder!!!\n");
+        }
+    }
+    mCamera->releaseRecordingFrame(dataPtr);
+
+    mInterpr->ViVUnlock();
+}
+
+/*===========================================================================
+ * FUNCTION   : ViVEncoderThread
+ *
+ * DESCRIPTION: Creates a separate thread for ViV recording
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : 0 (NO_ERROR) on success, pthread error code on failure
+ *==========================================================================*/
+status_t Interpreter::ViVEncoderThread()
+{
+    int ret = NO_ERROR;
+    pthread_attr_t attr;
+    pthread_attr_init(&attr);
+
+    ret = pthread_create(&mViVEncThread, &attr, ThreadWrapper, this);
+    // BUGFIX: do not let pthread_attr_destroy()'s result overwrite a
+    // pthread_create() failure — the caller would see NO_ERROR for a
+    // thread that was never started.
+    (void) pthread_attr_destroy(&attr);
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : ThreadWrapper
+ *
+ * DESCRIPTION: static trampoline passed to pthread_create; forwards to the
+ *              Interpreter instance's ViVEncode loop.
+ *
+ * PARAMETERS : Interpreter context
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void *Interpreter::ThreadWrapper(void *context) {
+    static_cast<Interpreter *>(context)->ViVEncode();
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : ViVEncode
+ *
+ * DESCRIPTION: Thread for ViV encode. Buffers from video codec are sent to
+ *              muxer and saved in a file.
+ *
+ * PARAMETERS : Interpreter context
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void Interpreter::ViVEncode()
+{
+    status_t err = NO_ERROR;
+    ssize_t trackIdx = -1;
+    uint32_t debugNumFrames = 0;
+
+    size_t bufIndex, offset, size;
+    int64_t ptsUsec;
+    uint32_t flags;
+    bool DoRecording = true;
+
+
+    err = mTestContext->mViVVid.codec->getOutputBuffers(
+        &mTestContext->mViVVid.buffers);
+    if (err != NO_ERROR) {
+        printf("Unable to get output buffers (err=%d)\n", err);
+    }
+
+    // Drain loop: blocks (-1 timeout) on the codec's output queue until the
+    // codec is torn down, which surfaces as INVALID_OPERATION below.
+    while (DoRecording) {
+        err = mTestContext->mViVVid.codec->dequeueOutputBuffer(
+            &bufIndex,
+            &offset,
+            &size,
+            &ptsUsec,
+            &flags, -1);
+
+        switch (err) {
+
+        case NO_ERROR:
+            // got a buffer
+            if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) {
+                // ignore this -- we passed the CSD into MediaMuxer when
+                // we got the format change notification
+                size = 0;
+            }
+            if (size != 0) {
+                // If the virtual display isn't providing us with timestamps,
+                // use the current time.
+                if (ptsUsec == 0) {
+                    ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
+                }
+
+                // The MediaMuxer docs are unclear, but it appears that we
+                // need to pass either the full set of BufferInfo flags, or
+                // (flags & BUFFER_FLAG_SYNCFRAME).
+                // NOTE(review): trackIdx stays -1 until INFO_FORMAT_CHANGED
+                // is handled below; if a data buffer ever arrived first, the
+                // (size_t) cast would produce a huge track index — confirm
+                // the codec always reports the format before data.
+                err = mTestContext->mViVVid.muxer->writeSampleData(
+                    mTestContext->mViVVid.buffers[bufIndex],
+                    (size_t)trackIdx,
+                    ptsUsec,
+                    flags);
+                if (err != NO_ERROR) {
+                    fprintf(stderr, "Failed writing data to muxer (err=%d)\n",
+                            err);
+                }
+                debugNumFrames++;
+            }
+            err = mTestContext->mViVVid.codec->releaseOutputBuffer(bufIndex);
+            if (err != NO_ERROR) {
+                fprintf(stderr, "Unable to release output buffer (err=%d)\n",
+                        err);
+            }
+            if ((flags & MediaCodec::BUFFER_FLAG_EOS) != 0) {
+                // Not expecting EOS from SurfaceFlinger.  Go with it.
+                // NOTE(review): EOS deliberately does not stop the loop
+                // (the exit below is left commented out); shutdown relies
+                // on INVALID_OPERATION instead.
+                printf("Received end-of-stream\n");
+                //DoRecording = false;
+            }
+            break;
+        case -EAGAIN:                       // INFO_TRY_AGAIN_LATER
+            ALOGV("Got -EAGAIN, looping");
+            break;
+        case INFO_FORMAT_CHANGED:           // INFO_OUTPUT_FORMAT_CHANGED
+        {
+            // format includes CSD, which we must provide to muxer
+            sp<AMessage> newFormat;
+            mTestContext->mViVVid.codec->getOutputFormat(&newFormat);
+            trackIdx = mTestContext->mViVVid.muxer->addTrack(newFormat);
+            err = mTestContext->mViVVid.muxer->start();
+            if (err != NO_ERROR) {
+                printf("Unable to start muxer (err=%d)\n", err);
+            }
+        }
+        break;
+        case INFO_OUTPUT_BUFFERS_CHANGED:   // INFO_OUTPUT_BUFFERS_CHANGED
+            // not expected for an encoder; handle it anyway
+            ALOGV("Encoder buffers changed");
+            err = mTestContext->mViVVid.codec->getOutputBuffers(
+                &mTestContext->mViVVid.buffers);
+            if (err != NO_ERROR) {
+                printf("Unable to get new output buffers (err=%d)\n", err);
+            }
+        break;
+        case INVALID_OPERATION:
+            // Codec released/stopped by the main thread — normal exit path.
+            DoRecording = false;
+        break;
+        default:
+            printf("Got weird result %d from dequeueOutputBuffer\n", err);
+        break;
+        }
+    }
+
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : calcBufferSize
+ *
+ * DESCRIPTION: Temp buffer size calculation. Temp buffer is used to store
+ *              the buffer from the camera with smaller resolution. It is
+ *              copied to the buffer from camera with higher resolution.
+ *
+ * PARAMETERS :
+ *   @width   : video size width
+ *   @height  : video size height
+ *
+ * RETURN     : total buffer size in bytes, 0 for a degenerate size
+ *==========================================================================*/
+size_t CameraContext::calcBufferSize(int width, int height)
+{
+    if (!width || !height) {
+        return 0;
+    }
+
+    const size_t uvAlignment = 4096;
+    size_t yStride      = calcStride(width);
+    size_t uvStride     = calcStride(width);
+    size_t yScanlines   = calcYScanLines(height);
+    size_t uvScanlines  = calcUVScanLines(height);
+
+    // Y plane plus UV plane (padded for alignment), rounded up to a page.
+    size_t yPlane  = yStride * yScanlines;
+    size_t uvPlane = uvStride * uvScanlines + uvAlignment;
+
+    return VIDEO_BUF_ALLIGN(yPlane + uvPlane, 4096);
+}
+
+/*===========================================================================
+ * FUNCTION   : calcStride
+ *
+ * DESCRIPTION: Temp buffer stride calculation: width rounded up to a
+ *              128-byte boundary.
+ *
+ * PARAMETERS :
+ *   @width   : video size width
+ *
+ * RETURN     : size_t
+ *==========================================================================*/
+size_t CameraContext::calcStride(int width)
+{
+    if (!width) {
+        return 0;
+    }
+
+    const size_t alignment = 128;
+    return VIDEO_BUF_ALLIGN((size_t)width, alignment);
+}
+
+/*===========================================================================
+ * FUNCTION   : calcYScanLines
+ *
+ * DESCRIPTION: Temp buffer scanlines calculation for Y plane: height
+ *              rounded up to a 32-line boundary.
+ *
+ * PARAMETERS :
+ *   @height  : video size height
+ *
+ * RETURN     : size_t
+ *==========================================================================*/
+size_t CameraContext::calcYScanLines(int height)
+{
+    if (!height) {
+        return 0;
+    }
+
+    const size_t alignment = 32;
+    return VIDEO_BUF_ALLIGN((size_t)height, alignment);
+}
+
+/*===========================================================================
+ * FUNCTION   : calcUVScanLines
+ *
+ * DESCRIPTION: Temp buffer scanlines calculation for UV plane: half the
+ *              height (rounded up) aligned to a 16-line boundary.
+ *
+ * PARAMETERS :
+ *   @height  : video size height
+ *
+ * RETURN     : size_t
+ *==========================================================================*/
+size_t CameraContext::calcUVScanLines(int height)
+{
+    if (!height) {
+        return 0;
+    }
+
+    const size_t alignment = 16;
+    size_t halfHeight = (size_t)((height + 1) >> 1);
+    return VIDEO_BUF_ALLIGN(halfHeight, alignment);
+}
+
+/*===========================================================================
+ * FUNCTION   : printSupportedParams
+ *
+ * DESCRIPTION: dump common supported parameters
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void CameraContext::printSupportedParams()
+{
+    // Fetch every capability string up front; any of these may legitimately
+    // be NULL when the HAL does not advertise the key, hence the NULL
+    // guards in every printf below.
+    const char *camera_ids = mParams.get("camera-indexes");
+    const char *pic_sizes = mParams.get(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES);
+    const char *pic_formats = mParams.get(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS);
+    const char *preview_sizes = mParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES);
+    const char *video_sizes = mParams.get(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES);
+    const char *preview_formats = mParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS);
+    const char *frame_rates = mParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
+    const char *thumb_sizes = mParams.get(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES);
+    const char *wb_modes = mParams.get(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE);
+    const char *effects = mParams.get(CameraParameters::KEY_SUPPORTED_EFFECTS);
+    const char *scene_modes = mParams.get(CameraParameters::KEY_SUPPORTED_SCENE_MODES);
+    const char *focus_modes = mParams.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
+    const char *antibanding_modes = mParams.get(CameraParameters::KEY_SUPPORTED_ANTIBANDING);
+    const char *flash_modes = mParams.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES);
+    int focus_areas = mParams.getInt(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS);
+    const char *fps_ranges = mParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE);
+    const char *focus_distances = mParams.get(CameraParameters::KEY_FOCUS_DISTANCES);
+
+    printf("\n\r\tSupported Cameras: %s",
+           (camera_ids != NULL)? camera_ids : "NULL");
+    printf("\n\r\tSupported Picture Sizes: %s",
+           (pic_sizes != NULL)? pic_sizes : "NULL");
+    printf("\n\r\tSupported Picture Formats: %s",
+           (pic_formats != NULL)? pic_formats : "NULL");
+    printf("\n\r\tSupported Preview Sizes: %s",
+           (preview_sizes != NULL)? preview_sizes : "NULL");
+    printf("\n\r\tSupported Video Sizes: %s",
+            (video_sizes != NULL)? video_sizes : "NULL");
+    printf("\n\r\tSupported Preview Formats: %s",
+           (preview_formats != NULL)? preview_formats : "NULL");
+    printf("\n\r\tSupported Preview Frame Rates: %s",
+           (frame_rates != NULL)? frame_rates : "NULL");
+    printf("\n\r\tSupported Thumbnail Sizes: %s",
+           (thumb_sizes != NULL)? thumb_sizes : "NULL");
+    printf("\n\r\tSupported Whitebalance Modes: %s",
+           (wb_modes != NULL)? wb_modes : "NULL");
+    printf("\n\r\tSupported Effects: %s",
+           (effects != NULL)? effects : "NULL");
+    printf("\n\r\tSupported Scene Modes: %s",
+           (scene_modes != NULL)? scene_modes : "NULL");
+    printf("\n\r\tSupported Focus Modes: %s",
+           (focus_modes != NULL)? focus_modes : "NULL");
+    printf("\n\r\tSupported Antibanding Options: %s",
+           (antibanding_modes != NULL)? antibanding_modes : "NULL");
+    printf("\n\r\tSupported Flash Modes: %s",
+           (flash_modes != NULL)? flash_modes : "NULL");
+    printf("\n\r\tSupported Focus Areas: %d", focus_areas);
+    printf("\n\r\tSupported FPS ranges : %s",
+           (fps_ranges != NULL)? fps_ranges : "NULL");
+    printf("\n\r\tFocus Distances: %s \n",
+           (focus_distances != NULL)? focus_distances : "NULL");
+}
+
+/*===========================================================================
+ * FUNCTION   : createPreviewSurface
+ *
+ * DESCRIPTION: helper function for creating preview surfaces; clamps the
+ *              requested size to the display, creates a SurfaceComposer
+ *              surface, and positions camera 0 top-left and any other
+ *              camera bottom-right.
+ *
+ * PARAMETERS :
+ *   @width : preview width
+ *   @height: preview height
+ *   @pixFormat : surface pixelformat
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::createPreviewSurface(int width, int height, int32_t pixFormat)
+{
+    int ret = NO_ERROR;
+    DisplayInfo dinfo;
+    sp<IBinder> display(SurfaceComposerClient::getBuiltInDisplay(
+                        ISurfaceComposer::eDisplayIdMain));
+    // NOTE(review): getDisplayInfo's return value is not checked; dinfo
+    // would be uninitialized on failure — confirm whether that can occur.
+    SurfaceComposerClient::getDisplayInfo(display, &dinfo);
+    uint32_t previewWidth, previewHeight;
+
+    if ((0 >= width) || (0 >= height)) {
+        printf("Bad preview surface size %dx%d\n", width, height);
+        return BAD_VALUE;
+    }
+
+    // Clamp the preview rectangle to the physical display size.
+    if ((int)dinfo.w < width) {
+        previewWidth = dinfo.w;
+    } else {
+        previewWidth = (unsigned int)width;
+    }
+
+    if ((int)dinfo.h < height) {
+        previewHeight = dinfo.h;
+    } else {
+        previewHeight = (unsigned int)height;
+    }
+
+    mClient = new SurfaceComposerClient();
+
+    if ( NULL == mClient.get() ) {
+        printf("Unable to establish connection to Surface Composer \n");
+        return NO_INIT;
+    }
+
+    mSurfaceControl = mClient->createSurface(String8("QCamera_Test"),
+                                             previewWidth,
+                                             previewHeight,
+                                             pixFormat,
+                                             0);
+    if ( NULL == mSurfaceControl.get() ) {
+        printf("Unable to create preview surface \n");
+        return NO_INIT;
+    }
+
+    mPreviewSurface = mSurfaceControl->getSurface();
+    if ( NULL != mPreviewSurface.get() ) {
+        // Batch layer/position/size/show into one composer transaction.
+        mClient->openGlobalTransaction();
+        ret |= mSurfaceControl->setLayer(0x7fffffff);
+        if ( mCameraIndex == 0 )
+            ret |= mSurfaceControl->setPosition(0, 0);
+        else
+            ret |= mSurfaceControl->setPosition((float)(dinfo.w - previewWidth),
+                    (float)(dinfo.h - previewHeight));
+
+        ret |= mSurfaceControl->setSize(previewWidth, previewHeight);
+        ret |= mSurfaceControl->show();
+        mClient->closeGlobalTransaction();
+
+        if ( NO_ERROR != ret ) {
+            printf("Preview surface configuration failed! \n");
+        }
+    } else {
+        ret = NO_INIT;
+    }
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : destroyPreviewSurface
+ *
+ * DESCRIPTION: closes previously open preview surface; tears down the
+ *              surface, its controller, and the composer client in that
+ *              order, dropping each strong reference.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::destroyPreviewSurface()
+{
+    if ( NULL != mPreviewSurface.get() ) {
+        mPreviewSurface.clear();
+    }
+
+    if ( NULL != mSurfaceControl.get() ) {
+        // Destroy the surface before releasing our reference to it.
+        mSurfaceControl->clear();
+        mSurfaceControl.clear();
+    }
+
+    if ( NULL != mClient.get() ) {
+        mClient->dispose();
+        mClient.clear();
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : CameraContext
+ *
+ * DESCRIPTION: camera context constructor; initializes all state to the
+ *              "nothing open / nothing captured" defaults and creates the
+ *              media recorder used for video tests.
+ *
+ * PARAMETERS :
+ *   @cameraIndex : index of the camera this context drives
+ *
+ * RETURN     : None
+ *==========================================================================*/
+CameraContext::CameraContext(int cameraIndex) :
+    mCameraIndex(cameraIndex),
+    mResizePreview(true),
+    mHardwareActive(false),
+    mPreviewRunning(false),
+    mRecordRunning(false),
+    mVideoFd(-1),
+    mVideoIdx(0),
+    mRecordingHint(false),
+    mDoPrintMenu(true),
+    mPiPCapture(false),
+    mfmtMultiplier(1),
+    // mSectionsRead is a count (loop bound in FindSection/DiscardData,
+    // reset to 0 there) — initialize it with 0, not a boolean literal.
+    mSectionsRead(0),
+    mSectionsAllocated(0),
+    mSections(NULL),
+    mJEXIFTmp(NULL),
+    mHaveAll(false),
+    mCamera(NULL),
+    mClient(NULL),
+    mSurfaceControl(NULL),
+    mPreviewSurface(NULL),
+    mInUse(false)
+{
+    mRecorder = new MediaRecorder(String16("camera"));
+}
+
+/*===========================================================================
+ * FUNCTION     : setTestCtxInstance
+ *
+ * DESCRIPTION  : Sends TestContext instance to CameraContext; stored in
+ *                mInterpr and used for PiP/ViV locking and shared buffers.
+ *
+ * PARAMETERS   :
+ *    @instance : TestContext instance
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void CameraContext::setTestCtxInstance(TestContext  *instance)
+{
+    mInterpr = instance;
+}
+
+/*===========================================================================
+ * FUNCTION     : setTestCtxInst
+ *
+ * DESCRIPTION  : Sends TestContext instance to Interpreter; stored in
+ *                mTestContext for use by the ViV encoder thread.
+ *
+ * PARAMETERS   :
+ *    @instance : TestContext instance
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void Interpreter::setTestCtxInst(TestContext  *instance)
+{
+    mTestContext = instance;
+}
+
+/*===========================================================================
+ * FUNCTION   : ~CameraContext
+ *
+ * DESCRIPTION: camera context destructor; stops any running preview and
+ *              disconnects from the camera service.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+CameraContext::~CameraContext()
+{
+    stopPreview();
+    closeCamera();
+}
+
+/*===========================================================================
+ * FUNCTION   : openCamera
+ *
+ * DESCRIPTION: connects to and initializes camera; queries supported
+ *              sizes, picks mid-range defaults, registers this object as
+ *              listener and reads the HAL's ZSL setting.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t  CameraContext::openCamera()
+{
+    useLock();
+    const char *ZSLStr = NULL;
+    size_t ZSLStrSize = 0;
+
+    if ( NULL != mCamera.get() ) {
+        printf("Camera already open! \n");
+        // NOTE(review): every exit path calls signalFinished(); presumably
+        // it releases the lock taken by useLock() — confirm.
+        signalFinished();
+        return NO_ERROR;
+    }
+
+    printf("openCamera(camera_index=%d)\n", mCameraIndex);
+
+#ifndef USE_JB_MR1
+
+    // Post-JB-MR1 connect API requires a package name and calling UID.
+    String16 packageName("CameraTest");
+
+    mCamera = Camera::connect(mCameraIndex,
+                              packageName,
+                              Camera::USE_CALLING_UID);
+
+#else
+
+    mCamera = Camera::connect(mCameraIndex);
+
+#endif
+
+    if ( NULL == mCamera.get() ) {
+        printf("Unable to connect to CameraService\n");
+        signalFinished();
+        return NO_INIT;
+    }
+
+    mParams = mCamera->getParameters();
+    mParams.getSupportedPreviewSizes(mSupportedPreviewSizes);
+    mParams.getSupportedPictureSizes(mSupportedPictureSizes);
+    mParams.getSupportedVideoSizes(mSupportedVideoSizes);
+
+    // Default to a mid-range entry of each supported-size list.
+    mCurrentPictureSizeIdx = mSupportedPictureSizes.size() / 2;
+    mCurrentPreviewSizeIdx = mSupportedPreviewSizes.size() / 2;
+    mCurrentVideoSizeIdx   = mSupportedVideoSizes.size() / 2;
+
+    mCamera->setListener(this);
+    mHardwareActive = true;
+
+    mInterpr->setViVSize((Size) mSupportedVideoSizes.itemAt(
+        mCurrentVideoSizeIdx),
+        mCameraIndex);
+
+    // Mirror the HAL's ZSL on/off setting into the test context.
+    ZSLStr = mParams.get(CameraContext::KEY_ZSL);
+    if (NULL != ZSLStr) {
+        ZSLStrSize = strlen(ZSLStr);
+        if (!strncmp(ZSLStr, "on", ZSLStrSize)) {
+            mInterpr->mIsZSLOn = true;
+        } else if (!strncmp(ZSLStr, "off", ZSLStrSize)) {
+            mInterpr->mIsZSLOn = false;
+        } else {
+            printf("zsl value is not valid!\n");
+        }
+    } else {
+        printf("zsl is NULL\n");
+    }
+
+    signalFinished();
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : onAsBinder
+ *
+ * DESCRIPTION: onAsBinder override; always returns NULL (no remote binder
+ *              object is exposed by this listener)
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : Pointer to IBinder (always NULL here)
+ *==========================================================================*/
+IBinder* CameraContext::onAsBinder() {
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumberOfCameras
+ *
+ * DESCRIPTION: queries the camera framework for the number of cameras
+ *              available on this device
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : camera count, or -1 when no camera connection exists
+ *==========================================================================*/
+int CameraContext::getNumberOfCameras()
+{
+    if ( NULL == mCamera.get() ) {
+        return -1;
+    }
+
+    return mCamera->getNumberOfCameras();
+}
+
+/*===========================================================================
+ * FUNCTION   : closeCamera
+ *
+ * DESCRIPTION: closes a previously initialized camera reference and tears
+ *              down the recorder; resets all activity flags
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::closeCamera()
+{
+    useLock();
+    if ( NULL == mCamera.get() ) {
+        // Nothing to close, but waiters must still be released; the
+        // original returned here without signalFinished() and leaked
+        // the lock taken by useLock().
+        signalFinished();
+        return NO_INIT;
+    }
+
+    mCamera->disconnect();
+    mCamera.clear();
+
+    mRecorder->init();
+    mRecorder->close();
+    mRecorder->release();
+    mRecorder.clear();
+
+    mHardwareActive = false;
+    mPreviewRunning = false;
+    mRecordRunning = false;
+
+    signalFinished();
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : startPreview
+ *
+ * DESCRIPTION: starts camera preview; (re)creates the preview surface and
+ *              pushes current preview/picture/video sizes when a resize
+ *              is pending
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::startPreview()
+{
+    useLock();
+
+    int ret = NO_ERROR;
+    int previewWidth, previewHeight;
+    Size calculatedPreviewSize;
+    Size currentPreviewSize = mSupportedPreviewSizes.itemAt(
+        mCurrentPreviewSizeIdx);
+    Size currentPictureSize = mSupportedPictureSizes.itemAt(
+        mCurrentPictureSizeIdx);
+    Size currentVideoSize   = mSupportedVideoSizes.itemAt(
+        mCurrentVideoSizeIdx);
+
+#ifndef USE_JB_MR1
+
+    sp<IGraphicBufferProducer> gbp;
+
+#endif
+
+    if (!mHardwareActive ) {
+        printf("Camera not active! \n");
+        // Release waiters before bailing out; the original leaked the
+        // lock taken by useLock() on this path.
+        signalFinished();
+        return NO_INIT;
+    }
+
+    if (mPreviewRunning) {
+        printf("Preview is already running! \n");
+        signalFinished();
+        return NO_ERROR;
+    }
+
+    if (mResizePreview) {
+        mPreviewRunning = false;
+
+        // In recording mode the preview must track the video aspect ratio.
+        if ( mRecordingHint ) {
+            calculatedPreviewSize =
+                getPreviewSizeFromVideoSizes(currentVideoSize);
+            previewWidth = calculatedPreviewSize.width;
+            previewHeight = calculatedPreviewSize.height;
+        } else {
+            previewWidth = currentPreviewSize.width;
+            previewHeight = currentPreviewSize.height;
+        }
+
+        ret = createPreviewSurface(previewWidth,
+                                   previewHeight,
+                                   HAL_PIXEL_FORMAT_YCrCb_420_SP);
+        if (  NO_ERROR != ret ) {
+            printf("Error while creating preview surface\n");
+            signalFinished();
+            return ret;
+        }
+
+        // set rdi mode if system prop is set for front camera
+        if (mCameraIndex == 1) {
+            char value[32];
+            property_get("persist.camera.rdimode", value, "0");
+            int rdimode = atoi(value);
+            printf("rdi mode = %d\n", rdimode);
+            if (rdimode == 1) {
+                mParams.set("rdi-mode", "enable");
+            } else {
+                mParams.set("rdi-mode", "disable");
+            }
+        } else {
+            mParams.set("rdi-mode", "disable");
+        }
+
+        mParams.set("recording-hint", "true");
+        mParams.setPreviewSize(previewWidth, previewHeight);
+        mParams.setPictureSize(currentPictureSize.width,
+            currentPictureSize.height);
+        mParams.setVideoSize(
+            currentVideoSize.width, currentVideoSize.height);
+
+        ret |= mCamera->setParameters(mParams.flatten());
+
+#ifndef USE_JB_MR1
+
+        gbp = mPreviewSurface->getIGraphicBufferProducer();
+        ret |= mCamera->setPreviewTarget(gbp);
+
+#else
+
+        ret |= mCamera->setPreviewDisplay(mPreviewSurface);
+
+#endif
+        mResizePreview = false;
+    }
+
+    if ( !mPreviewRunning ) {
+        ret |= mCamera->startPreview();
+        if ( NO_ERROR != ret ) {
+            printf("Preview start failed! \n");
+            signalFinished();
+            return ret;
+        }
+
+        mPreviewRunning = true;
+    }
+
+    signalFinished();
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : getPreviewSizeFromVideoSizes
+ *
+ * DESCRIPTION: Get the preview size from video size. Find all resolutions
+ *              with the same aspect ratio and choose the same or the
+ *              closest from them.
+ *
+ * PARAMETERS :
+ *   @currentVideoSize: current video size
+ *
+ * RETURN     : PreviewSize (zeroed when inputs are empty/invalid)
+ *==========================================================================*/
+Size CameraContext::getPreviewSizeFromVideoSizes(Size currentVideoSize)
+{
+
+    Size tmpPreviewSize;
+    Size PreviewSize;
+    Size PreviewSizes[mSupportedPreviewSizes.size()];
+    double tolerance = 0.00001;
+    double videoRatio;
+    double previewRatio;
+    size_t i = 0;
+    size_t j = 0;
+    int delta;
+
+    // Find all the resolutions with the same aspect ratio and choose the
+    // same or the closest resolution from them. Choose the closest resolution
+    // in case same aspect ratio is not found
+    if (currentVideoSize.width * currentVideoSize.height > 0 &&
+            mSupportedPreviewSizes.size() > 0) {
+        videoRatio = (float)currentVideoSize.width /
+            (float)currentVideoSize.height;
+        for (i=0; i<mSupportedPreviewSizes.size(); i++) {
+            tmpPreviewSize = mSupportedPreviewSizes.itemAt(i);
+            previewRatio = (float)tmpPreviewSize.width /
+                (float)tmpPreviewSize.height;
+            if (fabs(videoRatio - previewRatio) < tolerance) {
+                PreviewSizes[j] = tmpPreviewSize;
+                j++;
+            }
+        }
+
+        if ( j > 0 ) {
+            delta = abs((currentVideoSize.width *currentVideoSize.height)-
+                (PreviewSizes[0].width * PreviewSizes[0].height));
+            PreviewSize = PreviewSizes[0];
+            for (i=0; i<j; i++) {
+                // Fixed: abs() must wrap the whole pixel-count difference.
+                // The original applied abs() only to the video size product
+                // and subtracted outside it, so the comparison was wrong.
+                if (abs((currentVideoSize.width * currentVideoSize.height) -
+                    (PreviewSizes[i].width * PreviewSizes[i].height)) <
+                    delta) {
+                    PreviewSize = PreviewSizes[i];
+                    delta = abs((currentVideoSize.width *
+                        currentVideoSize.height) -
+                        (PreviewSizes[i].width * PreviewSizes[i].height));
+                }
+            }
+        } else {
+            // Choose the closest resolution in case same aspect ratio is
+            // not found
+            tmpPreviewSize = mSupportedPreviewSizes.itemAt(j);
+            PreviewSize = tmpPreviewSize;
+            delta = abs(
+                    (currentVideoSize.width * currentVideoSize.height)-
+                    (tmpPreviewSize.width * tmpPreviewSize.height));
+            for (i=0; i<mSupportedPreviewSizes.size(); i++) {
+                tmpPreviewSize = mSupportedPreviewSizes.itemAt(i);
+                if(abs(
+                        (currentVideoSize.width * currentVideoSize.height)-
+                        (tmpPreviewSize.width * tmpPreviewSize.height)) <
+                        delta) {
+                    PreviewSize = tmpPreviewSize;
+                    delta = abs(
+                            (currentVideoSize.width * currentVideoSize.height)-
+                            (tmpPreviewSize.width * tmpPreviewSize.height));
+                }
+            }
+        }
+    } else {
+        memset(&PreviewSize, 0, sizeof(PreviewSize));
+    }
+    return PreviewSize;
+}
+
+/*===========================================================================
+ * FUNCTION   : autoFocus
+ *
+ * DESCRIPTION: Triggers an autofocus cycle; a no-op unless preview is
+ *              currently running
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::autoFocus()
+{
+    useLock();
+
+    status_t rc = NO_ERROR;
+    if (mPreviewRunning) {
+        rc = mCamera->autoFocus();
+    }
+
+    signalFinished();
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : enablePreviewCallbacks
+ *
+ * DESCRIPTION: Enables preview callback messages when the camera hardware
+ *              is active; silently succeeds otherwise
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::enablePreviewCallbacks()
+{
+    useLock();
+
+    if ( !mHardwareActive ) {
+        signalFinished();
+        return NO_ERROR;
+    }
+
+    mCamera->setPreviewCallbackFlags(
+        CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK);
+
+    signalFinished();
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : takePicture
+ *
+ * DESCRIPTION: triggers image capture (compressed + raw callbacks);
+ *              requires a running preview
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::takePicture()
+{
+    status_t ret = NO_ERROR;
+    useLock();
+    if ( mPreviewRunning ) {
+        ret = mCamera->takePicture(
+            CAMERA_MSG_COMPRESSED_IMAGE|
+            CAMERA_MSG_RAW_IMAGE);
+        // Outside ZSL/recording, a capture stops preview; track that so the
+        // user must explicitly resume before the next capture.
+        if (!mRecordingHint && !mInterpr->mIsZSLOn) {
+            mPreviewRunning = false;
+        }
+    } else {
+        printf("Please resume/start the preview before taking a picture!\n");
+    }
+    signalFinished();
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : configureRecorder
+ *
+ * DESCRIPTION: Configure video recorder: sources, output format/file,
+ *              video size and frame rate, audio encoder
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::configureRecorder()
+{
+    useLock();
+    status_t ret = NO_ERROR;
+
+    mResizePreview = true;
+    mParams.set("recording-hint", "true");
+    mRecordingHint = true;
+    mCamera->setParameters(mParams.flatten());
+
+    Size videoSize = mSupportedVideoSizes.itemAt(mCurrentVideoSizeIdx);
+    ret = mRecorder->setParameters(
+        String8("video-param-encoding-bitrate=64000"));
+    if ( ret != NO_ERROR ) {
+        LOGE("Could not configure recorder (%d)", ret);
+        // All error paths release the waiters; the original returned
+        // without signalFinished() and leaked the lock on every failure.
+        signalFinished();
+        return ret;
+    }
+
+    ret = mRecorder->setCamera(
+        mCamera->remote(), mCamera->getRecordingProxy());
+    if ( ret != NO_ERROR ) {
+        LOGE("Could not set camera (%d)", ret);
+        signalFinished();
+        return ret;
+    }
+    ret = mRecorder->setVideoSource(VIDEO_SOURCE_CAMERA);
+    if ( ret != NO_ERROR ) {
+        LOGE("Could not set video soruce (%d)", ret);
+        signalFinished();
+        return ret;
+    }
+    ret = mRecorder->setAudioSource(AUDIO_SOURCE_DEFAULT);
+    if ( ret != NO_ERROR ) {
+        LOGE("Could not set audio source (%d)", ret);
+        signalFinished();
+        return ret;
+    }
+    ret = mRecorder->setOutputFormat(OUTPUT_FORMAT_DEFAULT);
+    if ( ret != NO_ERROR ) {
+        LOGE("Could not set output format (%d)", ret);
+        signalFinished();
+        return ret;
+    }
+
+    ret = mRecorder->setVideoEncoder(VIDEO_ENCODER_DEFAULT);
+    if ( ret != NO_ERROR ) {
+        LOGE("Could not set video encoder (%d)", ret);
+        signalFinished();
+        return ret;
+    }
+
+    char fileName[100];
+
+    snprintf(fileName, sizeof(fileName) / sizeof(char),
+            "/sdcard/vid_cam%d_%dx%d_%d.mpeg", mCameraIndex,
+            videoSize.width, videoSize.height, mVideoIdx++);
+
+    if ( mVideoFd < 0 ) {
+        // O_CREAT requires an explicit mode argument; omitting it reads
+        // indeterminate permission bits (undefined behavior).
+        mVideoFd = open(fileName, O_CREAT | O_RDWR, 0644);
+    }
+
+    if ( mVideoFd < 0 ) {
+        LOGE("Could not open video file for writing %s!", fileName);
+        signalFinished();
+        return UNKNOWN_ERROR;
+    }
+
+    ret = mRecorder->setOutputFile(mVideoFd, 0, 0);
+    if ( ret != NO_ERROR ) {
+        LOGE("Could not set output file (%d)", ret);
+        signalFinished();
+        return ret;
+    }
+
+    ret = mRecorder->setVideoSize(videoSize.width, videoSize.height);
+    if ( ret  != NO_ERROR ) {
+        LOGE("Could not set video size %dx%d", videoSize.width,
+            videoSize.height);
+        signalFinished();
+        return ret;
+    }
+
+    ret = mRecorder->setVideoFrameRate(30);
+    if ( ret != NO_ERROR ) {
+        LOGE("Could not set video frame rate (%d)", ret);
+        signalFinished();
+        return ret;
+    }
+
+    ret = mRecorder->setAudioEncoder(AUDIO_ENCODER_DEFAULT);
+    if ( ret != NO_ERROR ) {
+        LOGE("Could not set audio encoder (%d)", ret);
+        signalFinished();
+        return ret;
+    }
+
+    signalFinished();
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : unconfigureRecorder
+ *
+ * DESCRIPTION: Unconfigures video recording: clears the recording hint and
+ *              schedules a preview resize. No-op while recording is active.
+ *              (Header previously named the wrong function,
+ *              "unconfigureViVRecording".)
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::unconfigureRecorder()
+{
+    useLock();
+
+    if ( !mRecordRunning ) {
+        mResizePreview = true;
+        mParams.set("recording-hint", "false");
+        mRecordingHint = false;
+        mCamera->setParameters(mParams.flatten());
+    }
+
+    signalFinished();
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : configureViVRecording
+ *
+ * DESCRIPTION: Configures video in video recording: sets the recording
+ *              hint and registers this context as recording proxy listener
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::configureViVRecording()
+{
+    status_t ret = NO_ERROR;
+
+    // NOTE(review): unlike the sibling commands, this calls signalFinished()
+    // without a preceding useLock() -- confirm whether that asymmetry is
+    // intentional.
+    mResizePreview = true;
+    mParams.set("recording-hint", "true");
+    mRecordingHint = true;
+    mCamera->setParameters(mParams.flatten());
+    mCamera->setRecordingProxyListener(this);
+
+    signalFinished();
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : startRecording
+ *
+ * DESCRIPTION: triggers start recording; unlocks the camera so the
+ *              recorder process can use it, then prepares and starts
+ *              the recorder. No-op unless preview is running.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::startRecording()
+{
+    useLock();
+    status_t ret = NO_ERROR;
+
+    if ( mPreviewRunning ) {
+
+        mCamera->unlock();
+
+        ret = mRecorder->prepare();
+        if ( ret != NO_ERROR ) {
+            LOGE("Could not prepare recorder");
+            // Release waiters on failure; the original returned here
+            // without signalFinished() and leaked the lock.
+            signalFinished();
+            return ret;
+        }
+
+        ret = mRecorder->start();
+        if ( ret != NO_ERROR ) {
+            LOGE("Could not start recorder");
+            signalFinished();
+            return ret;
+        }
+
+        mRecordRunning = true;
+    }
+    signalFinished();
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : stopRecording
+ *
+ * DESCRIPTION: stops an active recording session and closes the output
+ *              file descriptor; no-op when recording is not running
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::stopRecording()
+{
+    useLock();
+
+    if ( mRecordRunning ) {
+        mRecorder->stop();
+        close(mVideoFd);
+        mVideoFd = -1;
+        mRecordRunning = false;
+    }
+
+    signalFinished();
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : startViVRecording
+ *
+ * DESCRIPTION: Starts video in video recording: picks the smaller of the
+ *              two configured video sizes as the source stream, allocates
+ *              the intermediate frame buffer and starts recording
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::startViVRecording()
+{
+    useLock();
+    status_t ret;
+
+    // The camera with the smaller frame area is the source; the buffer is
+    // sized for the source frames.
+    if (mInterpr->mViVVid.VideoSizes[0].width *
+            mInterpr->mViVVid.VideoSizes[0].height >=
+            mInterpr->mViVVid.VideoSizes[1].width *
+            mInterpr->mViVVid.VideoSizes[1].height) {
+        mInterpr->mViVBuff.buffSize = calcBufferSize(
+            mInterpr->mViVVid.VideoSizes[1].width,
+            mInterpr->mViVVid.VideoSizes[1].height);
+        mInterpr->mViVVid.sourceCameraID = 1;
+        mInterpr->mViVVid.destinationCameraID = 0;
+    } else {
+        mInterpr->mViVBuff.buffSize = calcBufferSize(
+            mInterpr->mViVVid.VideoSizes[0].width,
+            mInterpr->mViVVid.VideoSizes[0].height);
+        mInterpr->mViVVid.sourceCameraID = 0;
+        mInterpr->mViVVid.destinationCameraID = 1;
+    }
+
+    if (mInterpr->mViVBuff.buff == NULL) {
+        mInterpr->mViVBuff.buff =
+            (void *)malloc(mInterpr->mViVBuff.buffSize);
+        // The original never checked the allocation result.
+        if (mInterpr->mViVBuff.buff == NULL) {
+            LOGE("Could not allocate ViV buffer");
+            signalFinished();
+            return NO_MEMORY;
+        }
+    }
+
+    ret = mCamera->startRecording();
+
+    signalFinished();
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : stopViVRecording
+ *
+ * DESCRIPTION: Stops video in video recording on this camera
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::stopViVRecording()
+{
+    useLock();
+
+    mCamera->stopRecording();
+
+    signalFinished();
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : stopPreview
+ *
+ * DESCRIPTION: stops camera preview and destroys the preview surface;
+ *              marks the preview for resizing on the next start
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::stopPreview()
+{
+    useLock();
+
+    status_t rc = NO_ERROR;
+    if ( mHardwareActive ) {
+        mCamera->stopPreview();
+        rc = destroyPreviewSurface();
+    }
+
+    mPreviewRunning = false;
+    mResizePreview = true;
+
+    signalFinished();
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : resumePreview
+ *
+ * DESCRIPTION: resumes camera preview after image capture; fails with
+ *              NO_INIT when the camera hardware is not active
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::resumePreview()
+{
+    useLock();
+
+    if ( !mHardwareActive ) {
+        signalFinished();
+        return NO_INIT;
+    }
+
+    status_t rc = mCamera->startPreview();
+    mPreviewRunning = true;
+
+    signalFinished();
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : nextPreviewSize
+ *
+ * DESCRIPTION: Iterates through all supported preview sizes (wraps around)
+ *              and applies the new size, restarting preview if needed.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::nextPreviewSize()
+{
+    useLock();
+    if ( mHardwareActive ) {
+        // Advance with wrap-around over the supported list.
+        mCurrentPreviewSizeIdx += 1;
+        mCurrentPreviewSizeIdx %= mSupportedPreviewSizes.size();
+        Size previewSize = mSupportedPreviewSizes.itemAt(
+            mCurrentPreviewSizeIdx);
+        mParams.setPreviewSize(previewSize.width,
+                               previewSize.height);
+        mResizePreview = true;
+
+        // A running preview must be restarted for the new size to apply.
+        if ( mPreviewRunning ) {
+            mCamera->stopPreview();
+            mCamera->setParameters(mParams.flatten());
+            mCamera->startPreview();
+        } else {
+            mCamera->setParameters(mParams.flatten());
+        }
+    }
+
+    signalFinished();
+    return NO_ERROR;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : setPreviewSize
+ *
+ * DESCRIPTION: Sets exact preview size if supported
+ *
+ * PARAMETERS : format size in the form of WIDTHxHEIGHT
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::setPreviewSize(const char *format)
+{
+    useLock();
+    if ( mHardwareActive ) {
+        int newHeight;
+        int newWidth;
+        // Validate the input; the original used newWidth/newHeight
+        // uninitialized when sscanf failed to match.
+        if (2 != sscanf(format, "%dx%d", &newWidth, &newHeight)) {
+            printf("Invalid size format: %s\n", format);
+            signalFinished();
+            return BAD_VALUE;
+        }
+
+        unsigned int i;
+        for (i = 0; i < mSupportedPreviewSizes.size(); ++i) {
+            Size previewSize = mSupportedPreviewSizes.itemAt(i);
+            if ( newWidth == previewSize.width &&
+                 newHeight == previewSize.height )
+            {
+                break;
+            }
+
+        }
+        if ( i == mSupportedPreviewSizes.size())
+        {
+            printf("Preview size %dx%d not supported !\n",
+                newWidth, newHeight);
+            // Release waiters before returning; the original leaked the
+            // lock taken by useLock() on this path.
+            signalFinished();
+            return INVALID_OPERATION;
+        }
+
+        mParams.setPreviewSize(newWidth,
+                               newHeight);
+        mResizePreview = true;
+
+        // A running preview must be restarted for the new size to apply.
+        if ( mPreviewRunning ) {
+            mCamera->stopPreview();
+            mCamera->setParameters(mParams.flatten());
+            mCamera->startPreview();
+        } else {
+            mCamera->setParameters(mParams.flatten());
+        }
+    }
+
+    signalFinished();
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCurrentPreviewSize
+ *
+ * DESCRIPTION: queries the currently configured preview size; leaves the
+ *              output untouched when the hardware is not active
+ *
+ * PARAMETERS :
+ *  @previewSize : preview size currently configured
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::getCurrentPreviewSize(Size &previewSize)
+{
+    useLock();
+
+    if ( mHardwareActive ) {
+        previewSize = mSupportedPreviewSizes.itemAt(mCurrentPreviewSizeIdx);
+    }
+
+    signalFinished();
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : nextPictureSize
+ *
+ * DESCRIPTION: Iterates through all supported picture sizes (wraps around)
+ *              and applies the new size immediately.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::nextPictureSize()
+{
+    useLock();
+    if ( mHardwareActive ) {
+        // Advance with wrap-around over the supported list.
+        mCurrentPictureSizeIdx += 1;
+        mCurrentPictureSizeIdx %= mSupportedPictureSizes.size();
+        Size pictureSize = mSupportedPictureSizes.itemAt(
+            mCurrentPictureSizeIdx);
+        mParams.setPictureSize(pictureSize.width,
+            pictureSize.height);
+        mCamera->setParameters(mParams.flatten());
+    }
+    signalFinished();
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setPictureSize
+ *
+ * DESCRIPTION: Sets exact picture size if supported
+ *
+ * PARAMETERS : format size in the form of WIDTHxHEIGHT
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::setPictureSize(const char *format)
+{
+    useLock();
+    if ( mHardwareActive ) {
+        int newHeight;
+        int newWidth;
+        // Validate the input; the original used newWidth/newHeight
+        // uninitialized when sscanf failed to match.
+        if (2 != sscanf(format, "%dx%d", &newWidth, &newHeight)) {
+            printf("Invalid size format: %s\n", format);
+            signalFinished();
+            return BAD_VALUE;
+        }
+
+        unsigned int i;
+        for (i = 0; i < mSupportedPictureSizes.size(); ++i) {
+            Size PictureSize = mSupportedPictureSizes.itemAt(i);
+            if ( newWidth == PictureSize.width &&
+                 newHeight == PictureSize.height )
+            {
+                break;
+            }
+
+        }
+        if ( i == mSupportedPictureSizes.size())
+        {
+            // Fixed copy-paste message (said "Preview size") and the lock
+            // leak on this early-return path.
+            printf("Picture size %dx%d not supported !\n",
+                newWidth, newHeight);
+            signalFinished();
+            return INVALID_OPERATION;
+        }
+
+        mParams.setPictureSize(newWidth,
+                               newHeight);
+        mCamera->setParameters(mParams.flatten());
+    }
+
+    signalFinished();
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : nextVideoSize
+ *
+ * DESCRIPTION: Select the next available video size (wraps around), apply
+ *              it and propagate the new size to the ViV interpreter state.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::nextVideoSize()
+{
+    useLock();
+    if ( mHardwareActive ) {
+        // Advance with wrap-around over the supported list.
+        mCurrentVideoSizeIdx += 1;
+        mCurrentVideoSizeIdx %= mSupportedVideoSizes.size();
+        Size videoSize = mSupportedVideoSizes.itemAt(mCurrentVideoSizeIdx);
+        mParams.setVideoSize(videoSize.width,
+                             videoSize.height);
+        mCamera->setParameters(mParams.flatten());
+        // Keep the video-in-video bookkeeping in sync with the new size.
+        mInterpr->setViVSize((Size) mSupportedVideoSizes.itemAt(
+            mCurrentVideoSizeIdx), mCameraIndex);
+    }
+    signalFinished();
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setVideoSize
+ *
+ * DESCRIPTION: Sets exact video size if supported
+ *
+ * PARAMETERS :
+ *   @format  : size in the form of WIDTHxHEIGHT
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::setVideoSize(const char *format)
+{
+    useLock();
+    if ( mHardwareActive ) {
+        int newHeight;
+        int newWidth;
+        // Validate the input; the original used newWidth/newHeight
+        // uninitialized when sscanf failed to match.
+        if (2 != sscanf(format, "%dx%d", &newWidth, &newHeight)) {
+            printf("Invalid size format: %s\n", format);
+            signalFinished();
+            return BAD_VALUE;
+        }
+
+        unsigned int i;
+        for (i = 0; i < mSupportedVideoSizes.size(); ++i) {
+            Size PictureSize = mSupportedVideoSizes.itemAt(i);
+            if ( newWidth == PictureSize.width &&
+                 newHeight == PictureSize.height )
+            {
+                break;
+            }
+
+        }
+        if ( i == mSupportedVideoSizes.size())
+        {
+            // Fixed copy-paste message (said "Preview size") and the lock
+            // leak on this early-return path.
+            printf("Video size %dx%d not supported !\n",
+                newWidth, newHeight);
+            signalFinished();
+            return INVALID_OPERATION;
+        }
+
+        mParams.setVideoSize(newWidth,
+                             newHeight);
+        mCamera->setParameters(mParams.flatten());
+    }
+
+    signalFinished();
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION    : getCurrentVideoSize
+ *
+ * DESCRIPTION : Get current video size; leaves the output untouched when
+ *               the hardware is not active
+ *
+ * PARAMETERS  :
+ *   @videoSize: video Size
+ *
+ * RETURN      : status_t type of status
+ *               NO_ERROR  -- success
+ *               none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::getCurrentVideoSize(Size &videoSize)
+{
+    useLock();
+
+    if ( mHardwareActive ) {
+        videoSize = mSupportedVideoSizes.itemAt(mCurrentVideoSizeIdx);
+    }
+
+    signalFinished();
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCurrentPictureSize
+ *
+ * DESCRIPTION: queries the currently configured picture size; leaves the
+ *              output untouched when the hardware is not active
+ *
+ * PARAMETERS :
+ *  @pictureSize : picture size currently configured
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::getCurrentPictureSize(Size &pictureSize)
+{
+    useLock();
+
+    if ( mHardwareActive ) {
+        pictureSize = mSupportedPictureSizes.itemAt(mCurrentPictureSizeIdx);
+    }
+
+    signalFinished();
+
+    return NO_ERROR;
+}
+
+}; //namespace qcamera ends here
+
+using namespace qcamera;
+
+/*===========================================================================
+ * FUNCTION   : printMenu
+ *
+ * DESCRIPTION: prints the available camera options, grouped into general,
+ *              preview and image-capture sections; a no-op when menu
+ *              printing is disabled via mDoPrintMenu
+ *
+ * PARAMETERS :
+ *  @currentCamera : camera context currently being used (must be non-NULL)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void CameraContext::printMenu(sp<CameraContext> currentCamera)
+{
+    if ( !mDoPrintMenu ) return;
+    Size currentPictureSize, currentPreviewSize, currentVideoSize;
+    const char *zsl_mode = mParams.get(CameraContext::KEY_ZSL);
+
+    assert(currentCamera.get());
+
+    currentCamera->getCurrentPictureSize(currentPictureSize);
+    currentCamera->getCurrentPreviewSize(currentPreviewSize);
+    currentCamera->getCurrentVideoSize(currentVideoSize);
+
+    printf("\n\n=========== FUNCTIONAL TEST MENU ===================\n\n");
+
+    printf(" \n\nSTART / STOP / GENERAL SERVICES \n");
+    printf(" -----------------------------\n");
+    printf("   %c. Switch camera - Current Index: %d\n",
+            Interpreter::SWITCH_CAMERA_CMD,
+            currentCamera->getCameraIndex());
+    printf("   %c. Resume Preview after capture \n",
+            Interpreter::RESUME_PREVIEW_CMD);
+    printf("   %c. Quit \n",
+            Interpreter::EXIT_CMD);
+    // NOTE(review): this entry is missing a trailing '\n' -- confirm
+    // whether the run-on output is intentional.
+    printf("   %c. Camera Capability Dump",
+            Interpreter::DUMP_CAPS_CMD);
+
+    printf(" \n\n PREVIEW SUB MENU \n");
+    printf(" -----------------------------\n");
+    printf("   %c. Start Preview\n",
+            Interpreter::START_PREVIEW_CMD);
+    printf("   %c. Stop Preview\n",
+            Interpreter::STOP_PREVIEW_CMD);
+    printf("   %c. Preview size:  %dx%d\n",
+            Interpreter::CHANGE_PREVIEW_SIZE_CMD,
+            currentPreviewSize.width,
+            currentPreviewSize.height);
+    printf("   %c. Video size:  %dx%d\n",
+            Interpreter::CHANGE_VIDEO_SIZE_CMD,
+            currentVideoSize.width,
+            currentVideoSize.height);
+    printf("   %c. Start Recording\n",
+            Interpreter::START_RECORD_CMD);
+    printf("   %c. Stop Recording\n",
+            Interpreter::STOP_RECORD_CMD);
+    printf("   %c. Start ViV Recording\n",
+            Interpreter::START_VIV_RECORD_CMD);
+    printf("   %c. Stop ViV Recording\n",
+            Interpreter::STOP_VIV_RECORD_CMD);
+    printf("   %c. Enable preview frames\n",
+            Interpreter::ENABLE_PRV_CALLBACKS_CMD);
+    printf("   %c. Trigger autofocus \n",
+            Interpreter::AUTOFOCUS_CMD);
+
+    printf(" \n\n IMAGE CAPTURE SUB MENU \n");
+    printf(" -----------------------------\n");
+    printf("   %c. Take picture/Full Press\n",
+            Interpreter::TAKEPICTURE_CMD);
+    printf("   %c. Take picture in picture\n",
+            Interpreter::TAKEPICTURE_IN_PICTURE_CMD);
+    printf("   %c. Picture size:  %dx%d\n",
+            Interpreter::CHANGE_PICTURE_SIZE_CMD,
+            currentPictureSize.width,
+            currentPictureSize.height);
+    printf("   %c. zsl:  %s\n", Interpreter::ZSL_CMD,
+        (zsl_mode != NULL) ? zsl_mode : "NULL");
+
+    printf("\n   Choice: ");
+}
+
/*===========================================================================
 * FUNCTION   : enablePrintPreview
 *
 * DESCRIPTION: Enables printing the preview menu (see printMenu(), which is
 *              a no-op while printing is disabled)
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/
void CameraContext::enablePrintPreview()
{
    mDoPrintMenu = true;
}
+
/*===========================================================================
 * FUNCTION   : disablePrintPreview
 *
 * DESCRIPTION: Disables printing the preview menu; printMenu() returns
 *              immediately until enablePrintPreview() is called again
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/
void CameraContext::disablePrintPreview()
{
    mDoPrintMenu = false;
}
+
/*===========================================================================
 * FUNCTION   : enablePiPCapture
 *
 * DESCRIPTION: Enables picture-in-picture capture mode for this camera
 *              context (flag consulted by the capture path)
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/
void CameraContext::enablePiPCapture()
{
    mPiPCapture = true;
}
+
/*===========================================================================
 * FUNCTION   : disablePiPCapture
 *
 * DESCRIPTION: Disables picture-in-picture capture mode for this camera
 *              context
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/
void CameraContext::disablePiPCapture()
{
    mPiPCapture = false;
}
+
/*===========================================================================
 * FUNCTION   : getZSL
 *
 * DESCRIPTION: get ZSL value of current camera from the cached parameters
 *
 * PARAMETERS : None
 *
 * RETURN     : current zsl value string, or NULL when the parameter is not
 *              set (callers must handle NULL)
 *==========================================================================*/
const char *CameraContext::getZSL()
{
    return mParams.get(CameraContext::KEY_ZSL);
}
+
/*===========================================================================
 * FUNCTION   : setZSL
 *
 * DESCRIPTION: set ZSL value of current camera and push the updated
 *              parameter set down to the camera
 *
 * PARAMETERS :
 *  @value : zsl value to be set (callers pass "on" / "off")
 *
 * RETURN     : None
 *==========================================================================*/
void CameraContext::setZSL(const char *value)
{
    mParams.set(CameraContext::KEY_ZSL, value);
    // NOTE(review): setParameters() can fail but its result is not checked
    // here -- confirm whether failures should be surfaced to the caller.
    mCamera->setParameters(mParams.flatten());
}
+
+/*===========================================================================
+ * FUNCTION   : configureViVCodec
+ *
+ * DESCRIPTION: Configures video in video codec
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t Interpreter::configureViVCodec()
+{
+    status_t ret = NO_ERROR;
+    char fileName[100];
+    sp<AMessage> format = new AMessage;
+    sp<ALooper> looper = new ALooper;
+
+    if (mTestContext->mViVVid.VideoSizes[0].width *
+            mTestContext->mViVVid.VideoSizes[0].height >=
+            mTestContext->mViVVid.VideoSizes[1].width *
+            mTestContext->mViVVid.VideoSizes[1].height) {
+        snprintf(fileName, sizeof(fileName) / sizeof(char), "/sdcard/ViV_vid_%dx%d_%d.mp4",
+            mTestContext->mViVVid.VideoSizes[0].width,
+            mTestContext->mViVVid.VideoSizes[0].height,
+            mTestContext->mViVVid.ViVIdx++);
+        format->setInt32("width", mTestContext->mViVVid.VideoSizes[0].width);
+        format->setInt32("height", mTestContext->mViVVid.VideoSizes[0].height);
+    } else {
+        snprintf(fileName, sizeof(fileName) / sizeof(char), "/sdcard/ViV_vid_%dx%d_%d.mp4",
+            mTestContext->mViVVid.VideoSizes[1].width,
+            mTestContext->mViVVid.VideoSizes[1].height,
+            mTestContext->mViVVid.ViVIdx++);
+        format->setInt32("width", mTestContext->mViVVid.VideoSizes[1].width);
+        format->setInt32("height", mTestContext->mViVVid.VideoSizes[1].height);
+    }
+    int fd = open(fileName, O_CREAT | O_RDWR );
+    if (fd < 0) {
+        LOGE("Error opening file");
+        return UNKNOWN_ERROR;
+    }
+    mTestContext->mViVVid.muxer = new MediaMuxer(
+        fd, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
+
+    format->setString("mime", "video/avc");
+    format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque);
+
+    format->setInt32("bitrate", 1000000);
+    format->setFloat("frame-rate", 30);
+    format->setInt32("i-frame-interval", 10);
+
+    looper->setName("ViV_recording_looper");
+    looper->start();
+    ALOGV("Creating codec");
+    mTestContext->mViVVid.codec = MediaCodec::CreateByType(
+        looper, "video/avc", true);
+    if (mTestContext->mViVVid.codec == NULL) {
+        fprintf(stderr, "ERROR: unable to create video/avc codec instance\n");
+        return UNKNOWN_ERROR;
+    }
+    ret = mTestContext->mViVVid.codec->configure(format, NULL, NULL,
+            MediaCodec::CONFIGURE_FLAG_ENCODE);
+    if (ret != NO_ERROR) {
+        mTestContext->mViVVid.codec->release();
+        mTestContext->mViVVid.codec.clear();
+
+        fprintf(stderr, "ERROR: unable to configure codec (err=%d)\n", ret);
+        return ret;
+    }
+
+    ALOGV("Creating buffer producer");
+    ret = mTestContext->mViVVid.codec->createInputSurface(
+        &mTestContext->mViVVid.bufferProducer);
+    if (ret != NO_ERROR) {
+        mTestContext->mViVVid.codec->release();
+        mTestContext->mViVVid.codec.clear();
+
+        fprintf(stderr,
+            "ERROR: unable to create encoder input surface (err=%d)\n", ret);
+        return ret;
+    }
+
+    ret = mTestContext->mViVVid.codec->start();
+    if (ret != NO_ERROR) {
+        mTestContext->mViVVid.codec->release();
+        mTestContext->mViVVid.codec.clear();
+
+        fprintf(stderr, "ERROR: unable to start codec (err=%d)\n", ret);
+        return ret;
+    }
+    ALOGV("Codec prepared");
+
+    mTestContext->mViVVid.surface = new Surface(
+        mTestContext->mViVVid.bufferProducer);
+    mTestContext->mViVVid.ANW = mTestContext->mViVVid.surface;
+    ret = native_window_api_connect(mTestContext->mViVVid.ANW.get(),
+        NATIVE_WINDOW_API_CPU);
+    if (mTestContext->mViVVid.VideoSizes[0].width *
+        mTestContext->mViVVid.VideoSizes[0].height >=
+        mTestContext->mViVVid.VideoSizes[1].width *
+        mTestContext->mViVVid.VideoSizes[1].height) {
+        native_window_set_buffers_format(mTestContext->mViVVid.ANW.get(),
+                HAL_PIXEL_FORMAT_NV12_ENCODEABLE);
+        native_window_set_buffers_dimensions(mTestContext->mViVVid.ANW.get(),
+                mTestContext->mViVVid.VideoSizes[0].width,
+                mTestContext->mViVVid.VideoSizes[0].height);
+    } else {
+        native_window_set_buffers_format(mTestContext->mViVVid.ANW.get(),
+                HAL_PIXEL_FORMAT_NV12_ENCODEABLE);
+        native_window_set_buffers_dimensions(mTestContext->mViVVid.ANW.get(),
+                mTestContext->mViVVid.VideoSizes[1].width,
+                mTestContext->mViVVid.VideoSizes[1].height);
+    }
+    native_window_set_usage(mTestContext->mViVVid.ANW.get(),
+        GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN);
+    native_window_set_buffer_count(mTestContext->mViVVid.ANW.get(),
+        mTestContext->mViVVid.buff_cnt);
+
+    ViVEncoderThread();
+
+    return ret;
+}
+
/*===========================================================================
 * FUNCTION   : unconfigureViVCodec
 *
 * DESCRIPTION: Tears down the video-in-video pipeline in reverse order of
 *              configureViVCodec(): disconnects the native window, stops
 *              the encoder, joins the encoder thread, finalizes the muxer
 *              and drops all references.
 *
 * PARAMETERS : none
 *
 * RETURN     : status_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code (only the native window disconnect
 *              result is propagated; codec/muxer stop results are ignored)
 *==========================================================================*/
status_t Interpreter::unconfigureViVCodec()
{
    status_t ret = NO_ERROR;

    ret = native_window_api_disconnect(mTestContext->mViVVid.ANW.get(),
        NATIVE_WINDOW_API_CPU);
    mTestContext->mViVVid.bufferProducer = NULL;
    // Stop the codec first so the encoder thread drains and exits, then
    // join it before finalizing the muxer -- this ordering matters.
    mTestContext->mViVVid.codec->stop();
    pthread_join(mViVEncThread, NULL);
    mTestContext->mViVVid.muxer->stop();
    mTestContext->mViVVid.codec->release();
    mTestContext->mViVVid.codec.clear();
    mTestContext->mViVVid.muxer.clear();
    mTestContext->mViVVid.surface.clear();
  return ret;
}
+
+/*===========================================================================
+ * FUNCTION   : Interpreter
+ *
+ * DESCRIPTION: Interpreter constructor
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+Interpreter::Interpreter(const char *file)
+    : mCmdIndex(0)
+    , mScript(NULL)
+{
+    if (!file){
+        printf("no File Given\n");
+        mUseScript = false;
+        return;
+    }
+
+    FILE *fh = fopen(file, "r");
+    if ( !fh ) {
+        printf("Could not open file %s\n", file);
+        mUseScript = false;
+        return;
+    }
+
+    fseek(fh, 0, SEEK_END);
+    size_t len = (size_t)ftell(fh);
+    rewind(fh);
+
+    if( !len ) {
+        printf("Script file %s is empty !\n", file);
+        fclose(fh);
+        return;
+    }
+
+    mScript = new char[len + 1];
+    if ( !mScript ) {
+        fclose(fh);
+        return;
+    }
+
+    fread(mScript, sizeof(char), len, fh);
+    mScript[len] = '\0'; // ensure null terminated;
+    fclose(fh);
+
+
+    char *p1;
+    char *p2;
+    p1 = p2 = mScript;
+
+    do {
+        switch (*p1) {
+        case '\0':
+        case '|':
+            p1++;
+            break;
+        case SWITCH_CAMERA_CMD:
+        case RESUME_PREVIEW_CMD:
+        case START_PREVIEW_CMD:
+        case STOP_PREVIEW_CMD:
+        case CHANGE_PREVIEW_SIZE_CMD:
+        case CHANGE_PICTURE_SIZE_CMD:
+        case START_RECORD_CMD:
+        case STOP_RECORD_CMD:
+        case START_VIV_RECORD_CMD:
+        case STOP_VIV_RECORD_CMD:
+        case DUMP_CAPS_CMD:
+        case AUTOFOCUS_CMD:
+        case TAKEPICTURE_CMD:
+        case TAKEPICTURE_IN_PICTURE_CMD:
+        case ENABLE_PRV_CALLBACKS_CMD:
+        case EXIT_CMD:
+        case ZSL_CMD:
+        case DELAY:
+            p2 = p1;
+            while( (p2 != (mScript + len)) && (*p2 != '|')) {
+                p2++;
+            }
+            *p2 = '\0';
+            if (p2 == (p1 + 1))
+                mCommands.push_back(Command(
+                    static_cast<Interpreter::Commands_e>(*p1)));
+            else
+                mCommands.push_back(Command(
+                    static_cast<Interpreter::Commands_e>(*p1), (p1 + 1)));
+            p1 = p2;
+            break;
+        default:
+            printf("Invalid cmd %c \n", *p1);
+            do {
+                p1++;
+
+            } while(*p1 != '|' && p1 != (mScript + len));
+
+        }
+    } while(p1 != (mScript + len));
+    mUseScript = true;
+}
+
+/*===========================================================================
+ * FUNCTION   : ~Interpreter
+ *
+ * DESCRIPTION: Interpreter destructor
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+Interpreter::~Interpreter()
+{
+    if ( mScript )
+        delete[] mScript;
+
+    mCommands.clear();
+}
+
+/*===========================================================================
+ * FUNCTION        : getCommand
+ *
+ * DESCRIPTION     : Get a command from interpreter
+ *
+ * PARAMETERS      :
+ *   @currentCamera: Current camera context
+ *
+ * RETURN          : command
+ *==========================================================================*/
+Interpreter::Command Interpreter::getCommand(
+    sp<CameraContext> currentCamera)
+{
+    if( mUseScript ) {
+        return mCommands[mCmdIndex++];
+    } else {
+        currentCamera->printMenu(currentCamera);
+        return Interpreter::Command(
+            static_cast<Interpreter::Commands_e>(getchar()));
+    }
+}
+
+/*===========================================================================
+ * FUNCTION        : TestContext
+ *
+ * DESCRIPTION     : TestContext constructor
+ *
+ * PARAMETERS      : None
+ *
+ * RETURN          : None
+ *==========================================================================*/
+TestContext::TestContext()
+{
+    int i = 0;
+    mTestRunning = false;
+    mInterpreter = NULL;
+    mViVVid.ViVIdx = 0;
+    mViVVid.buff_cnt = 9;
+    mViVVid.graphBuf = 0;
+    mViVVid.mappedBuff = NULL;
+    mViVVid.isBuffValid = false;
+    mViVVid.sourceCameraID = -1;
+    mViVVid.destinationCameraID = -1;
+    mPiPinUse = false;
+    mViVinUse = false;
+    mIsZSLOn = false;
+    memset(&mViVBuff, 0, sizeof(ViVBuff_t));
+
+    ProcessState::self()->startThreadPool();
+
+    do {
+        camera[i] = new CameraContext(i);
+        if ( NULL == camera[i].get() ) {
+            break;
+        }
+        camera[i]->setTestCtxInstance(this);
+
+        //by default open only back camera
+        if (i==0) {
+            status_t stat = camera[i]->openCamera();
+            if ( NO_ERROR != stat ) {
+                printf("Error encountered Openging camera id : %d\n", i);
+                break;
+            }
+        }
+        mAvailableCameras.add(camera[i]);
+        i++;
+    } while ( i < camera[0]->getNumberOfCameras() ) ;
+
+    if (i < camera[0]->getNumberOfCameras() ) {
+        for (size_t j = 0; j < mAvailableCameras.size(); j++) {
+            camera[j] = mAvailableCameras.itemAt(j);
+            camera[j]->closeCamera();
+            camera[j].clear();
+        }
+
+        mAvailableCameras.clear();
+    }
+}
+
+/*===========================================================================
+ * FUNCTION        : ~TestContext
+ *
+ * DESCRIPTION     : TestContext destructor
+ *
+ * PARAMETERS      : None
+ *
+ * RETURN          : None
+ *==========================================================================*/
+TestContext::~TestContext()
+{
+    delete mInterpreter;
+
+    for (size_t j = 0; j < mAvailableCameras.size(); j++) {
+        camera[j] = mAvailableCameras.itemAt(j);
+        camera[j]->closeCamera();
+        camera[j].clear();
+    }
+
+    mAvailableCameras.clear();
+}
+
/*===========================================================================
 * FUNCTION        : GetCamerasNum
 *
 * DESCRIPTION     : Get the number of cameras that were successfully
 *                   registered at construction time
 *
 * PARAMETERS      : None
 *
 * RETURN          : Number of cameras
 *==========================================================================*/
size_t TestContext::GetCamerasNum()
{
    return mAvailableCameras.size();
}
+
+/*===========================================================================
+ * FUNCTION        : AddScriptFromFile
+ *
+ * DESCRIPTION     : Add script from file
+ *
+ * PARAMETERS      :
+ *   @scriptFile   : Script file
+ *
+ * RETURN          : status_t type of status
+ *                   NO_ERROR  -- success
+ *                   none-zero failure code
+ *==========================================================================*/
+status_t TestContext::AddScriptFromFile(const char *scriptFile)
+{
+    mInterpreter = new Interpreter(scriptFile);
+    mInterpreter->setTestCtxInst(this);
+
+    return NO_ERROR;
+}
+
/*===========================================================================
 * FUNCTION        : releasePiPBuff
 *
 * DESCRIPTION     : Release the video-in-video temporary buffer
 *
 * PARAMETERS      : None
 *
 * RETURN          : None
 *==========================================================================*/
void Interpreter::releasePiPBuff() {
    // free(NULL) is a no-op, so this is safe even when no buffer was ever
    // allocated; nulling the pointer guards against a double free later.
    free(mTestContext->mViVBuff.buff);
    mTestContext->mViVBuff.buff = NULL;
}
+
/*===========================================================================
 * FUNCTION   : FunctionalTest
 *
 * DESCRIPTION: Main test loop: fetches one command at a time from the
 *              interpreter (script file or interactive stdin menu) and
 *              dispatches it against the currently selected camera, until
 *              an exit command clears mTestRunning.
 *
 * PARAMETERS : None (operates on mAvailableCameras / mCurrentCameraIndex)
 *
 * RETURN     : status_t type of status
 *              NO_ERROR  -- test loop ended normally
 *==========================================================================*/
status_t TestContext::FunctionalTest()
{
    status_t stat = NO_ERROR;
    const char *ZSLStr = NULL;
    size_t ZSLStrSize = 0;

    // NOTE(review): assert() is compiled out in NDEBUG builds; the explicit
    // size check below is what actually protects release builds.
    assert(mAvailableCameras.size());

    if ( !mInterpreter ) {
        // No script installed: fall back to the interactive menu.
        mInterpreter = new Interpreter();
        mInterpreter->setTestCtxInst(this);
    }

    if (mAvailableCameras.size() == 0) {
        printf("no cameras supported... exiting test app\n");
    } else {
        mTestRunning = true;
    }

    while (mTestRunning) {
        sp<CameraContext> currentCamera =
            mAvailableCameras.itemAt(mCurrentCameraIndex);
        Interpreter::Command command =
            mInterpreter->getCommand(currentCamera);
        currentCamera->enablePrintPreview();

        switch (command.cmd) {
        // Cycle to the next available camera and open it.
        case Interpreter::SWITCH_CAMERA_CMD:
        {
            mCurrentCameraIndex++;
            mCurrentCameraIndex %= mAvailableCameras.size();
            currentCamera = mAvailableCameras.itemAt(mCurrentCameraIndex);
            stat = currentCamera->openCamera();
        }
            break;

        case Interpreter::RESUME_PREVIEW_CMD:
        {
            stat = currentCamera->resumePreview();
        }
            break;

        case Interpreter::START_PREVIEW_CMD:
        {
            stat = currentCamera->startPreview();
        }
            break;

        case Interpreter::STOP_PREVIEW_CMD:
        {
            stat = currentCamera->stopPreview();
        }
            break;

        // The three size commands take an optional "WxH" argument; without
        // one they step to the next supported size.
        case Interpreter::CHANGE_VIDEO_SIZE_CMD:
        {
            if ( command.arg )
                stat = currentCamera->setVideoSize(command.arg);
            else
                stat = currentCamera->nextVideoSize();
        }
        break;

        case Interpreter::CHANGE_PREVIEW_SIZE_CMD:
        {
            if ( command.arg )
                stat = currentCamera->setPreviewSize(command.arg);
            else
                stat = currentCamera->nextPreviewSize();
        }
            break;

        case Interpreter::CHANGE_PICTURE_SIZE_CMD:
        {
            if ( command.arg )
                stat = currentCamera->setPictureSize(command.arg);
            else
                stat = currentCamera->nextPictureSize();
        }
            break;

        case Interpreter::DUMP_CAPS_CMD:
        {
            currentCamera->printSupportedParams();
        }
            break;

        case Interpreter::AUTOFOCUS_CMD:
        {
            stat = currentCamera->autoFocus();
        }
            break;

        case Interpreter::TAKEPICTURE_CMD:
        {
            stat = currentCamera->takePicture();
        }
            break;

        // Picture-in-picture requires exactly two sensors; captures from
        // both, restoring the selected camera index afterwards.
        case Interpreter::TAKEPICTURE_IN_PICTURE_CMD:
        {
            if (mAvailableCameras.size() == 2) {
                mSaveCurrentCameraIndex = mCurrentCameraIndex;
                for (size_t i = 0; i < mAvailableCameras.size(); i++) {
                    mCurrentCameraIndex = i;
                    currentCamera = mAvailableCameras.itemAt(mCurrentCameraIndex);
                    currentCamera->enablePiPCapture();
                    stat = currentCamera->takePicture();
                }
                mCurrentCameraIndex = mSaveCurrentCameraIndex;
            } else {
                printf("Number of available sensors should be 2\n");
            }
        }
        break;

        case Interpreter::ENABLE_PRV_CALLBACKS_CMD:
        {
            stat = currentCamera->enablePreviewCallbacks();
        }
            break;

        // Recording restarts preview around (un)configuring the recorder.
        case Interpreter::START_RECORD_CMD:
        {
            stat = currentCamera->stopPreview();
            stat = currentCamera->configureRecorder();
            stat = currentCamera->startPreview();
            stat = currentCamera->startRecording();
        }
            break;

        case Interpreter::STOP_RECORD_CMD:
        {
            stat = currentCamera->stopRecording();

            stat = currentCamera->stopPreview();
            stat = currentCamera->unconfigureRecorder();
            stat = currentCamera->startPreview();
        }
            break;

        // Video-in-video recording also requires exactly two sensors and a
        // shared encoder/muxer configured via the interpreter.
        case Interpreter::START_VIV_RECORD_CMD:
        {

            if (mAvailableCameras.size() == 2) {
                mSaveCurrentCameraIndex = mCurrentCameraIndex;
                stat = mInterpreter->configureViVCodec();
                for ( size_t i = 0; i < mAvailableCameras.size(); i++ ) {
                    mCurrentCameraIndex = i;
                    currentCamera = mAvailableCameras.itemAt(
                        mCurrentCameraIndex);
                    stat = currentCamera->stopPreview();
                    stat = currentCamera->configureViVRecording();
                    stat = currentCamera->startPreview();
                    stat = currentCamera->startViVRecording();
                }
                mCurrentCameraIndex = mSaveCurrentCameraIndex;
            } else {
                printf("Number of available sensors should be 2\n");
            }

        }
            break;

        case Interpreter::STOP_VIV_RECORD_CMD:
        {
            if (mAvailableCameras.size() == 2) {
                mSaveCurrentCameraIndex = mCurrentCameraIndex;
                for ( size_t i = 0; i < mAvailableCameras.size(); i++ ) {
                    mCurrentCameraIndex = i;
                    currentCamera = mAvailableCameras.itemAt(
                        mCurrentCameraIndex);
                    stat = currentCamera->stopViVRecording();
                    stat = currentCamera->stopPreview();
                    stat = currentCamera->unconfigureRecorder();
                    stat = currentCamera->startPreview();
                }
                stat = mInterpreter->unconfigureViVCodec();
                mCurrentCameraIndex = mSaveCurrentCameraIndex;

                mInterpreter->releasePiPBuff();
            } else {
                printf("Number of available sensors should be 2\n");
            }
        }
        break;

        case Interpreter::EXIT_CMD:
        {
            currentCamera->stopPreview();
            mTestRunning = false;
        }
            break;

        // Script-only command: sleep for the given number of milliseconds.
        case Interpreter::DELAY:
        {
            if ( command.arg ) {
                int delay = atoi(command.arg);
                if (0 < delay) {
                    usleep(1000U * (unsigned int)delay);
                }
            }
        }
            break;

        // Toggle ZSL between "on" and "off" based on the current value.
        case Interpreter::ZSL_CMD:
        {
            currentCamera = mAvailableCameras.itemAt(
                    mCurrentCameraIndex);
            ZSLStr = currentCamera->getZSL();

            if (NULL != ZSLStr) {
                ZSLStrSize = strlen(ZSLStr);
                if (!strncmp(ZSLStr, "off", ZSLStrSize)) {
                    currentCamera->setZSL("on");
                    mIsZSLOn = true;
                } else if (!strncmp(ZSLStr, "on", ZSLStrSize)) {
                    currentCamera->setZSL("off");
                    mIsZSLOn = false;
                } else {
                    printf("Set zsl failed!\n");
                }
            } else {
                printf("zsl is NULL\n");
            }
        }
            break;

        // Unknown input: suppress the menu reprint until the next command.
        default:
        {
            currentCamera->disablePrintPreview();
        }
            break;
        }
        printf("Command status 0x%x \n", stat);
    }

    return NO_ERROR;
}
+
/*===========================================================================
 * FUNCTION   : PiPLock
 *
 * DESCRIPTION: Acquires the picture-in-picture busy flag, blocking until
 *              any in-flight PiP capture releases it via PiPUnlock()
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
void TestContext::PiPLock()
{
    Mutex::Autolock l(mPiPLock);
    // Condition-variable wait loop: re-check the predicate after every
    // wakeup to tolerate spurious wakeups.
    while (mPiPinUse) {
        mPiPCond.wait(mPiPLock);
    }
    mPiPinUse = true;
}
+
/*===========================================================================
 * FUNCTION   : PiPUnlock
 *
 * DESCRIPTION: Releases the picture-in-picture busy flag and wakes one
 *              waiter blocked in PiPLock()
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
void TestContext::PiPUnlock()
{
    Mutex::Autolock l(mPiPLock);
    mPiPinUse = false;
    mPiPCond.signal();
}
+
/*===========================================================================
 * FUNCTION   : ViVLock
 *
 * DESCRIPTION: Acquires the video-in-video busy flag, blocking until the
 *              current ViV user releases it via ViVUnlock()
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
void TestContext::ViVLock()
{
    Mutex::Autolock l(mViVLock);
    // Re-check the predicate after every wakeup (spurious wakeups).
    while (mViVinUse) {
        mViVCond.wait(mViVLock);
    }
    mViVinUse = true;
}
+
/*===========================================================================
 * FUNCTION   : ViVUnlock
 *
 * DESCRIPTION: Releases the video-in-video busy flag and wakes one waiter
 *              blocked in ViVLock()
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
void TestContext::ViVUnlock()
{
    Mutex::Autolock l(mViVLock);
    mViVinUse = false;
    mViVCond.signal();
}
+
/*===========================================================================
 * FUNCTION     : setViVSize
 *
 * DESCRIPTION  : Record the video size used by one camera in the
 *                video-in-video setup
 *
 * PARAMETERS   :
 *   @VideoSize : video size
 *   @camIndex  : camera index selecting the slot in VideoSizes; assumed
 *                valid (presumably 0 or 1) -- no bounds check is performed
 *                here, verify at call sites
 *
 * RETURN       : none
 *==========================================================================*/
void TestContext::setViVSize(Size VideoSize, int camIndex)
{
    mViVVid.VideoSizes[camIndex] = VideoSize;
}
+
+/*===========================================================================
+ * FUNCTION     : main
+ *
+ * DESCRIPTION  : main function
+ *
+ * PARAMETERS   :
+ *   @argc      : argc
+ *   @argv      : argv
+ *
+ * RETURN       : int status
+ *==========================================================================*/
+int main(int argc, char *argv[])
+{
+    TestContext ctx;
+
+    if (argc > 1) {
+        if ( ctx.AddScriptFromFile((const char *)argv[1]) ) {
+            printf("Could not add script file... "
+                "continuing in normal menu mode! \n");
+        }
+    }
+
+    ctx.FunctionalTest();
+
+    return 0;
+}
diff --git a/msmcobalt/QCamera2/HAL/test/qcamera_test.h b/msmcobalt/QCamera2/HAL/test/qcamera_test.h
new file mode 100644
index 0000000..b8c5998
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/test/qcamera_test.h
@@ -0,0 +1,361 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef QCAMERA_TEST_H
+#define QCAMERA_TEST_H
+
+#include <SkData.h>
+#include <SkBitmap.h>
+#include <SkStream.h>
+
+namespace qcamera {
+
+using namespace android;
+
+#define MAX_CAM_INSTANCES 3
+
+class TestContext;
+
+// Drives one physical camera for the functional test: preview, recording,
+// snapshot and ViV/PiP capture control, plus JPEG section parsing used for
+// picture-in-picture composition.
+class CameraContext : public CameraListener,
+    public ICameraRecordingProxyListener{
+public:
+    typedef enum {
+        READ_METADATA = 1,
+        READ_IMAGE = 2,
+        READ_ALL = 3
+    } ReadMode_t;
+
+    // This structure is used to store jpeg file sections in memory.
+    typedef struct {
+        unsigned char *  Data;
+        int      Type;
+        size_t   Size;
+    } Sections_t;
+
+public:
+    static const char KEY_ZSL[];
+
+    CameraContext(int cameraIndex);
+    virtual ~CameraContext();
+
+
+    // Camera lifecycle
+    status_t openCamera();
+    status_t closeCamera();
+
+    // Preview / capture / recording control
+    status_t startPreview();
+    status_t stopPreview();
+    status_t resumePreview();
+    status_t autoFocus();
+    status_t enablePreviewCallbacks();
+    status_t takePicture();
+    status_t startRecording();
+    status_t stopRecording();
+    status_t startViVRecording();
+    status_t stopViVRecording();
+    status_t configureViVRecording();
+
+    status_t nextPreviewSize();
+    status_t setPreviewSize(const char *format);
+    status_t getCurrentPreviewSize(Size &previewSize);
+
+    status_t nextPictureSize();
+    status_t getCurrentPictureSize(Size &pictureSize);
+    status_t setPictureSize(const char *format);
+
+    status_t nextVideoSize();
+    status_t setVideoSize(const char *format);
+    status_t getCurrentVideoSize(Size &videoSize);
+    status_t configureRecorder();
+    status_t unconfigureRecorder();
+    // JPEG section handling (used for PiP composition)
+    Sections_t *FindSection(int SectionType);
+    status_t ReadSectionsFromBuffer (unsigned char *buffer,
+            size_t buffer_size, ReadMode_t ReadMode);
+    virtual IBinder* onAsBinder();
+    void setTestCtxInstance(TestContext *instance);
+
+    void printMenu(sp<CameraContext> currentCamera);
+    void printSupportedParams();
+    const char *getZSL();
+    void setZSL(const char *value);
+
+
+    int getCameraIndex() { return mCameraIndex; }
+    int getNumberOfCameras();
+    void enablePrintPreview();
+    void disablePrintPreview();
+    void enablePiPCapture();
+    void disablePiPCapture();
+    void CheckSectionsAllocated();
+    void DiscardData();
+    void DiscardSections();
+    size_t calcBufferSize(int width, int height);
+    size_t calcStride(int width);
+    size_t calcYScanLines(int height);
+    size_t calcUVScanLines(int height);
+
+    // CameraListener / ICameraRecordingProxyListener callbacks
+    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
+    virtual void postData(int32_t msgType,
+            const sp<IMemory>& dataPtr,
+            camera_frame_metadata_t *metadata);
+
+    virtual void postDataTimestamp(nsecs_t timestamp,
+            int32_t msgType,
+            const sp<IMemory>& dataPtr);
+    virtual void dataCallbackTimestamp(nsecs_t timestamp,
+            int32_t msgType,
+            const sp<IMemory>& dataPtr);
+
+private:
+
+    status_t createPreviewSurface(int width, int height, int32_t pixFormat);
+    status_t destroyPreviewSurface();
+
+    status_t saveFile(const sp<IMemory>& mem, String8 path);
+    SkBitmap * PiPCopyToOneFile(SkBitmap *bitmap0, SkBitmap *bitmap1);
+    status_t decodeJPEG(const sp<IMemory>& mem, SkBitmap *skBM);
+    status_t encodeJPEG(SkWStream * stream, const SkBitmap *bitmap,
+        String8 path);
+    void previewCallback(const sp<IMemory>& mem);
+
+    static int JpegIdx;
+    int mCameraIndex;
+    bool mResizePreview;
+    bool mHardwareActive;
+    bool mPreviewRunning;
+    bool mRecordRunning;
+    int  mVideoFd;
+    int  mVideoIdx;
+    bool mRecordingHint;
+    bool mDoPrintMenu;
+    bool mPiPCapture;
+    static int mPiPIdx;
+    unsigned int mfmtMultiplier;
+    int mWidthTmp;
+    int mHeightTmp;
+    size_t mSectionsRead;
+    size_t mSectionsAllocated;
+    Sections_t * mSections;
+    Sections_t * mJEXIFTmp;
+    Sections_t mJEXIFSection;
+    int mHaveAll;
+    TestContext *mInterpr;
+
+    sp<Camera> mCamera;
+    sp<SurfaceComposerClient> mClient;
+    sp<SurfaceControl> mSurfaceControl;
+    sp<Surface> mPreviewSurface;
+    sp<MediaRecorder> mRecorder;
+    CameraParameters mParams;
+    SkBitmap *skBMDec;
+    SkImageEncoder* skJpegEnc;
+    SkBitmap skBMtmp;
+    sp<IMemory> PiPPtrTmp;
+
+    size_t mCurrentPreviewSizeIdx;
+    Size getPreviewSizeFromVideoSizes(Size currentVideoSize);
+    size_t mCurrentPictureSizeIdx;
+    size_t mCurrentVideoSizeIdx;
+    Vector<Size> mSupportedPreviewSizes;
+    Vector<Size> mSupportedPictureSizes;
+    Vector<Size> mSupportedVideoSizes;
+
+    bool mInUse;
+    Mutex mLock;
+    Condition mCond;
+
+    void useLock();
+    void signalFinished();
+
+    //------------------------------------------------------------------------
+    // JPEG markers consist of one or more 0xFF bytes, followed by a marker
+    // code byte (which is not an FF).  Here are the marker codes of interest
+    // in this program.  (See jdmarker.c for a more complete list.)
+    //------------------------------------------------------------------------
+    #define M_SOF0  0xC0          // Start Of Frame N
+    #define M_SOF1  0xC1          // N indicates which compression process
+    #define M_SOF2  0xC2          // Only SOF0-SOF2 are now in common use
+    #define M_SOF3  0xC3
+    #define M_SOF5  0xC5          // NB: codes C4 and CC are NOT SOF markers
+    #define M_SOF6  0xC6
+    #define M_SOF7  0xC7
+    #define M_SOF9  0xC9
+    #define M_SOF10 0xCA
+    #define M_SOF11 0xCB
+    #define M_SOF13 0xCD
+    #define M_SOF14 0xCE
+    #define M_SOF15 0xCF
+    #define M_SOI   0xD8          // Start Of Image (beginning of datastream)
+    #define M_EOI   0xD9          // End Of Image (end of datastream)
+    #define M_SOS   0xDA          // Start Of Scan (begins compressed data)
+    #define M_JFIF  0xE0          // Jfif marker
+    #define M_EXIF  0xE1          // Exif marker.  Also used for XMP data!
+    #define M_XMP   0x10E1        // Not a real tag same value as Exif!
+    #define M_COM   0xFE          // COMment
+    #define M_DQT   0xDB
+    #define M_DHT   0xC4
+    #define M_DRI   0xDD
+    #define M_IPTC  0xED          // IPTC marker
+    // Extra value. Fix: the macro body previously ended with a stray ';'
+    // which leaks into every expansion site.
+    #define PSEUDO_IMAGE_MARKER 0x123
+};
+
+// Parses and dispenses test commands, either interactively from stdin or
+// from a pre-recorded script file; also owns the ViV encoder thread.
+class Interpreter
+{
+public:
+    // One-letter command codes accepted from the menu or a script.
+    enum Commands_e {
+        SWITCH_CAMERA_CMD = 'A',
+        RESUME_PREVIEW_CMD = '[',
+        START_PREVIEW_CMD = '1',
+        STOP_PREVIEW_CMD = '2',
+        CHANGE_VIDEO_SIZE_CMD = '3',
+        CHANGE_PREVIEW_SIZE_CMD = '4',
+        CHANGE_PICTURE_SIZE_CMD = '5',
+        START_RECORD_CMD = '6',
+        STOP_RECORD_CMD = '7',
+        START_VIV_RECORD_CMD = '8',
+        STOP_VIV_RECORD_CMD = '9',
+        DUMP_CAPS_CMD = 'E',
+        AUTOFOCUS_CMD = 'f',
+        TAKEPICTURE_CMD = 'p',
+        TAKEPICTURE_IN_PICTURE_CMD = 'P',
+        ENABLE_PRV_CALLBACKS_CMD = '&',
+        EXIT_CMD = 'q',
+        DELAY = 'd',
+        ZSL_CMD = 'z',
+        INVALID_CMD = '0'
+    };
+
+    // A command code plus an optional textual argument (e.g. a delay value).
+    // arg is a borrowed pointer into the script buffer; not owned.
+    struct Command {
+        Command( Commands_e cmd_, char *arg_ = NULL)
+        : cmd(cmd_)
+        , arg(arg_) {}
+        Command()
+        : cmd(INVALID_CMD)
+        , arg(NULL) {}
+        Commands_e cmd;
+        char *arg;
+    };
+
+    /* API */
+    // Interactive mode: commands come from the console.
+    Interpreter()
+    : mUseScript(false)
+    , mScript(NULL) {}
+
+    // Scripted mode: commands are pre-parsed from the given file.
+    Interpreter(const char *file);
+    ~Interpreter();
+
+    Command getCommand(sp<CameraContext> currentCamera);
+    void releasePiPBuff();
+    status_t configureViVCodec();
+    void setViVSize(Size VideoSize, int camIndex);
+    void setTestCtxInst(TestContext *instance);
+    status_t unconfigureViVCodec();
+    status_t ViVEncoderThread();
+    void ViVEncode();
+    // pthread entry point; context is the Interpreter instance.
+    static void *ThreadWrapper(void *context);
+
+private:
+    static const int numberOfCommands;
+
+    bool mUseScript;          // true when commands come from mScript
+    size_t mCmdIndex;         // next command to dispense from mCommands
+    char *mScript;            // raw script buffer (owned)
+    Vector<Command> mCommands;
+    TestContext *mTestContext;
+    pthread_t mViVEncThread;
+};
+
+// Top-level test harness: owns all CameraContext instances, the command
+// Interpreter, and the shared state for PiP capture and ViV recording.
+class TestContext
+{
+    friend class CameraContext;
+    friend class Interpreter;
+public:
+    TestContext();
+    ~TestContext();
+
+    size_t GetCamerasNum();
+    status_t FunctionalTest();
+    status_t AddScriptFromFile(const char *scriptFile);
+    void setViVSize(Size VideoSize, int camIndex);
+    // Condition-variable based mutual exclusion for PiP and ViV resources;
+    // each Lock() blocks until the matching Unlock() is called.
+    void PiPLock();
+    void PiPUnlock();
+    void ViVLock();
+    void ViVUnlock();
+
+private:
+    sp<CameraContext> camera[MAX_CAM_INSTANCES];
+    char GetNextCmd(sp<qcamera::CameraContext> currentCamera);
+    size_t mCurrentCameraIndex;
+    size_t mSaveCurrentCameraIndex;
+    Vector< sp<qcamera::CameraContext> > mAvailableCameras;
+    bool mTestRunning;
+    Interpreter *mInterpreter;
+    Mutex mPiPLock;        // guards mPiPinUse
+    Condition mPiPCond;
+    bool mPiPinUse;
+    Mutex mViVLock;        // guards mViVinUse
+    Condition mViVCond;
+    bool mViVinUse;
+    bool mIsZSLOn;
+
+    // Scratch buffer describing one ViV source frame (NV21 geometry).
+    typedef struct ViVBuff_t{
+        void *buff;
+        size_t buffSize;
+        size_t YStride;
+        size_t UVStride;
+        size_t YScanLines;
+        size_t UVScanLines;
+        size_t srcWidth;
+        size_t srcHeight;
+    } ViVBuff_t;
+
+    // Encoder/muxer pipeline state for video-in-video recording.
+    typedef struct ViVVid_t{
+        sp<IGraphicBufferProducer> bufferProducer;
+        sp<Surface> surface;
+        sp<MediaCodec> codec;
+        sp<MediaMuxer> muxer;
+        sp<ANativeWindow> ANW;
+        Vector<sp<ABuffer> > buffers;
+        Size VideoSizes[2];    // one slot per participating camera
+        int ViVIdx;
+        size_t buff_cnt;
+        sp<GraphicBuffer> graphBuf;
+        void * mappedBuff;
+        bool isBuffValid;
+        int sourceCameraID;
+        int destinationCameraID;
+    } vidPiP_t; // NOTE(review): typedef name differs from tag ViVVid_t; the members below use the tag — confirm intended
+
+    ViVVid_t mViVVid;
+    ViVBuff_t mViVBuff;
+};
+
+}; //namespace qcamera
+
+#endif
diff --git a/msmcobalt/QCamera2/HAL/tsMakeuplib/include/ts_detectface_engine.h b/msmcobalt/QCamera2/HAL/tsMakeuplib/include/ts_detectface_engine.h
new file mode 100644
index 0000000..5197447
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/tsMakeuplib/include/ts_detectface_engine.h
@@ -0,0 +1,100 @@
+/*
+ * Copyright (C) 2014,2015 Thundersoft Corporation
+ * All rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __TS_DETECTFACE_ENGINE_H__
+#define __TS_DETECTFACE_ENGINE_H__
+#include "ts_makeup_data.h"
+#include "ts_makeup_image.h"
+
+    // Opaque handle to a face-detection context.
+    typedef void*                    TSHandle;
+
+    /*===========================================================================
+     * FUNCTION   : ts_detectface_create_context
+     *
+     * DESCRIPTION: Create a detection context. This method MUST be called
+     * before any other ts_detectface_* API.
+     *
+     *
+     * RETURN    : TSHandle as the context handle
+     *
+     *==========================================================================*/
+    TSHandle ts_detectface_create_context();
+
+
+    /*===========================================================================
+     * FUNCTION   : ts_detectface_destroy_context
+     *
+     * DESCRIPTION: Destroy a context. This method MUST be called last,
+     * on a handle previously obtained from
+     * ts_detectface_create_context.
+     *
+     * PARAMETERS :
+     *   @param[in] contexTSHandle : The context handle pointer.
+     *
+     *
+     *==========================================================================*/
+    void ts_detectface_destroy_context(TSHandle* contexTSHandle);
+
+
+    /*===========================================================================
+     * FUNCTION   : ts_detectface_detect
+     *
+     * DESCRIPTION: Start detection. The context handle MUST have been
+     * created with ts_detectface_create_context beforehand.
+     *
+     * PARAMETERS :
+     *   @param[in] contexTSHandle : The context handle.
+     *   @param[in] pInData : The TSMakeupData pointer. MUST not be NULL.
+     *
+     * RETURN    : int If less than zero failed, otherwise the number of the detected faces.
+     *
+     *==========================================================================*/
+    int ts_detectface_detect(TSHandle contexTSHandle, TSMakeupData *pInData);
+
+    /*===========================================================================
+     * FUNCTION   : ts_detectface_detectEx
+     *
+     * DESCRIPTION: Start detection (extended, stride-aware input). The
+     * context handle MUST have been created with
+     * ts_detectface_create_context beforehand.
+     *
+     * PARAMETERS :
+     *   @param[in] contexTSHandle : The context handle.
+     *   @param[in] pInData : The TSMakeupDataEx pointer. MUST not be NULL.
+     *
+     * RETURN    : int If less than zero failed, otherwise the number of the detected faces.
+     *
+     *==========================================================================*/
+    int ts_detectface_detectEx(TSHandle contexTSHandle, TSMakeupDataEx *pInData);
+    /*===========================================================================
+     * FUNCTION   : ts_detectface_get_face_info
+     *
+     * DESCRIPTION: Get information for one detected face. MUST be called
+     * only after a successful ts_detectface_detect.
+     *
+     * PARAMETERS :
+     *   @param[in] contexTSHandle : The context handle.
+     *   @param[in] index : The face index (NOTE(review): original doc said "MUST > 0"; likely 0-based — confirm against implementation).
+     *   @param[out] pFaceRect : The face rects. MUST not be NULL.
+     *   @param[out] leftEye : The left eye rect.
+     *   @param[out] rightEye : The right eye rect.
+     *   @param[out] pMouth : The mouth rect.
+     *
+     * RETURN    : TS_OK if success, otherwise failed.
+     *
+     *==========================================================================*/
+    int ts_detectface_get_face_info(TSHandle contexTSHandle, int index, TSRect *pFaceRect, TSRect *leftEye, TSRect *rightEye, TSRect *pMouth);
+
+#endif // __TS_DETECTFACE_ENGINE_H__
diff --git a/msmcobalt/QCamera2/HAL/tsMakeuplib/include/ts_makeup_data.h b/msmcobalt/QCamera2/HAL/tsMakeuplib/include/ts_makeup_data.h
new file mode 100644
index 0000000..ac43713
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/tsMakeuplib/include/ts_makeup_data.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2014,2015 Thundersoft Corporation
+ * All rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __TS_MAKEUP_DATA_H__
+#define __TS_MAKEUP_DATA_H__
+
+    // Status codes returned by the tsMakeup APIs.
+    #define TS_OK                (0x00000000)    //Successful
+    #define TS_ERROR_PARAM       (0x00000001)    //Parameters error
+    #define TS_ERROR_IO          (0x00000002)    //Input or output error
+    #define TS_ERROR_INTERNAL    (0x00000003)    //Internal error
+    #define TS_NO_MEMORY         (0x00000004)    //No memory error
+
+
+    /*
+     * Data struct : rectangle (pixel coordinates; edges inclusive of
+     * left/top — NOTE(review): exact edge convention not documented here).
+     */
+    typedef struct __tag_tsrect
+    {
+        long left;
+        long top;
+        long right;
+        long bottom;
+    } TSRect;
+
+    /*
+     * Data struct : point (pixel coordinates)
+     */
+    typedef struct __tag_tsmakeuppoint
+    {
+        long x;
+        long y;
+    } TSPoint;
+
+
+#endif // __TS_MAKEUP_DATA_H__
diff --git a/msmcobalt/QCamera2/HAL/tsMakeuplib/include/ts_makeup_engine.h b/msmcobalt/QCamera2/HAL/tsMakeuplib/include/ts_makeup_engine.h
new file mode 100644
index 0000000..375130d
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/tsMakeuplib/include/ts_makeup_engine.h
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2014,2015 Thundersoft Corporation
+ * All rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __TS_MAKEUP_ENGINI_H__
+#define __TS_MAKEUP_ENGINI_H__
+// NOTE(review): guard macro is misspelled ("ENGINI"); harmless but kept for
+// binary/source compatibility with the vendor library headers.
+#include "ts_makeup_data.h"
+#include "ts_makeup_image.h"
+
+
+    /*
+     * FUNCTION   : ts_makeup_get_supported_face_num
+     *
+     * DESCRIPTION: get supported face number
+     *
+     * RETURN    : The supported face number
+     *
+     */
+    int ts_makeup_get_supported_face_num();
+
+
+    /*
+     * FUNCTION   : ts_makeup_skin_beauty
+     *
+     * DESCRIPTION: skin beauty method.
+     *
+     * PARAMETERS :
+     *   @param[in] pInData : The TSMakeupData pointer. MUST not be NULL.
+     *   @param[out] pOutData : The TSMakeupData pointer. MUST not be NULL.
+     *   @param[in] pFaceRect : The face rect. MUST not be NULL.
+     *   @param[in] cleanLevel : Skin clean level, value range [0,100].
+     *   @param[in] whiteLevel : Skin white level, value range [0,100].
+     * RETURN    : TS_OK if success, otherwise failed.
+     *
+     */
+    int ts_makeup_skin_beauty(TSMakeupData *pInData, TSMakeupData *pOutData, const TSRect *pFaceRect, int cleanLevel,int whiteLevel);
+    /*
+     * FUNCTION   : ts_makeup_skin_beautyEx
+     *
+     * DESCRIPTION: skin beauty method (extended, stride-aware buffers).
+     *
+     * PARAMETERS :
+     *   @param[in] pInData : The TSMakeupDataEx pointer. MUST not be NULL.
+     *   @param[out] pOutData : The TSMakeupDataEx pointer. MUST not be NULL.
+     *   @param[in] pFaceRect : The face rect. MUST not be NULL.
+     *   @param[in] cleanLevel : Skin clean level, value range [0,100].
+     *   @param[in] whiteLevel : Skin white level, value range [0,100].
+     * RETURN    : TS_OK if success, otherwise failed.
+     *
+     */
+    int ts_makeup_skin_beautyEx(TSMakeupDataEx *pInData, TSMakeupDataEx *pOutData, const TSRect *pFaceRect, int cleanLevel, int whiteLevel);
+    /*
+     * FUNCTION   : ts_makeup_finish
+     *
+     * DESCRIPTION: Finish makeup; call this method last.
+     * This method MUST be called after ts_makeup_skin_clean and
+     * ts_makeup_skin_whiten.
+     *
+     */
+    void ts_makeup_finish();
+
+
+    /*
+     * FUNCTION   : ts_makeup_warp_face
+     *
+     * DESCRIPTION: do warp face.
+     *
+     * PARAMETERS :
+     *   @param[in] pInData : The TSMakeupData pointer. MUST not be NULL.
+     *   @param[out] pOutData : The TSMakeupData pointer. MUST not be NULL.
+     *   @param[in] pLeftEye : The left eye rect pointer. MUST not be NULL.
+     *   @param[in] pRightEye : The right eye rect pointer. MUST not be NULL.
+     *   @param[in] pMouth : The mouth rect pointer. MUST not be NULL.
+     *   @param[in] bigEyeLevel : The big eye level, value range [0,100].
+     *   @param[in] trimFaceLevel : The trim face level, value range [0,100].
+     *
+     * RETURN    : TS_OK if success, otherwise failed.
+     *
+     */
+    int ts_makeup_warp_face(TSMakeupData *pInData, TSMakeupData *pOutData,
+            const TSRect *pLeftEye, const TSRect *pRightEye, const TSRect *pMouth, int bigEyeLevel, int trimFaceLevel);
+
+#endif // __TS_MAKEUP_ENGINI_H__
diff --git a/msmcobalt/QCamera2/HAL/tsMakeuplib/include/ts_makeup_image.h b/msmcobalt/QCamera2/HAL/tsMakeuplib/include/ts_makeup_image.h
new file mode 100644
index 0000000..5621d3f
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/tsMakeuplib/include/ts_makeup_image.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2014,2015 Thundersoft Corporation
+ * All rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __TS_MAKEUP_IMGAGE_H__
+#define __TS_MAKEUP_IMGAGE_H__
+// NOTE(review): guard macro is misspelled ("IMGAGE"); kept as-is for
+// compatibility with the vendor library headers.
+
+    /*
+     * Data struct : TSMakeupData
+     * Describes one NV21 frame with tightly-packed (unpadded) planes.
+     */
+    typedef struct  __tag_tsmakeupdata
+    {
+        int frameWidth;                 //NV21 Frame width. MUST be > 0.
+        int frameHeight;                //NV21 Frame height. MUST be > 0.
+        unsigned char *yBuf;            //NV21 Y buffer pointer. MUST not be null.
+        unsigned char *uvBuf;           //NV21 UV buffer pointer. MUST not be null.
+    }TSMakeupData;
+
+    /*
+     * Data struct : TSMakeupDataEx
+     * Extended variant with explicit per-plane strides for padded buffers.
+     */
+    typedef struct  __tag_tsmakeupdataEx
+    {
+        int frameWidth;                 //NV21 Frame width. MUST be > 0.
+        int frameHeight;                //NV21 Frame height. MUST be > 0.
+        unsigned char *yBuf;            //NV21 Y buffer pointer. MUST not be null.
+        unsigned char *uvBuf;           //NV21 UV buffer pointer. MUST not be null.
+        int yStride;                    //NV21 Y buffer stride len
+        int uvStride;                   //NV21 uv buffer stride len
+    }TSMakeupDataEx;
+
+
+#endif // __TS_MAKEUP_IMGAGE_H__
diff --git a/msmcobalt/QCamera2/HAL/wrapper/QualcommCamera.cpp b/msmcobalt/QCamera2/HAL/wrapper/QualcommCamera.cpp
new file mode 100644
index 0000000..e964cd9
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/wrapper/QualcommCamera.cpp
@@ -0,0 +1,450 @@
+/* Copyright (c) 2011-2014, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#define ALOG_NIDEBUG 0
+#define LOG_TAG "QualcommCamera"
+
+// System dependencies
+#include <utils/threads.h>
+#include <binder/IMemory.h>
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <utils/RefBase.h>
+
+extern "C" {
+#define TIME_H <SYSTEM_HEADER_PREFIX/time.h>
+#include TIME_H
+}
+
+// Camera dependencies
+#include "QualcommCamera.h"
+#include "QCamera2Factory.h"
+#include "QCamera2HWI.h"
+
+/* HAL function implementation goes here*/
+
+/**
+ * The functions need to be provided by the camera HAL.
+ *
+ * If getNumberOfCameras() returns N, the valid cameraId for getCameraInfo()
+ * and openCameraHardware() is 0 to N-1.
+ */
+
+
+// Module open() dispatch table required by the Android HAL loader.
+static hw_module_methods_t camera_module_methods = {
+    open: camera_device_open,
+};
+
+// hw_module_t descriptor for this camera HAL; the framework matches it by
+// id/name and calls methods->open to create camera devices.
+static hw_module_t camera_common = {
+    tag: HARDWARE_MODULE_TAG,
+    module_api_version: CAMERA_MODULE_API_VERSION_1_0,
+    hal_api_version: HARDWARE_HAL_API_VERSION,
+    id: CAMERA_HARDWARE_MODULE_ID,
+    name: "QCamera Module",
+    author: "Quic on behalf of CAF",
+    methods: &camera_module_methods,
+    dso: NULL,
+    reserved:  {0},
+};
+
+using namespace qcamera;
+namespace android {
+
+// Per-open-device bookkeeping; a pointer to this struct is stashed in
+// camera_device::priv by camera_device_open().
+typedef struct {
+    camera_device hw_dev;                    // must stay first: framework casts back from &hw_dev
+    QCamera2HardwareInterface *hardware;     // owned; deleted in close_camera_device
+    int camera_released;                     // non-zero once release() has been called
+    int cameraId;
+} camera_hardware_t;
+
+// Callback-memory wrapper (NOTE(review): appears unused in this file —
+// confirm before removal).
+typedef struct {
+  camera_memory_t mem;
+  int32_t msgType;
+  sp<IMemory> dataPtr;
+  void* user;
+  unsigned int index;
+} q_cam_memory_t;
+
+QCamera2HardwareInterface *util_get_Hal_obj( struct camera_device * device)
+{
+    // Recover the HWI instance stashed in the device's priv field by
+    // camera_device_open(); returns NULL for an invalid handle.
+    if (device == NULL || device->priv == NULL) {
+        return NULL;
+    }
+    return ((camera_hardware_t *)device->priv)->hardware;
+}
+
+extern "C" int get_number_of_cameras()
+{
+    /* try to query every time we get the call!*/
+    // Fix: "%s" previously had no matching argument (undefined behavior
+    // in the varargs call); log the function name explicitly.
+    ALOGE("Q%s: E", __func__);
+    return QCamera2Factory::get_number_of_cameras();
+}
+
+extern "C" int get_camera_info(int camera_id, struct camera_info *info)
+{
+    // Fill in static information for the given camera.
+    // Fixes: propagate the factory's return code (previously -1 was
+    // returned unconditionally, even on success) and supply the missing
+    // "%s" argument in both log statements.
+    int rc = -1;
+    ALOGE("Q%s: E", __func__);
+
+    if(info) {
+        rc = QCamera2Factory::get_camera_info(camera_id, info);
+    }
+    LOGD("Q%s: X", __func__);
+    return rc;
+}
+
+
+/* HAL should return NULL if it fails to open camera hardware. */
+extern "C" int  camera_device_open(
+  const struct hw_module_t* module, const char* id,
+          struct hw_device_t** hw_device)
+{
+    // Open camera "id" and hand a camera_device back to the framework.
+    // Fixes: (1) the original unconditionally executed
+    // "*hw_device = (hw_device_t*)&device->common" even when the module
+    // name did not match, dereferencing a NULL device pointer; now
+    // *hw_device is NULL on every failure path. (2) removed the dead
+    // "if (camHal->hardware)" branch inside the else of the same test.
+    int rc = -1;
+    camera_device *device = NULL;
+
+    if(module && id && hw_device) {
+        /* Never hand an uninitialized pointer back to the framework. */
+        *hw_device = NULL;
+        if (!strcmp(module->name, camera_common.name)) {
+            int cameraId = atoi(id);
+
+            camera_hardware_t *camHal =
+                (camera_hardware_t *) malloc(sizeof (camera_hardware_t));
+            if(!camHal) {
+                ALOGE("%s:  end in no mem", __func__);
+                return rc;
+            }
+            /* we have the camera_hardware obj malloced */
+            memset(camHal, 0, sizeof (camera_hardware_t));
+            camHal->hardware = new QCamera2HardwareInterface((uint32_t)cameraId);
+            if (camHal->hardware) {
+                camHal->cameraId = cameraId;
+                device = &camHal->hw_dev;
+                device->common.close = close_camera_device;
+                device->ops = &QCamera2HardwareInterface::mCameraOps;
+                device->priv = (void *)camHal;
+                /* pass actual hw_device ptr to framework */
+                *hw_device = (hw_device_t*)&device->common;
+                rc =  0;
+            } else {
+                free(camHal);
+            }
+        }
+    }
+
+    ALOGE("  end rc %d",  rc);
+    return rc;
+}
+
+extern "C"  int close_camera_device( hw_device_t *hw_dev)
+{
+    // Release the HAL instance and free per-device bookkeeping.
+    // Fix: the log format had "%s" and "%p" but only one argument.
+    ALOGE("Q%s: device = %p E", __func__, hw_dev);
+    int rc =  -1;
+    camera_device_t *device = (camera_device_t *)hw_dev;
+
+    if(device) {
+        camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+        if(camHal ) {
+            QCamera2HardwareInterface *hardware = util_get_Hal_obj( device);
+            if(!camHal->camera_released) {
+                // Framework never called release(); do it before teardown.
+                if(hardware != NULL) {
+                    hardware->release(device);
+                }
+            }
+            if(hardware != NULL)
+                delete hardware;
+            free(camHal);
+        }
+        rc = 0;
+    }
+    return rc;
+}
+
+
+int set_preview_window(struct camera_device * device,
+        struct preview_stream_ops *window)
+{
+    // Forward the preview window handle to the HAL implementation.
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if (hardware == NULL) {
+        return -1;
+    }
+    return hardware->set_preview_window(device, window);
+}
+
+void set_CallBacks(struct camera_device * device,
+        camera_notify_callback notify_cb,
+        camera_data_callback data_cb,
+        camera_data_timestamp_callback data_cb_timestamp,
+        camera_request_memory get_memory,
+        void *user)
+{
+    // Register the framework's notification/data callbacks with the HAL.
+    // Fix: "%s" previously had no matching argument (undefined behavior).
+    ALOGE("Q%s: E", __func__);
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        hardware->set_CallBacks(device, notify_cb,data_cb, data_cb_timestamp, get_memory, user);
+    }
+}
+
+void enable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+    // Enable delivery of the requested callback message types.
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if (hardware == NULL) {
+        return;
+    }
+    hardware->enable_msg_type(device, msg_type);
+}
+
+void disable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+    // Disable delivery of the given callback message types.
+    // Fix: "%s" previously had no matching argument (undefined behavior).
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    ALOGE("Q%s: E", __func__);
+    if(hardware != NULL){
+        hardware->disable_msg_type(device, msg_type);
+    }
+}
+
+int msg_type_enabled(struct camera_device * device, int32_t msg_type)
+{
+    // Query whether the given message types are currently enabled;
+    // returns -1 on an invalid device handle.
+    // Fix: "%s" previously had no matching argument (undefined behavior).
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->msg_type_enabled(device, msg_type);
+    }
+    return rc;
+}
+
+int start_preview(struct camera_device * device)
+{
+    // Start the preview stream; returns -1 on an invalid device handle.
+    // Fix: both log calls had "%s" with no matching argument (UB).
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->start_preview(device);
+    }
+    ALOGE("Q%s: X", __func__);
+    return rc;
+}
+
+void stop_preview(struct camera_device * device)
+{
+    // Stop the preview stream; no-op on an invalid device handle.
+    // Fix: "%s" previously had no matching argument (undefined behavior).
+    ALOGE("Q%s: E", __func__);
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        hardware->stop_preview(device);
+    }
+}
+
+int preview_enabled(struct camera_device * device)
+{
+    // Fix: supply __func__ for the "%s" specifier (was missing -> UB).
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->preview_enabled(device);
+    }
+    return rc;
+}
+
+int store_meta_data_in_buffers(struct camera_device * device, int enable)
+{
+    // Fix: supply __func__ for the "%s" specifier (was missing -> UB);
+    // also normalized the inconsistent body indentation.
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->store_meta_data_in_buffers(device, enable);
+    }
+    return rc;
+}
+
+int start_recording(struct camera_device * device)
+{
+    // Fix: supply __func__ for the "%s" specifier (was missing -> UB).
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->start_recording(device);
+    }
+    return rc;
+}
+
+void stop_recording(struct camera_device * device)
+{
+    // Fix: supply __func__ for the "%s" specifier (was missing -> UB).
+    ALOGE("Q%s: E", __func__);
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        hardware->stop_recording(device);
+    }
+}
+
+int recording_enabled(struct camera_device * device)
+{
+    // Fix: supply __func__ for the "%s" specifier (was missing -> UB).
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->recording_enabled(device);
+    }
+    return rc;
+}
+
+void release_recording_frame(struct camera_device * device,
+                const void *opaque)
+{
+    // Fix: supply __func__ for the "%s" specifier (was missing -> UB).
+    // Kept LOGD (this file's mm-camera logging macro) as the original did.
+    LOGD("Q%s: E", __func__);
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        hardware->release_recording_frame(device, opaque);
+    }
+}
+
+int auto_focus(struct camera_device * device)
+{
+    // Fix: supply __func__ for the "%s" specifier (was missing -> UB).
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->auto_focus(device);
+    }
+    return rc;
+}
+
+int cancel_auto_focus(struct camera_device * device)
+{
+    // Fix: supply __func__ for the "%s" specifier (was missing -> UB).
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->cancel_auto_focus(device);
+    }
+    return rc;
+}
+
+int take_picture(struct camera_device * device)
+{
+    // Fix: supply __func__ for the "%s" specifier (was missing -> UB).
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->take_picture(device);
+    }
+    return rc;
+}
+
+int cancel_picture(struct camera_device * device)
+{
+    // Fix: supply __func__ for the "%s" specifier (was missing -> UB);
+    // also dropped the stray blank line between signature and brace.
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->cancel_picture(device);
+    }
+    return rc;
+}
+
+int set_parameters(struct camera_device * device, const char *parms)
+{
+    // Apply a parameter string to the HAL; -1 when the device has no
+    // HAL instance or parms is NULL.
+    // Fix: supply __func__ for the "%s" specifier (was missing -> UB);
+    // also normalized the broken closing-brace indentation.
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL && parms){
+        rc = hardware->set_parameters(device, parms);
+    }
+    return rc;
+}
+
+char* get_parameters(struct camera_device * device)
+{
+    // Fetch the current parameter string from the HAL; NULL when no HAL
+    // instance is attached. The returned string is presumably given back
+    // via put_parameters() - confirm against the HAL contract.
+    // Fix: supply __func__ for the "%s" specifier (was missing -> UB).
+    ALOGE("Q%s: E", __func__);
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        char *parms = NULL;
+        parms = hardware->get_parameters(device);
+        return parms;
+    }
+    return NULL;
+}
+
+void put_parameters(struct camera_device * device, char *parm)
+{
+    // Hand a parameter string obtained from get_parameters() back to the
+    // HAL so it can be released.
+    // Fix: supply __func__ for the "%s" specifier (was missing -> UB).
+    ALOGE("Q%s: E", __func__);
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        hardware->put_parameters(device, parm);
+    }
+}
+
+int send_command(struct camera_device * device,
+            int32_t cmd, int32_t arg1, int32_t arg2)
+{
+    // Fix: supply __func__ for the "%s" specifier (was missing -> UB).
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->send_command(device, cmd, arg1, arg2);
+    }
+    return rc;
+}
+
+void release(struct camera_device * device)
+{
+    // Release HAL resources and record that fact in the wrapper state
+    // (device->priv), so later teardown can tell release already ran.
+    // Fix: supply __func__ for the "%s" specifier (was missing -> UB).
+    ALOGE("Q%s: E", __func__);
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+        hardware->release(device);
+        camHal->camera_released = true;
+    }
+}
+
+int dump(struct camera_device * device, int fd)
+{
+    // Fix: supply __func__ for the "%s" specifier (was missing -> UB).
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCamera2HardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->dump(device, fd);
+    }
+    return rc;
+}
+
+}; // namespace android
diff --git a/msmcobalt/QCamera2/HAL/wrapper/QualcommCamera.h b/msmcobalt/QCamera2/HAL/wrapper/QualcommCamera.h
new file mode 100644
index 0000000..6caa3ca
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL/wrapper/QualcommCamera.h
@@ -0,0 +1,107 @@
+/* Copyright (c) 2011-2013, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef ANDROID_HARDWARE_QUALCOMM_CAMERA_H
+#define ANDROID_HARDWARE_QUALCOMM_CAMERA_H
+
+// Camera dependencies
+#include "QCamera2HWI.h"
+
+extern "C" {
+
+// Module-scope entry points: camera enumeration, static info query, and
+// device open/close.
+  int get_number_of_cameras();
+  int get_camera_info(int camera_id, struct camera_info *info);
+
+  int camera_device_open(const struct hw_module_t* module, const char* id,
+          struct hw_device_t** device);
+
+  hw_device_t * open_camera_device(int cameraId);
+
+  int close_camera_device( hw_device_t *);
+
+namespace android {
+// Per-device operations; each takes the camera_device handle and forwards
+// to the QCamera2HardwareInterface instance recovered from it.
+  int set_preview_window(struct camera_device *,
+          struct preview_stream_ops *window);
+  void set_CallBacks(struct camera_device *,
+          camera_notify_callback notify_cb,
+          camera_data_callback data_cb,
+          camera_data_timestamp_callback data_cb_timestamp,
+          camera_request_memory get_memory,
+          void *user);
+
+  void enable_msg_type(struct camera_device *, int32_t msg_type);
+
+  void disable_msg_type(struct camera_device *, int32_t msg_type);
+  int msg_type_enabled(struct camera_device *, int32_t msg_type);
+
+  int start_preview(struct camera_device *);
+
+  void stop_preview(struct camera_device *);
+
+  int preview_enabled(struct camera_device *);
+  int store_meta_data_in_buffers(struct camera_device *, int enable);
+
+  int start_recording(struct camera_device *);
+
+  void stop_recording(struct camera_device *);
+
+  int recording_enabled(struct camera_device *);
+
+  void release_recording_frame(struct camera_device *,
+                  const void *opaque);
+
+  int auto_focus(struct camera_device *);
+
+  int cancel_auto_focus(struct camera_device *);
+
+  int take_picture(struct camera_device *);
+
+  int cancel_picture(struct camera_device *);
+
+  int set_parameters(struct camera_device *, const char *parms);
+
+  char* get_parameters(struct camera_device *);
+
+  void put_parameters(struct camera_device *, char *);
+
+  int send_command(struct camera_device *,
+              int32_t cmd, int32_t arg1, int32_t arg2);
+
+  void release(struct camera_device *);
+
+  int dump(struct camera_device *, int fd);
+
+
+
+}; // namespace android
+
+} //extern "C"
+
+#endif
+
diff --git a/msmcobalt/QCamera2/HAL3/QCamera3Channel.cpp b/msmcobalt/QCamera2/HAL3/QCamera3Channel.cpp
new file mode 100644
index 0000000..f55b65a
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL3/QCamera3Channel.cpp
@@ -0,0 +1,4781 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCamera3Channel"
+
+// To remove
+#include <cutils/properties.h>
+
+// System dependencies
+#include <fcntl.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include "hardware/gralloc.h"
+#include <utils/Timers.h>
+
+// Camera dependencies
+#include "QCamera3Channel.h"
+#include "QCamera3HWI.h"
+#include "QCameraTrace.h"
+#include "QCameraFormat.h"
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+using namespace android;
+
+namespace qcamera {
+// True when the V4L2 error flag is set on a buffer returned by the kernel.
+#define IS_BUFFER_ERROR(x) (((x) & V4L2_BUF_FLAG_ERROR) == V4L2_BUF_FLAG_ERROR)
+
+/*===========================================================================
+ * FUNCTION   : QCamera3Channel
+ *
+ * DESCRIPTION: constrcutor of QCamera3Channel
+ *
+ * PARAMETERS :
+ *   @cam_handle : camera handle
+ *   @cam_ops    : ptr to camera ops table
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3Channel::QCamera3Channel(uint32_t cam_handle,
+                               uint32_t channel_handle,
+                               mm_camera_ops_t *cam_ops,
+                               channel_cb_routine cb_routine,
+                               cam_padding_info_t *paddingInfo,
+                               cam_feature_mask_t postprocess_mask,
+                               void *userData, uint32_t numBuffers)
+{
+    m_camHandle = cam_handle;
+    m_handle = channel_handle;
+    m_camOps = cam_ops;
+    m_bIsActive = false;
+
+    m_numStreams = 0;
+    memset(mStreams, 0, sizeof(mStreams));
+    mUserData = userData;
+
+    mStreamInfoBuf = NULL;
+    mChannelCB = cb_routine;
+    // Copy caller padding, but zero the offsets (channel applies none).
+    mPaddingInfo = *paddingInfo;
+    mPaddingInfo.offset_info.offset_x = 0;
+    mPaddingInfo.offset_info.offset_y = 0;
+
+    mPostProcMask = postprocess_mask;
+
+    mIsType = IS_TYPE_NONE;
+    mNumBuffers = numBuffers;
+    mPerFrameMapUnmapEnable = true;
+    mDumpFrmCnt = 0;
+    // Fix: initialize the remaining dump-control members. dumpYUV()
+    // reads mDumpSkipCnt before assigning it, so leaving these
+    // uninitialized made the first dump decision undefined behavior.
+    mYUVDump = 0;
+    mFrmNum = 0;
+    mSkipMode = 0;
+    mDumpSkipCnt = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera3Channel
+ *
+ * DESCRIPTION: destructor of QCamera3Channel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3Channel::~QCamera3Channel()
+{
+    // Intentionally empty: stream teardown is done via destroy(), which
+    // calls virtual functions that must not run from a base destructor.
+}
+
+/*===========================================================================
+ * FUNCTION   : destroy
+ *
+ * DESCRIPTION: internal destructor of QCamera3Channel called by the subclasses
+ *              this destructor will call pure virtual functions.  stop will eventuall call
+ *              QCamera3Stream::putBufs.  The putBufs function will
+ *              call QCamera3Channel::putStreamBufs which is pure virtual
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3Channel::destroy()
+{
+    // Halt streaming before tearing the streams down.
+    if (m_bIsActive) {
+        stop();
+    }
+
+    // Delete every stream slot; delete on NULL is a no-op, and the slot
+    // is cleared either way.
+    for (uint32_t i = 0; i < m_numStreams; i++) {
+        delete mStreams[i];
+        mStreams[i] = 0;
+    }
+    m_numStreams = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : addStream
+ *
+ * DESCRIPTION: add a stream into channel
+ *
+ * PARAMETERS :
+ *   @streamType     : stream type
+ *   @streamFormat   : stream format
+ *   @streamDim      : stream dimension
+ *   @streamRotation : rotation of the stream
+ *   @minStreamBufNum : minimal buffer count for particular stream type
+ *   @postprocessMask : post-proccess feature mask
+ *   @isType         : type of image stabilization required on the stream
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Channel::addStream(cam_stream_type_t streamType,
+                                  cam_format_t streamFormat,
+                                  cam_dimension_t streamDim,
+                                  cam_rotation_t streamRotation,
+                                  uint8_t minStreamBufNum,
+                                  cam_feature_mask_t postprocessMask,
+                                  cam_is_type_t isType,
+                                  uint32_t batchSize)
+{
+    int32_t rc = NO_ERROR;
+
+    // HAL3 limits each channel to one stream. Note this makes the
+    // MAX_STREAM_NUM_IN_BUNDLE check below unreachable; it is kept as a
+    // safeguard in case the one-stream limit is ever lifted.
+    if (m_numStreams >= 1) {
+        LOGE("Only one stream per channel supported in v3 Hal");
+        return BAD_VALUE;
+    }
+
+    if (m_numStreams >= MAX_STREAM_NUM_IN_BUNDLE) {
+        LOGE("stream number (%d) exceeds max limit (%d)",
+               m_numStreams, MAX_STREAM_NUM_IN_BUNDLE);
+        return BAD_VALUE;
+    }
+    QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
+                                               m_handle,
+                                               m_camOps,
+                                               &mPaddingInfo,
+                                               this);
+    if (pStream == NULL) {
+        LOGE("No mem for Stream");
+        return NO_MEMORY;
+    }
+    LOGD("batch size is %d", batchSize);
+
+    rc = pStream->init(streamType, streamFormat, streamDim, streamRotation,
+            NULL, minStreamBufNum, postprocessMask, isType, batchSize,
+            streamCbRoutine, this);
+    // Fix: compare against NO_ERROR rather than a bare 0, consistent
+    // with the status-code convention used throughout this file.
+    if (rc == NO_ERROR) {
+        mStreams[m_numStreams] = pStream;
+        m_numStreams++;
+    } else {
+        // init failed: the stream never joined the table, so free it here.
+        delete pStream;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : start
+ *
+ * DESCRIPTION: start channel, which will start all streams belong to this channel
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Channel::start()
+{
+    ATRACE_CALL();
+    int32_t rc = NO_ERROR;
+
+    // A channel needs at least one stream; more than one is tolerated
+    // with a warning (bundling is unsupported here).
+    if (m_numStreams == 0) {
+        return NO_INIT;
+    }
+    if (m_numStreams > 1) {
+        LOGW("bundle not supported");
+    }
+
+    if (m_bIsActive) {
+        LOGW("Attempt to start active channel");
+        return rc;
+    }
+
+    for (uint32_t i = 0; i < m_numStreams; i++) {
+        QCamera3Stream *stream = mStreams[i];
+        if (stream != NULL) {
+            stream->start();
+        }
+    }
+
+    m_bIsActive = true;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop
+ *
+ * DESCRIPTION: stop a channel, which will stop all streams belong to this channel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Channel::stop()
+{
+    ATRACE_CALL();
+    int32_t rc = NO_ERROR;
+
+    // Stopping an inactive channel is logged but still returns success,
+    // mirroring the start() convention.
+    if (!m_bIsActive) {
+        LOGE("Attempt to stop inactive channel");
+        return rc;
+    }
+
+    for (uint32_t i = 0; i < m_numStreams; i++) {
+        QCamera3Stream *stream = mStreams[i];
+        if (stream != NULL) {
+            stream->stop();
+        }
+    }
+
+    m_bIsActive = false;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setBatchSize
+ *
+ * DESCRIPTION: Set batch size for the channel. This is a dummy implementation
+ *              for the base class
+ *
+ * PARAMETERS :
+ *   @batchSize  : Number of image buffers in a batch
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success always
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Channel::setBatchSize(uint32_t batchSize)
+{
+    // Base-class stub: batching is only handled by channels that
+    // override this; the argument is logged and otherwise ignored.
+    LOGD("Dummy method. batchSize: %d unused ", batchSize);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : queueBatchBuf
+ *
+ * DESCRIPTION: This is a dummy implementation for the base class
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success always
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Channel::queueBatchBuf()
+{
+    // Base-class stub: non-batching channels have nothing to queue.
+    LOGD("Dummy method. Unused ");
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setPerFrameMapUnmap
+ *
+ * DESCRIPTION: Sets internal enable flag
+ *
+ * PARAMETERS :
+ *  @enable : Bool value for the enable flag
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success always
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Channel::setPerFrameMapUnmap(bool enable)
+{
+    // Toggle the per-frame map/unmap flag. Presumably true means stream
+    // buffers are mapped/unmapped on every frame rather than kept mapped
+    // for the stream lifetime - confirm against QCamera3Stream usage.
+    mPerFrameMapUnmapEnable = enable;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : flush
+ *
+ * DESCRIPTION: flush a channel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Channel::flush()
+{
+    ATRACE_CALL();
+    // Base class holds no pending work to flush; always succeeds.
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : bufDone
+ *
+ * DESCRIPTION: return a stream buf back to kernel
+ *
+ * PARAMETERS :
+ *   @recvd_frame  : stream buf frame to be returned
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Channel::bufDone(mm_camera_super_buf_t *recvd_frame)
+{
+    int32_t rc = NO_ERROR;
+
+    // For every buffer in the super-buf, find the stream that owns it
+    // (matched by stream handle) and return the buffer to the kernel
+    // through that stream.
+    for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
+        if (recvd_frame->bufs[i] == NULL) {
+            continue;
+        }
+        for (uint32_t j = 0; j < m_numStreams; j++) {
+            QCamera3Stream *stream = mStreams[j];
+            if ((stream != NULL) &&
+                    (stream->getMyHandle() == recvd_frame->bufs[i]->stream_id)) {
+                rc = stream->bufDone(recvd_frame->bufs[i]->buf_idx);
+                break; // owner found; move to next buffer
+            }
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setBundleInfo
+ *
+ * DESCRIPTION: send the bundle configuration to the first (only) stream
+ *              of this channel via a stream parameter
+ *
+ * PARAMETERS :
+ *   @bundleInfo : bundle configuration to apply
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              NO_INIT   -- no stream available yet
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Channel::setBundleInfo(const cam_bundle_config_t &bundleInfo)
+{
+    int32_t rc = NO_ERROR;
+    cam_stream_parm_buffer_t param;
+
+    // Fix: the original dereferenced mStreams[0] unconditionally; a
+    // channel with no stream added yet would crash here.
+    if ((m_numStreams == 0) || (mStreams[0] == NULL)) {
+        LOGE("No stream available to set bundle info");
+        return NO_INIT;
+    }
+
+    memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+    param.type = CAM_STREAM_PARAM_TYPE_SET_BUNDLE_INFO;
+    param.bundleInfo = bundleInfo;
+    rc = mStreams[0]->setParameter(param);
+    if (rc != NO_ERROR) {
+        LOGE("stream setParameter for set bundle failed");
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamTypeMask
+ *
+ * DESCRIPTION: Get bit mask of all stream types in this channel
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : Bit mask of all stream types in this channel
+ *==========================================================================*/
+uint32_t QCamera3Channel::getStreamTypeMask()
+{
+    // OR together one bit per stream type owned by this channel.
+    uint32_t mask = 0;
+    for (uint32_t i = 0; i < m_numStreams; i++) {
+       // Fix: guard against NULL slots, consistent with the other
+       // stream-table walks in this class (getStreamByHandle, bufDone).
+       if (mStreams[i] != NULL) {
+           mask |= (1U << mStreams[i]->getMyType());
+       }
+    }
+    return mask;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamID
+ *
+ * DESCRIPTION: Get StreamID of requested stream type
+ *
+ * PARAMETERS : streamMask
+ *
+ * RETURN     : Stream ID
+ *==========================================================================*/
+uint32_t QCamera3Channel::getStreamID(uint32_t streamMask)
+{
+    // Return the server ID of the first stream whose type bit matches
+    // the requested mask; 0 when no stream matches.
+    uint32_t streamID = 0;
+    for (uint32_t i = 0; i < m_numStreams; i++) {
+        // Fix: guard against NULL slots, consistent with the other
+        // stream-table walks in this class.
+        if ((mStreams[i] != NULL) &&
+                (streamMask == (uint32_t)(0x1 << mStreams[i]->getMyType()))) {
+            streamID = mStreams[i]->getMyServerID();
+            break;
+        }
+    }
+    return streamID;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamByHandle
+ *
+ * DESCRIPTION: return stream object by stream handle
+ *
+ * PARAMETERS :
+ *   @streamHandle : stream handle
+ *
+ * RETURN     : stream object. NULL if not found
+ *==========================================================================*/
+QCamera3Stream *QCamera3Channel::getStreamByHandle(uint32_t streamHandle)
+{
+    // Linear scan over the (small) stream table for a matching handle.
+    for (uint32_t i = 0; i < m_numStreams; i++) {
+        QCamera3Stream *stream = mStreams[i];
+        if ((stream != NULL) && (stream->getMyHandle() == streamHandle)) {
+            return stream;
+        }
+    }
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamByIndex
+ *
+ * DESCRIPTION: return stream object by index
+ *
+ * PARAMETERS :
+ *   @streamHandle : stream handle
+ *
+ * RETURN     : stream object. NULL if not found
+ *==========================================================================*/
+QCamera3Stream *QCamera3Channel::getStreamByIndex(uint32_t index)
+{
+    // Out-of-range indices yield NULL rather than touching the array.
+    return (index < m_numStreams) ? mStreams[index] : NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : streamCbRoutine
+ *
+ * DESCRIPTION: callback routine for stream
+ *
+ * PARAMETERS :
+ *   @streamHandle : stream handle
+ *
+ * RETURN     : stream object. NULL if not found
+ *==========================================================================*/
+void QCamera3Channel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
+                QCamera3Stream *stream, void *userdata)
+{
+    // Static trampoline: recover the channel instance from the opaque
+    // cookie and dispatch to the virtual per-channel callback.
+    QCamera3Channel *ch = (QCamera3Channel *)userdata;
+    if (ch == NULL) {
+        LOGE("invalid channel pointer");
+        return;
+    }
+    ch->streamCbRoutine(super_frame, stream);
+}
+
+/*===========================================================================
+ * FUNCTION   : dumpYUV
+ *
+ * DESCRIPTION: function to dump the YUV data from ISP/pproc
+ *
+ * PARAMETERS :
+ *   @frame   : frame to be dumped
+ *   @dim     : dimension of the stream
+ *   @offset  : offset of the data
+ *   @name    : 1 if it is ISP output/pproc input, 2 if it is pproc output
+ *
+ * RETURN  :
+ *==========================================================================*/
+void QCamera3Channel::dumpYUV(mm_camera_buf_def_t *frame, cam_dimension_t dim,
+        cam_frame_len_offset_t offset, uint8_t dump_type)
+{
+    char buf[FILENAME_MAX];
+    memset(buf, 0, sizeof(buf));
+    static int counter = 0;
+    char prop[PROPERTY_VALUE_MAX];
+    // Dump control word, re-read from the property on every call:
+    //   bits  0-7  : bitmask of QCAMERA_DUMP_FRM_* types to dump
+    //   bits  8-15 : skip interval (dump every Nth call)
+    //   bits 16-31 : maximum number of frames to dump
+    property_get("persist.camera.dumpimg", prop, "0");
+    mYUVDump = (uint32_t)atoi(prop);
+    if (mYUVDump & dump_type) {
+        mFrmNum = ((mYUVDump & 0xffff0000) >> 16);
+        if (mFrmNum == 0) {
+            // Default cap when the upper bits are unset.
+            mFrmNum = 10;
+        }
+        if (mFrmNum > 256) {
+            // Hard ceiling on dumped frames.
+            mFrmNum = 256;
+        }
+        mSkipMode = ((mYUVDump & 0x0000ff00) >> 8);
+        if (mSkipMode == 0) {
+            // Skip interval of 1 == dump every frame.
+            mSkipMode = 1;
+        }
+        if (mDumpSkipCnt == 0) {
+            mDumpSkipCnt = 1;
+        }
+        if (mDumpSkipCnt % mSkipMode == 0) {
+            if (mDumpFrmCnt <= mFrmNum) {
+                /* Note that the image dimension will be the unrotated stream dimension.
+                * If you feel that the image would have been rotated during reprocess
+                * then swap the dimensions while opening the file
+                * */
+                // Build a per-type file name under QCAMERA_DUMP_FRM_LOCATION.
+                // In the default case buf stays empty, so the open() below
+                // fails and is handled by the error branch.
+                switch (dump_type) {
+                    case QCAMERA_DUMP_FRM_PREVIEW:
+                        snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"p_%d_%d_%dx%d.yuv",
+                            counter, frame->frame_idx, dim.width, dim.height);
+                    break;
+                    case QCAMERA_DUMP_FRM_VIDEO:
+                        snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"v_%d_%d_%dx%d.yuv",
+                            counter, frame->frame_idx, dim.width, dim.height);
+                    break;
+                    case QCAMERA_DUMP_FRM_SNAPSHOT:
+                        snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.yuv",
+                            counter, frame->frame_idx, dim.width, dim.height);
+                    break;
+                    case QCAMERA_DUMP_FRM_INPUT_REPROCESS:
+                        snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"ir_%d_%d_%dx%d.yuv",
+                            counter, frame->frame_idx, dim.width, dim.height);
+                    break;
+                    case QCAMERA_DUMP_FRM_CALLBACK:
+                        snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"c_%d_%d_%dx%d.yuv",
+                            counter, frame->frame_idx, dim.width, dim.height);
+                    break;
+                    default :
+                        LOGE("dumping not enabled for stream type %d",dump_type);
+                    break;
+                }
+                counter++;
+                int file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+                ssize_t written_len = 0;
+                if (file_fd >= 0) {
+                    void *data = NULL;
+                    fchmod(file_fd, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
+                    // Write each plane row by row: stride padding is skipped
+                    // so the file contains only `width` bytes per line.
+                    for (uint32_t i = 0; i < offset.num_planes; i++) {
+                        uint32_t index = offset.mp[i].offset;
+                        if (i > 0) {
+                            index += offset.mp[i-1].len;
+                        }
+                        for (int j = 0; j < offset.mp[i].height; j++) {
+                            data = (void *)((uint8_t *)frame->buffer + index);
+                            written_len += write(file_fd, data,
+                                    (size_t)offset.mp[i].width);
+                            index += (uint32_t)offset.mp[i].stride;
+                        }
+                    }
+                    LOGH("written number of bytes %ld\n", written_len);
+                    mDumpFrmCnt++;
+                    close(file_fd);
+                } else {
+                    LOGE("failed to open file to dump image");
+                }
+            }
+        } else {
+            // Not this frame's turn: advance the skip counter only.
+            mDumpSkipCnt++;
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : isUBWCEnabled
+ *
+ * DESCRIPTION: Function to get UBWC hardware support.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : TRUE -- UBWC format supported
+ *              FALSE -- UBWC is not supported.
+ *==========================================================================*/
+bool QCamera3Channel::isUBWCEnabled()
+{
+#ifdef UBWC_PRESENT
+    char value[PROPERTY_VALUE_MAX];
+
+    // UBWC can be force-disabled through the gralloc debug property.
+    memset(value, 0, sizeof(value));
+    property_get("debug.gralloc.gfx_ubwc_disable", value, "0");
+    if (atoi(value)) {
+        return FALSE;
+    }
+
+    // EzTune processes CPP output frames and cannot parse UBWC, so it
+    // also forces the linear format.
+    memset(value, 0, sizeof(value));
+    property_get("persist.camera.eztune.enable", value, "0");
+    if (atoi(value)) {
+        return FALSE;
+    }
+    return TRUE;
+#else
+    // Hardware without UBWC support never enables it.
+    return FALSE;
+#endif
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamDefaultFormat
+ *
+ * DESCRIPTION: return default buffer format for the stream
+ *
+ * PARAMETERS : type : Stream type
+ *
+ ** RETURN    : format for stream type
+ *
+ *==========================================================================*/
+cam_format_t QCamera3Channel::getStreamDefaultFormat(cam_stream_type_t type)
+{
+    cam_format_t streamFormat;
+
+    switch (type) {
+    case CAM_STREAM_TYPE_PREVIEW:
+        // Preview prefers UBWC when the hardware supports it and the
+        // property (default "1") does not opt out.
+        if (isUBWCEnabled()) {
+            char prop[PROPERTY_VALUE_MAX];
+            int pFormat;
+            memset(prop, 0, sizeof(prop));
+            property_get("persist.camera.preview.ubwc", prop, "1");
+            pFormat = atoi(prop);
+            if (pFormat == 1) {
+                streamFormat = CAM_FORMAT_YUV_420_NV12_UBWC;
+            } else {
+                /* Changed to macro to ensure format sent to gralloc for preview
+                is also changed if the preview format is changed at camera HAL */
+                streamFormat = PREVIEW_STREAM_FORMAT;
+            }
+        } else {
+            /* Changed to macro to ensure format sent to gralloc for preview
+            is also changed if the preview format is changed at camera HAL */
+            streamFormat = PREVIEW_STREAM_FORMAT;
+        }
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        // Video mirrors the preview logic with its own opt-out property.
+        if (isUBWCEnabled()) {
+            char prop[PROPERTY_VALUE_MAX];
+            int pFormat;
+            memset(prop, 0, sizeof(prop));
+            property_get("persist.camera.video.ubwc", prop, "1");
+            pFormat = atoi(prop);
+            if (pFormat == 1) {
+                streamFormat = CAM_FORMAT_YUV_420_NV12_UBWC;
+            } else {
+                streamFormat = CAM_FORMAT_YUV_420_NV12_VENUS;
+            }
+        } else {
+// NOTE(review): this uses `#if VENUS_PRESENT` while the UBWC path uses
+// `#ifdef UBWC_PRESENT`; this only works if VENUS_PRESENT is always
+// defined as 0/1 by the build - confirm against the makefiles.
+#if VENUS_PRESENT
+        streamFormat = CAM_FORMAT_YUV_420_NV12_VENUS;
+#else
+        streamFormat = CAM_FORMAT_YUV_420_NV12;
+#endif
+        }
+        break;
+    case CAM_STREAM_TYPE_SNAPSHOT:
+        streamFormat = CAM_FORMAT_YUV_420_NV21;
+        break;
+    case CAM_STREAM_TYPE_CALLBACK:
+        /* Changed to macro to ensure format sent to gralloc for callback
+        is also changed if the preview format is changed at camera HAL */
+        streamFormat = CALLBACK_STREAM_FORMAT;
+        break;
+    case CAM_STREAM_TYPE_RAW:
+        streamFormat = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
+        break;
+    default:
+        // Unknown stream types fall back to NV21.
+        streamFormat = CAM_FORMAT_YUV_420_NV21;
+        break;
+    }
+    return streamFormat;
+}
+
+
+/* QCamera3ProcessingChannel methods */
+
+/*===========================================================================
+ * FUNCTION   : QCamera3ProcessingChannel
+ *
+ * DESCRIPTION: constructor of QCamera3ProcessingChannel
+ *
+ * PARAMETERS :
+ *   @cam_handle : camera handle
+ *   @channel_handle : channel handle
+ *   @cam_ops    : ptr to camera ops table
+ *   @cb_routine : callback routine to frame aggregator
+ *   @paddingInfo: stream padding info
+ *   @userData   : HWI handle
+ *   @stream     : camera3_stream_t structure
+ *   @stream_type: Channel stream type
+ *   @postprocess_mask: the postprocess mask for streams of this channel
+ *   @metadataChannel: handle to the metadataChannel
+ *   @numBuffers : number of max dequeued buffers
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3ProcessingChannel::QCamera3ProcessingChannel(uint32_t cam_handle,
+        uint32_t channel_handle,
+        mm_camera_ops_t *cam_ops,
+        channel_cb_routine cb_routine,
+        cam_padding_info_t *paddingInfo,
+        void *userData,
+        camera3_stream_t *stream,
+        cam_stream_type_t stream_type,
+        cam_feature_mask_t postprocess_mask,
+        QCamera3Channel *metadataChannel,
+        uint32_t numBuffers) :
+            QCamera3Channel(cam_handle, channel_handle, cam_ops, cb_routine,
+                    paddingInfo, postprocess_mask, userData, numBuffers),
+            m_postprocessor(this),
+            mFrameCount(0),
+            mLastFrameCount(0),
+            mLastFpsTime(0),
+            mMemory(numBuffers),
+            mCamera3Stream(stream),
+            mNumBufs(CAM_MAX_NUM_BUFS_PER_STREAM),
+            mStreamType(stream_type),
+            mPostProcStarted(false),
+            mInputBufferConfig(false),
+            m_pMetaChannel(metadataChannel),
+            mMetaFrame(NULL),
+            // Extra offline meta buffers cover in-flight reprocess pipeline stages.
+            mOfflineMemory(0),
+            mOfflineMetaMemory(numBuffers + (MAX_REPROCESS_PIPELINE_STAGES - 1),
+                    false)
+{
+    // Enable per-stream FPS logging when persist.debug.sf.showfps is set.
+    char prop[PROPERTY_VALUE_MAX];
+    property_get("persist.debug.sf.showfps", prop, "0");
+    mDebugFPS = (uint8_t) atoi(prop);
+
+    // Postprocessor shares this channel's stream memory for output buffers.
+    int32_t rc = m_postprocessor.init(&mMemory);
+    if (rc != 0) {
+        LOGE("Init Postprocessor failed");
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera3ProcessingChannel
+ *
+ * DESCRIPTION: destructor of QCamera3ProcessingChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3ProcessingChannel::~QCamera3ProcessingChannel()
+{
+    // Tear down channel resources before deinitializing the postprocessor.
+    destroy();
+
+    int32_t rc = m_postprocessor.deinit();
+    if (rc != 0) {
+        LOGE("De-init Postprocessor failed");
+    }
+
+    // Release offline reprocess bookkeeping: heap-backed meta buffers are
+    // deallocated, gralloc-backed input buffers are only unregistered.
+    if (0 < mOfflineMetaMemory.getCnt()) {
+        mOfflineMetaMemory.deallocate();
+    }
+    if (0 < mOfflineMemory.getCnt()) {
+        mOfflineMemory.unregisterBuffers();
+    }
+
+}
+
+/*===========================================================================
+ * FUNCTION   : streamCbRoutine
+ *
+ * DESCRIPTION: stream callback for a filled buffer: validates the super
+ *              frame, optionally dumps the frame and logs FPS, then returns
+ *              the buffer to the framework through the channel callback.
+ *
+ * PARAMETERS :
+ * @super_frame : the super frame with filled buffer
+ * @stream      : stream on which the buffer was requested and filled
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3ProcessingChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
+        QCamera3Stream *stream)
+{
+     ATRACE_CALL();
+    //FIXME Q Buf back in case of error?
+    uint8_t frameIndex;
+    buffer_handle_t *resultBuffer;
+    int32_t resultFrameNumber;
+    camera3_stream_buffer_t result;
+    cam_dimension_t dim;
+    cam_frame_len_offset_t offset;
+
+    memset(&dim, 0, sizeof(dim));
+    memset(&offset, 0, sizeof(cam_frame_len_offset_t));
+    if (checkStreamCbErrors(super_frame, stream) != NO_ERROR) {
+        LOGE("Error with the stream callback");
+        return;
+    }
+
+    // The buffer index must fall inside this channel's buffer set.
+    frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
+    if(frameIndex >= mNumBufs) {
+         LOGE("Error, Invalid index for buffer");
+         stream->bufDone(frameIndex);
+         return;
+    }
+
+    if (mDebugFPS) {
+        showDebugFPS(stream->getMyType());
+    }
+    // Optional per-stream-type YUV dump of the received frame.
+    stream->getFrameDimension(dim);
+    stream->getFrameOffset(offset);
+    if (stream->getMyType() == CAM_STREAM_TYPE_PREVIEW) {
+        dumpYUV(super_frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_PREVIEW);
+    } else if (stream->getMyType() == CAM_STREAM_TYPE_VIDEO) {
+        dumpYUV(super_frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_VIDEO);
+    } else if (stream->getMyType() == CAM_STREAM_TYPE_CALLBACK) {
+        dumpYUV(super_frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_CALLBACK);
+    }
+    // Use below data to issue framework callback
+    resultBuffer = (buffer_handle_t *)mMemory.getBufferHandle(frameIndex);
+    resultFrameNumber = mMemory.getFrameNumber(frameIndex);
+
+    result.stream = mCamera3Stream;
+    result.buffer = resultBuffer;
+    if (IS_BUFFER_ERROR(super_frame->bufs[0]->flags)) {
+        result.status = CAMERA3_BUFFER_STATUS_ERROR;
+        LOGW("CAMERA3_BUFFER_STATUS_ERROR for stream_type: %d",
+                mStreams[0]->getMyType());
+    } else {
+        result.status = CAMERA3_BUFFER_STATUS_OK;
+    }
+    result.acquire_fence = -1;
+    result.release_fence = -1;
+    // When per-frame map/unmap is enabled, release and unregister the buffer
+    // now; it will be re-registered on the next request.
+    if(mPerFrameMapUnmapEnable) {
+        int32_t rc = stream->bufRelease(frameIndex);
+        if (NO_ERROR != rc) {
+            LOGE("Error %d releasing stream buffer %d",
+                     rc, frameIndex);
+        }
+
+        rc = mMemory.unregisterBuffer(frameIndex);
+        if (NO_ERROR != rc) {
+            LOGE("Error %d unregistering stream buffer %d",
+                     rc, frameIndex);
+        }
+    }
+
+    // A negative frame number means no request was matched to this buffer.
+    if (0 <= resultFrameNumber) {
+        if (mChannelCB) {
+            mChannelCB(NULL, &result, (uint32_t)resultFrameNumber, false, mUserData);
+        }
+    } else {
+        LOGE("Bad frame number");
+    }
+    free(super_frame);
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : putStreamBufs
+ *
+ * DESCRIPTION: release the buffers allocated to the stream, including the
+ *              heap buffers and bookkeeping lists specific to the YUV channel
+ *
+ * PARAMETERS : NONE
+ *
+ * RETURN     : NONE
+ *==========================================================================*/
+void QCamera3YUVChannel::putStreamBufs()
+{
+    // Let the base class unregister gralloc buffers and reclaim offline
+    // meta buffers first.
+    QCamera3ProcessingChannel::putStreamBufs();
+
+    // Free allocated heap buffer.
+    mMemory.deallocate();
+    // Clear free heap buffer list.
+    mFreeHeapBufferList.clear();
+    // Clear offlinePpInfoList
+    mOfflinePpInfoList.clear();
+}
+
+/*===========================================================================
+ * FUNCTION   : request
+ *
+ * DESCRIPTION: handle the request - either with an input buffer (reprocess
+ *              path) or a direct output request (stream path)
+ *
+ * PARAMETERS :
+ * @buffer          : pointer to the output buffer
+ * @frameNumber     : frame number of the request
+ * @pInputBuffer    : pointer to input buffer if an input request
+ * @metadata        : parameters associated with the request
+ *
+ * RETURN     : 0 on a success start of capture
+ *              -EINVAL on invalid input
+ *              -ENODEV on serious error
+ *==========================================================================*/
+int32_t QCamera3ProcessingChannel::request(buffer_handle_t *buffer,
+        uint32_t frameNumber,
+        camera3_stream_buffer_t* pInputBuffer,
+        metadata_buffer_t* metadata)
+{
+    int32_t rc = NO_ERROR;
+    int index;
+
+    if (NULL == buffer || NULL == metadata) {
+        LOGE("Invalid buffer/metadata in channel request");
+        return BAD_VALUE;
+    }
+
+    if (pInputBuffer) {
+        //need to send to reprocessing
+        LOGD("Got a request with input buffer, output streamType = %d", mStreamType);
+        // Build the reprocess configuration from the input buffer and kick
+        // off the postprocessor (no-op if already started).
+        reprocess_config_t reproc_cfg;
+        cam_dimension_t dim;
+        memset(&reproc_cfg, 0, sizeof(reprocess_config_t));
+        memset(&dim, 0, sizeof(dim));
+        setReprocConfig(reproc_cfg, pInputBuffer, metadata, mStreamFormat, dim);
+        startPostProc(reproc_cfg);
+
+        // src_frame ownership passes to the postprocessor on success;
+        // it is freed here only on setup failure.
+        qcamera_fwk_input_pp_data_t *src_frame = NULL;
+        src_frame = (qcamera_fwk_input_pp_data_t *)calloc(1,
+                sizeof(qcamera_fwk_input_pp_data_t));
+        if (src_frame == NULL) {
+            LOGE("No memory for src frame");
+            return NO_MEMORY;
+        }
+        rc = setFwkInputPPData(src_frame, pInputBuffer, &reproc_cfg, metadata, buffer, frameNumber);
+        if (NO_ERROR != rc) {
+            LOGE("Error %d while setting framework input PP data", rc);
+            free(src_frame);
+            return rc;
+        }
+        LOGH("Post-process started");
+        m_postprocessor.processData(src_frame);
+    } else {
+        //need to fill output buffer with new data and return
+        // First request on an inactive channel: register the buffer and
+        // start the stream on the fly.
+        if(!m_bIsActive) {
+            rc = registerBuffer(buffer, mIsType);
+            if (NO_ERROR != rc) {
+                LOGE("On-the-fly buffer registration failed %d",
+                         rc);
+                return rc;
+            }
+
+            rc = start();
+            if (NO_ERROR != rc)
+                return rc;
+        } else {
+            LOGD("Request on an existing stream");
+        }
+
+        // Look up the buffer; if it is not yet registered (e.g. per-frame
+        // map/unmap mode), register it now and retry the lookup.
+        index = mMemory.getMatchBufIndex((void*)buffer);
+        if(index < 0) {
+            rc = registerBuffer(buffer, mIsType);
+            if (NO_ERROR != rc) {
+                LOGE("On-the-fly buffer registration failed %d",
+                         rc);
+                return rc;
+            }
+
+            index = mMemory.getMatchBufIndex((void*)buffer);
+            if (index < 0) {
+                LOGE("Could not find object among registered buffers");
+                return DEAD_OBJECT;
+            }
+        }
+        // Queue the buffer to the stream and remember which frame number
+        // it belongs to so the callback can report it.
+        rc = mStreams[0]->bufDone(index);
+        if(rc != NO_ERROR) {
+            LOGE("Failed to Q new buffer to stream");
+            return rc;
+        }
+        rc = mMemory.markFrameNumber(index, frameNumber);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : initialize
+ *
+ * DESCRIPTION: allocate the offline metadata buffers used for input
+ *              reprocess requests and seed the free-buffer index list
+ *
+ * PARAMETERS : isType : type of image stabilization on the buffer (unused)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ProcessingChannel::initialize(__unused cam_is_type_t isType)
+{
+    int32_t rc = mOfflineMetaMemory.allocateAll(sizeof(metadata_buffer_t));
+    if (rc != NO_ERROR) {
+        LOGE("Could not allocate offline meta buffers for input reprocess");
+        return rc;
+    }
+
+    // All meta buffer indices start out free.
+    Mutex::Autolock lock(mFreeOfflineMetaBuffersLock);
+    mFreeOfflineMetaBuffersList.clear();
+    const uint32_t totalMetaBufs =
+            mNumBuffers + (MAX_REPROCESS_PIPELINE_STAGES - 1);
+    for (uint32_t idx = 0; idx < totalMetaBufs; idx++) {
+        mFreeOfflineMetaBuffersList.push_back(idx);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : registerBuffer
+ *
+ * DESCRIPTION: register streaming buffer to the channel object
+ *
+ * PARAMETERS :
+ *   @buffer     : buffer to be registered
+ *   @isType     : image stabilization type on the stream
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ProcessingChannel::registerBuffer(buffer_handle_t *buffer,
+        cam_is_type_t isType)
+{
+    ATRACE_CALL();
+    int rc = 0;
+    mIsType = isType;
+
+    // Never accept more registrations than the channel was created for.
+    if ((uint32_t)mMemory.getCnt() > (mNumBufs - 1)) {
+        LOGE("Trying to register more buffers than initially requested");
+        return BAD_VALUE;
+    }
+
+    // Lazily initialize the channel on the very first registration.
+    if (0 == m_numStreams) {
+        rc = initialize(mIsType);
+        if (rc != NO_ERROR) {
+            LOGE("Couldn't initialize camera stream %d", rc);
+            return rc;
+        }
+    }
+
+    cam_stream_type_t streamType = mStreams[0]->getMyType();
+    rc = mMemory.registerBuffer(buffer, streamType);
+    if (ALREADY_EXISTS == rc) {
+        // Re-registering a known buffer is not an error.
+        return NO_ERROR;
+    }
+    if (NO_ERROR != rc) {
+        LOGE("Buffer %p couldn't be registered %d", buffer, rc);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFwkInputPPData
+ *
+ * DESCRIPTION: fill out the framework src frame information for reprocessing
+ *
+ * PARAMETERS :
+ *   @src_frame         : input pp data to be filled out
+ *   @pInputBuffer      : input buffer for reprocessing
+ *   @reproc_cfg        : pointer to the reprocess config
+ *   @metadata          : pointer to the metadata buffer
+ *   @output_buffer     : output buffer for reprocessing; could be NULL if not
+ *                        framework allocated
+ *   @frameNumber       : frame number of the request
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ProcessingChannel::setFwkInputPPData(qcamera_fwk_input_pp_data_t *src_frame,
+        camera3_stream_buffer_t *pInputBuffer, reprocess_config_t *reproc_cfg,
+        metadata_buffer_t *metadata, buffer_handle_t *output_buffer,
+        uint32_t frameNumber)
+{
+    int32_t rc = NO_ERROR;
+    // Register the framework input buffer on the fly if it is not already
+    // known to the offline memory pool.
+    int input_index = mOfflineMemory.getMatchBufIndex((void*)pInputBuffer->buffer);
+    if(input_index < 0) {
+        rc = mOfflineMemory.registerBuffer(pInputBuffer->buffer, mStreamType);
+        if (NO_ERROR != rc) {
+            LOGE("On-the-fly input buffer registration failed %d",
+                     rc);
+            return rc;
+        }
+        input_index = mOfflineMemory.getMatchBufIndex((void*)pInputBuffer->buffer);
+        if (input_index < 0) {
+            LOGE("Could not find object among registered buffers");
+            return DEAD_OBJECT;
+        }
+    }
+    // Tie the input buffer to this frame number so it can be released in
+    // releaseOfflineMemory() once reprocessing completes.
+    mOfflineMemory.markFrameNumber(input_index, frameNumber);
+
+    src_frame->src_frame = *pInputBuffer;
+    rc = mOfflineMemory.getBufDef(reproc_cfg->input_stream_plane_info.plane_info,
+            src_frame->input_buffer, input_index);
+    if (rc != 0) {
+        return rc;
+    }
+    dumpYUV(&src_frame->input_buffer, reproc_cfg->input_stream_dim,
+            reproc_cfg->input_stream_plane_info.plane_info, QCAMERA_DUMP_FRM_INPUT_REPROCESS);
+    // Compute plane layout for a metadata "stream" sized to one
+    // metadata_buffer_t.
+    cam_dimension_t dim = {sizeof(metadata_buffer_t), 1};
+    cam_stream_buf_plane_info_t meta_planes;
+    rc = mm_stream_calc_offset_metadata(&dim, &mPaddingInfo, &meta_planes);
+    if (rc != 0) {
+        LOGE("Metadata stream plane info calculation failed!");
+        return rc;
+    }
+    // Take a free offline meta buffer index under the lock; it is returned
+    // to the free list in releaseOfflineMemory().
+    uint32_t metaBufIdx;
+    {
+        Mutex::Autolock lock(mFreeOfflineMetaBuffersLock);
+        if (mFreeOfflineMetaBuffersList.empty()) {
+            LOGE("mFreeOfflineMetaBuffersList is null. Fatal");
+            return BAD_VALUE;
+        }
+
+        metaBufIdx = *(mFreeOfflineMetaBuffersList.begin());
+        mFreeOfflineMetaBuffersList.erase(mFreeOfflineMetaBuffersList.begin());
+        LOGD("erasing %d, mFreeOfflineMetaBuffersList.size %d", metaBufIdx,
+                mFreeOfflineMetaBuffersList.size());
+    }
+
+    mOfflineMetaMemory.markFrameNumber(metaBufIdx, frameNumber);
+
+    // Copy the request metadata into the offline meta buffer.
+    // NOTE(review): if getBufDef fails below, metaBufIdx is not returned to
+    // the free list on this path - confirm whether that leak is acceptable.
+    mm_camera_buf_def_t meta_buf;
+    cam_frame_len_offset_t offset = meta_planes.plane_info;
+    rc = mOfflineMetaMemory.getBufDef(offset, meta_buf, metaBufIdx);
+    if (NO_ERROR != rc) {
+        return rc;
+    }
+    memcpy(meta_buf.buffer, metadata, sizeof(metadata_buffer_t));
+    src_frame->metadata_buffer = meta_buf;
+    src_frame->reproc_config = *reproc_cfg;
+    src_frame->output_buffer = output_buffer;
+    src_frame->frameNumber = frameNumber;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : checkStreamCbErrors
+ *
+ * DESCRIPTION: sanity-check the arguments delivered to a stream callback
+ *
+ * PARAMETERS :
+ *   @super_frame : the super frame with filled buffer
+ *   @stream      : stream on which the buffer was requested and filled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ProcessingChannel::checkStreamCbErrors(mm_camera_super_buf_t *super_frame,
+        QCamera3Stream *stream)
+{
+    // The stream handle must be valid.
+    if (stream == NULL) {
+        LOGE("Invalid stream");
+        return BAD_VALUE;
+    }
+    // So must the super buffer itself.
+    if (super_frame == NULL) {
+        LOGE("Invalid Super buffer");
+        return BAD_VALUE;
+    }
+    // Exactly one buffer is expected per callback on this channel.
+    if (super_frame->num_bufs != 1) {
+        LOGE("Multiple streams are not supported");
+        return BAD_VALUE;
+    }
+    // And that buffer must be present.
+    if (super_frame->bufs[0] == NULL) {
+        LOGE("Error, Super buffer frame does not contain valid buffer");
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamSize
+ *
+ * DESCRIPTION: get the size from the camera3_stream_t for the channel
+ *
+ * PARAMETERS :
+ *   @dim     : Return the size of the stream
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ProcessingChannel::getStreamSize(cam_dimension_t &dim)
+{
+    // Without a framework stream there is no size to report.
+    if (mCamera3Stream == NULL) {
+        return BAD_VALUE;
+    }
+    dim.width = mCamera3Stream->width;
+    dim.height = mCamera3Stream->height;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamBufs
+ *
+ * DESCRIPTION: get the buffers allocated to the stream
+ *
+ * PARAMETERS :
+ * @len       : buffer length (unused; the channel's shared memory object is
+ *              returned as-is)
+ *
+ * RETURN     : pointer to the channel's QCamera3StreamMem buffer store
+ *==========================================================================*/
+QCamera3StreamMem* QCamera3ProcessingChannel::getStreamBufs(uint32_t /*len*/)
+{
+    KPI_ATRACE_CALL();
+    return &mMemory;
+}
+
+/*===========================================================================
+ * FUNCTION   : putStreamBufs
+ *
+ * DESCRIPTION: release the buffers allocated to the stream
+ *
+ * PARAMETERS : NONE
+ *
+ * RETURN     : NONE
+ *==========================================================================*/
+void QCamera3ProcessingChannel::putStreamBufs()
+{
+    // Drop the gralloc registrations held for the framework buffers.
+    mMemory.unregisterBuffers();
+
+    // Reclaim all the offline meta buffers back onto the free list.
+    Mutex::Autolock lock(mFreeOfflineMetaBuffersLock);
+    mFreeOfflineMetaBuffersList.clear();
+    uint32_t metaCount = mOfflineMetaMemory.getCnt();
+    for (uint32_t idx = 0; idx < metaCount; idx++) {
+        mFreeOfflineMetaBuffersList.push_back(idx);
+    }
+}
+
+
+/*===========================================================================
+ * FUNCTION   : stop
+ *
+ * DESCRIPTION: stop processing channel, which will stop all streams within,
+ *              including the reprocessing channel in postprocessor.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ProcessingChannel::stop()
+{
+    int32_t rc = NO_ERROR;
+    if(!m_bIsActive) {
+        LOGE("Attempt to stop inactive channel");
+        // NOTE(review): this path logs an error but still returns NO_ERROR -
+        // confirm callers rely on stop() of an inactive channel being benign.
+        return rc;
+    }
+
+    // Stop the postprocessor before the underlying streams.
+    m_postprocessor.stop();
+    mPostProcStarted = false;
+    rc |= QCamera3Channel::stop();
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : startPostProc
+ *
+ * DESCRIPTION: figure out if the postprocessor needs to be restarted and if yes
+ *              start it
+ *
+ * PARAMETERS :
+ * @config : reprocessing configuration
+ *
+ * RETURN     : NONE
+ *==========================================================================*/
+void QCamera3ProcessingChannel::startPostProc(const reprocess_config_t &config)
+{
+    // Idempotent: only starts the postprocessor if it is not already
+    // running (mPostProcStarted is cleared again in stop()).
+    if(!mPostProcStarted) {
+        m_postprocessor.start(config);
+        mPostProcStarted = true;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : queueReprocMetadata
+ *
+ * DESCRIPTION: queue the reprocess metadata to the postprocessor
+ *
+ * PARAMETERS : metadata : the metadata corresponding to the pp frame
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ProcessingChannel::queueReprocMetadata(mm_camera_super_buf_t *metadata)
+{
+    // Thin delegate: the postprocessor owns reprocess metadata handling.
+    return m_postprocessor.processPPMetadata(metadata);
+}
+
+/*===========================================================================
+ * FUNCTION : metadataBufDone
+ *
+ * DESCRIPTION: Buffer done method for a metadata buffer; forwards the
+ *              received frame back to the metadata channel.
+ *
+ * PARAMETERS :
+ * @recvd_frame : received metadata frame
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ProcessingChannel::metadataBufDone(mm_camera_super_buf_t *recvd_frame)
+{
+    int32_t rc = NO_ERROR;
+    if ((NULL == m_pMetaChannel) || (NULL == recvd_frame)) {
+        LOGE("Metadata channel or metadata buffer invalid");
+        return BAD_VALUE;
+    }
+
+    rc = ((QCamera3MetadataChannel*)m_pMetaChannel)->bufDone(recvd_frame);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION : translateStreamTypeAndFormat
+ *
+ * DESCRIPTION: translates the framework stream format into HAL stream type
+ *              and format
+ *
+ * PARAMETERS :
+ * @stream       : fwk stream
+ * @streamType   : translated stream type
+ * @streamFormat : translated stream format
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ProcessingChannel::translateStreamTypeAndFormat(camera3_stream_t *stream,
+        cam_stream_type_t &streamType, cam_format_t &streamFormat)
+{
+    switch (stream->format) {
+        case HAL_PIXEL_FORMAT_YCbCr_420_888:
+            // Explicit YUV: input streams map to snapshot, all others to
+            // callback; the format follows the derived stream type.
+            streamType = (stream->stream_type == CAMERA3_STREAM_INPUT) ?
+                    CAM_STREAM_TYPE_SNAPSHOT : CAM_STREAM_TYPE_CALLBACK;
+            streamFormat = getStreamDefaultFormat(streamType);
+            break;
+        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+            // Opaque format: disambiguate via usage flags and stream type.
+            if (stream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) {
+                streamType = CAM_STREAM_TYPE_VIDEO;
+            } else if ((stream->stream_type == CAMERA3_STREAM_INPUT) ||
+                    (stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) ||
+                    IS_USAGE_ZSL(stream->usage)) {
+                streamType = CAM_STREAM_TYPE_SNAPSHOT;
+            } else {
+                streamType = CAM_STREAM_TYPE_PREVIEW;
+            }
+            streamFormat = getStreamDefaultFormat(streamType);
+            break;
+        case HAL_PIXEL_FORMAT_RAW_OPAQUE:
+        case HAL_PIXEL_FORMAT_RAW16:
+        case HAL_PIXEL_FORMAT_RAW10:
+            streamType = CAM_STREAM_TYPE_RAW;
+            streamFormat = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
+            break;
+        default:
+            return -EINVAL;
+    }
+    LOGD("fwk_format = %d, streamType = %d, streamFormat = %d",
+            stream->format, streamType, streamFormat);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION : setReprocConfig
+ *
+ * DESCRIPTION: sets the reprocessing parameters for the input buffer
+ *
+ * PARAMETERS :
+ * @reproc_cfg : the configuration to be set
+ * @pInputBuffer : pointer to the input buffer
+ * @metadata : pointer to the reprocessing metadata buffer
+ * @streamFormat : format of the input stream
+ * @dim : input dimensions, used when pInputBuffer is NULL
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ProcessingChannel::setReprocConfig(reprocess_config_t &reproc_cfg,
+        camera3_stream_buffer_t *pInputBuffer,
+        __unused metadata_buffer_t *metadata,
+        cam_format_t streamFormat, cam_dimension_t dim)
+{
+    int32_t rc = 0;
+    reproc_cfg.padding = &mPaddingInfo;
+    //to ensure a big enough buffer size set the height and width
+    //padding to max(height padding, width padding)
+    // NOTE(review): this writes through reproc_cfg.padding, so it also
+    // permanently mutates the channel's mPaddingInfo - confirm intended.
+    if (reproc_cfg.padding->height_padding > reproc_cfg.padding->width_padding) {
+       reproc_cfg.padding->width_padding = reproc_cfg.padding->height_padding;
+    } else {
+       reproc_cfg.padding->height_padding = reproc_cfg.padding->width_padding;
+    }
+    // Input dimensions come from the input buffer's stream when present,
+    // else from the caller-supplied dim.
+    if (NULL != pInputBuffer) {
+        reproc_cfg.input_stream_dim.width = (int32_t)pInputBuffer->stream->width;
+        reproc_cfg.input_stream_dim.height = (int32_t)pInputBuffer->stream->height;
+    } else {
+        reproc_cfg.input_stream_dim.width = (int32_t)dim.width;
+        reproc_cfg.input_stream_dim.height = (int32_t)dim.height;
+    }
+    reproc_cfg.src_channel = this;
+    reproc_cfg.output_stream_dim.width = mCamera3Stream->width;
+    reproc_cfg.output_stream_dim.height = mCamera3Stream->height;
+    reproc_cfg.reprocess_type = getReprocessType();
+
+    //offset calculation
+    if (NULL != pInputBuffer) {
+        rc = translateStreamTypeAndFormat(pInputBuffer->stream,
+                reproc_cfg.stream_type, reproc_cfg.stream_format);
+        if (rc != NO_ERROR) {
+            LOGE("Stream format %d is not supported",
+                    pInputBuffer->stream->format);
+            return rc;
+        }
+    } else {
+        reproc_cfg.stream_type = mStreamType;
+        reproc_cfg.stream_format = streamFormat;
+    }
+
+    // Compute the input plane layout per stream type.
+    switch (reproc_cfg.stream_type) {
+        case CAM_STREAM_TYPE_PREVIEW:
+            if (getStreamByIndex(0) == NULL) {
+                LOGE("Could not find stream");
+                rc = -1;
+                break;
+            }
+            rc = mm_stream_calc_offset_preview(
+                    getStreamByIndex(0)->getStreamInfo(),
+                    &reproc_cfg.input_stream_dim,
+                    reproc_cfg.padding,
+                    &reproc_cfg.input_stream_plane_info);
+            break;
+        case CAM_STREAM_TYPE_VIDEO:
+            rc = mm_stream_calc_offset_video(reproc_cfg.stream_format,
+                    &reproc_cfg.input_stream_dim,
+                    &reproc_cfg.input_stream_plane_info);
+            break;
+        case CAM_STREAM_TYPE_RAW:
+            rc = mm_stream_calc_offset_raw(reproc_cfg.stream_format,
+                    &reproc_cfg.input_stream_dim,
+                    reproc_cfg.padding, &reproc_cfg.input_stream_plane_info);
+            break;
+        case CAM_STREAM_TYPE_SNAPSHOT:
+        case CAM_STREAM_TYPE_CALLBACK:
+        default:
+            // NOTE(review): this branch passes the streamFormat parameter
+            // rather than reproc_cfg.stream_format (which may have been
+            // translated above) - confirm which one is intended here.
+            rc = mm_stream_calc_offset_snapshot(streamFormat, &reproc_cfg.input_stream_dim,
+                    reproc_cfg.padding, &reproc_cfg.input_stream_plane_info);
+            break;
+    }
+    if (rc != 0) {
+        LOGE("Stream %d plane info calculation failed!", mStreamType);
+        return rc;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : reprocessCbRoutine
+ *
+ * DESCRIPTION: callback function for the reprocessed frame. This frame now
+ *              should be returned to the framework
+ *
+ * PARAMETERS :
+ * @resultBuffer      : buffer containing the reprocessed data
+ * @resultFrameNumber : frame number on which the buffer was requested
+ *
+ * RETURN     : NONE
+ *
+ *==========================================================================*/
+void QCamera3ProcessingChannel::reprocessCbRoutine(buffer_handle_t *resultBuffer,
+        uint32_t resultFrameNumber)
+{
+    ATRACE_CALL();
+    int rc = NO_ERROR;
+
+    // Release the offline input/meta buffers tied to this frame number.
+    rc = releaseOfflineMemory(resultFrameNumber);
+    if (NO_ERROR != rc) {
+        LOGE("Error releasing offline memory %d", rc);
+    }
+    /* Since reprocessing is done, send the callback to release the input buffer */
+    if (mChannelCB) {
+        mChannelCB(NULL, NULL, resultFrameNumber, true, mUserData);
+    }
+    // Then deliver the reprocessed output buffer to the framework.
+    issueChannelCb(resultBuffer, resultFrameNumber);
+
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : issueChannelCb
+ *
+ * DESCRIPTION: build a successful framework result for the given buffer and
+ *              deliver it through the channel callback
+ *
+ * PARAMETERS :
+ * @resultBuffer      : buffer containing the data
+ * @resultFrameNumber : frame number on which the buffer was requested
+ *
+ * RETURN     : NONE
+ *==========================================================================*/
+void QCamera3ProcessingChannel::issueChannelCb(buffer_handle_t *resultBuffer,
+        uint32_t resultFrameNumber)
+{
+    // Assemble the framework result descriptor; no fences are pending.
+    camera3_stream_buffer_t cbResult;
+    cbResult.stream = mCamera3Stream;
+    cbResult.buffer = resultBuffer;
+    cbResult.status = CAMERA3_BUFFER_STATUS_OK;
+    cbResult.acquire_fence = -1;
+    cbResult.release_fence = -1;
+
+    if (mChannelCB != NULL) {
+        mChannelCB(NULL, &cbResult, resultFrameNumber, false, mUserData);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : showDebugFPS
+ *
+ * DESCRIPTION: Function to log the fps for preview, video, callback and raw
+ *              streams
+ *
+ * PARAMETERS : Stream type
+ *
+ * RETURN  : None
+ *==========================================================================*/
+void QCamera3ProcessingChannel::showDebugFPS(int32_t streamType)
+{
+    mFrameCount++;
+    nsecs_t now = systemTime();
+    nsecs_t elapsed = now - mLastFpsTime;
+    // Only emit one FPS log per 250 ms window.
+    if (elapsed <= ms2ns(250)) {
+        return;
+    }
+
+    double fps = (((double)(mFrameCount - mLastFrameCount)) *
+            (double)(s2ns(1))) / (double)elapsed;
+    switch (streamType) {
+        case CAM_STREAM_TYPE_PREVIEW:
+            LOGH("PROFILE_PREVIEW_FRAMES_PER_SECOND : %.4f: mFrameCount=%d",
+                     fps, mFrameCount);
+            break;
+        case CAM_STREAM_TYPE_VIDEO:
+            LOGH("PROFILE_VIDEO_FRAMES_PER_SECOND : %.4f",
+                     fps);
+            break;
+        case CAM_STREAM_TYPE_CALLBACK:
+            LOGH("PROFILE_CALLBACK_FRAMES_PER_SECOND : %.4f",
+                     fps);
+            break;
+        case CAM_STREAM_TYPE_RAW:
+            LOGH("PROFILE_RAW_FRAMES_PER_SECOND : %.4f",
+                     fps);
+            break;
+        default:
+            LOGH("logging not supported for the stream");
+            break;
+    }
+    mLastFpsTime = now;
+    mLastFrameCount = mFrameCount;
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseOfflineMemory
+ *
+ * DESCRIPTION: function to clean up the offline memory used for input reprocess
+ *
+ * PARAMETERS :
+ * @resultFrameNumber : frame number on which the buffer was requested
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *
+ *
+ *==========================================================================*/
+int32_t QCamera3ProcessingChannel::releaseOfflineMemory(uint32_t resultFrameNumber)
+{
+    int32_t rc = NO_ERROR;
+    // Unregister the gralloc input buffer that was registered for this
+    // reprocess request, if one is tracked under this frame number.
+    int32_t inputBufIndex =
+            mOfflineMemory.getGrallocBufferIndex(resultFrameNumber);
+    if (0 <= inputBufIndex) {
+        rc = mOfflineMemory.unregisterBuffer(inputBufIndex);
+    } else {
+        LOGW("Could not find offline input buffer, resultFrameNumber %d",
+                 resultFrameNumber);
+    }
+    if (rc != NO_ERROR) {
+        LOGE("Failed to unregister offline input buffer");
+    }
+
+    // The offline meta heap buffer is recycled, not unregistered: push its
+    // index back onto the free list under the list lock.
+    int32_t metaBufIndex =
+            mOfflineMetaMemory.getHeapBufferIndex(resultFrameNumber);
+    if (0 <= metaBufIndex) {
+        Mutex::Autolock lock(mFreeOfflineMetaBuffersLock);
+        mFreeOfflineMetaBuffersList.push_back((uint32_t)metaBufIndex);
+    } else {
+        LOGW("Could not find offline meta buffer, resultFrameNumber %d",
+                resultFrameNumber);
+    }
+
+    // NOTE(review): rc reflects only the gralloc unregister step; a missing
+    // meta buffer is logged but deliberately not treated as an error.
+    return rc;
+}
+
+/* Regular Channel methods */
+/*===========================================================================
+ * FUNCTION   : QCamera3RegularChannel
+ *
+ * DESCRIPTION: constructor of QCamera3RegularChannel
+ *
+ * PARAMETERS :
+ *   @cam_handle : camera handle
+ *   @cam_ops    : ptr to camera ops table
+ *   @cb_routine : callback routine to frame aggregator
+ *   @stream     : camera3_stream_t structure
+ *   @stream_type: Channel stream type
+ *   @postprocess_mask: feature mask for postprocessing
+ *   @metadataChannel : metadata channel for the session
+ *   @numBuffers : number of max dequeued buffers
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3RegularChannel::QCamera3RegularChannel(uint32_t cam_handle,
+        uint32_t channel_handle,
+        mm_camera_ops_t *cam_ops,
+        channel_cb_routine cb_routine,
+        cam_padding_info_t *paddingInfo,
+        void *userData,
+        camera3_stream_t *stream,
+        cam_stream_type_t stream_type,
+        cam_feature_mask_t postprocess_mask,
+        QCamera3Channel *metadataChannel,
+        uint32_t numBuffers) :
+            QCamera3ProcessingChannel(cam_handle, channel_handle, cam_ops,
+                    cb_routine, paddingInfo, userData, stream, stream_type,
+                    postprocess_mask, metadataChannel, numBuffers),
+            // No HFR batching and no rotation until initialize() derives the
+            // actual values from the camera3_stream_t.
+            mBatchSize(0),
+            mRotation(ROTATE_0)
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera3RegularChannel
+ *
+ * DESCRIPTION: destructor of QCamera3RegularChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3RegularChannel::~QCamera3RegularChannel()
+{
+    // Tear down the underlying mm-camera channel and its streams.
+    destroy();
+}
+
+/*===========================================================================
+ * FUNCTION   : initialize
+ *
+ * DESCRIPTION: Initialize and add camera channel & stream
+ *
+ * PARAMETERS :
+ *    @isType : type of image stabilization required on this stream
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+
+int32_t QCamera3RegularChannel::initialize(cam_is_type_t isType)
+{
+    ATRACE_CALL();
+    int32_t rc = NO_ERROR;
+
+    cam_dimension_t streamDim;
+
+    if (NULL == mCamera3Stream) {
+        LOGE("Camera stream uninitialized");
+        return NO_INIT;
+    }
+
+    if (1 <= m_numStreams) {
+        // Only one stream per channel supported in v3 Hal
+        return NO_ERROR;
+    }
+
+    mIsType  = isType;
+
+    // Derive the backend stream type/format from the framework stream.
+    rc = translateStreamTypeAndFormat(mCamera3Stream, mStreamType,
+            mStreamFormat);
+    if (rc != NO_ERROR) {
+        return -EINVAL;
+    }
+
+
+    // Only preview and video streams may carry a rotation request, and only
+    // when the rotation postprocess feature is enabled for this channel.
+    if ((mStreamType == CAM_STREAM_TYPE_VIDEO) ||
+            (mStreamType == CAM_STREAM_TYPE_PREVIEW)) {
+        if ((mCamera3Stream->rotation != CAMERA3_STREAM_ROTATION_0) &&
+                ((mPostProcMask & CAM_QCOM_FEATURE_ROTATION) == 0)) {
+            LOGE("attempting rotation %d when rotation is disabled",
+                    mCamera3Stream->rotation);
+            return -EINVAL;
+        }
+
+        // Map HAL3 rotation enums onto backend rotation values.
+        switch (mCamera3Stream->rotation) {
+            case CAMERA3_STREAM_ROTATION_0:
+                mRotation = ROTATE_0;
+                break;
+            case CAMERA3_STREAM_ROTATION_90: {
+                mRotation = ROTATE_90;
+                break;
+            }
+            case CAMERA3_STREAM_ROTATION_180:
+                mRotation = ROTATE_180;
+                break;
+            case CAMERA3_STREAM_ROTATION_270: {
+                mRotation = ROTATE_270;
+                break;
+            }
+            default:
+                LOGE("Unknown rotation: %d",
+                         mCamera3Stream->rotation);
+            return -EINVAL;
+        }
+    } else if (mCamera3Stream->rotation != CAMERA3_STREAM_ROTATION_0) {
+        LOGE("Rotation %d is not supported by stream type %d",
+                mCamera3Stream->rotation,
+                mStreamType);
+        return -EINVAL;
+    }
+
+    streamDim.width = mCamera3Stream->width;
+    streamDim.height = mCamera3Stream->height;
+
+    LOGD("batch size is %d", mBatchSize);
+    // Create the backend stream with the parameters resolved above.
+    rc = QCamera3Channel::addStream(mStreamType,
+            mStreamFormat,
+            streamDim,
+            mRotation,
+            mNumBufs,
+            mPostProcMask,
+            mIsType,
+            mBatchSize);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setBatchSize
+ *
+ * DESCRIPTION: Set batch size for the channel.
+ *
+ * PARAMETERS :
+ *   @batchSize  : Number of image buffers in a batch
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success always
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3RegularChannel::setBatchSize(uint32_t batchSize)
+{
+    // Record the HFR batch size; it is consumed later by initialize() when
+    // the backend stream is added. This setter cannot fail.
+    mBatchSize = batchSize;
+    LOGD("Batch size set: %d", mBatchSize);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamTypeMask
+ *
+ * DESCRIPTION: Get bit mask of all stream types in this channel.
+ *              If stream is not initialized, then generate mask based on
+ *              local streamType
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : Bit mask of all stream types in this channel
+ *==========================================================================*/
+uint32_t QCamera3RegularChannel::getStreamTypeMask()
+{
+    // Before the backend stream exists, fall back to the locally cached
+    // stream type to build the mask.
+    if (mStreams[0] == NULL) {
+        return (1U << mStreamType);
+    }
+    return QCamera3Channel::getStreamTypeMask();
+}
+
+/*===========================================================================
+ * FUNCTION   : queueBatchBuf
+ *
+ * DESCRIPTION: queue batch container to downstream
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success always
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3RegularChannel::queueBatchBuf()
+{
+    // Forward the batch container to the underlying stream when it exists;
+    // with no stream this is a successful no-op.
+    int32_t rc = (mStreams[0] != NULL) ? mStreams[0]->queueBatchBuf()
+                                       : NO_ERROR;
+    if (rc != NO_ERROR) {
+        LOGE("stream->queueBatchContainer failed");
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : request
+ *
+ * DESCRIPTION: process a request from camera service. Stream on if ncessary.
+ *
+ * PARAMETERS :
+ *   @buffer  : buffer to be filled for this request
+ *
+ * RETURN     : 0 on a success start of capture
+ *              -EINVAL on invalid input
+ *              -ENODEV on serious error
+ *==========================================================================*/
+int32_t QCamera3RegularChannel::request(buffer_handle_t *buffer, uint32_t frameNumber)
+{
+    ATRACE_CALL();
+    //FIX ME: Return buffer back in case of failures below.
+
+    int32_t rc = NO_ERROR;
+    int index;
+
+    if (NULL == buffer) {
+        LOGE("Invalid buffer in channel request");
+        return BAD_VALUE;
+    }
+
+    // First request on an inactive channel: register the buffer and start
+    // streaming before queueing anything.
+    if(!m_bIsActive) {
+        rc = registerBuffer(buffer, mIsType);
+        if (NO_ERROR != rc) {
+            LOGE("On-the-fly buffer registration failed %d",
+                     rc);
+            return rc;
+        }
+
+        rc = start();
+        if (NO_ERROR != rc) {
+            return rc;
+        }
+    } else {
+        LOGD("Request on an existing stream");
+    }
+
+    // Look up the buffer among the registered set; if it is new, register
+    // it on the fly and look it up again.
+    index = mMemory.getMatchBufIndex((void*)buffer);
+    if(index < 0) {
+        rc = registerBuffer(buffer, mIsType);
+        if (NO_ERROR != rc) {
+            LOGE("On-the-fly buffer registration failed %d",
+                     rc);
+            return rc;
+        }
+
+        index = mMemory.getMatchBufIndex((void*)buffer);
+        if (index < 0) {
+            LOGE("Could not find object among registered buffers");
+            return DEAD_OBJECT;
+        }
+    }
+
+    // Hand the buffer to the backend stream for filling.
+    rc = mStreams[0]->bufDone((uint32_t)index);
+    if(rc != NO_ERROR) {
+        LOGE("Failed to Q new buffer to stream");
+        return rc;
+    }
+
+    // Remember which frame number this buffer slot serves, so the result
+    // can be matched back to the framework request.
+    rc = mMemory.markFrameNumber((uint32_t)index, frameNumber);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getReprocessType
+ *
+ * DESCRIPTION: get the type of reprocess output supported by this channel
+ *
+ * PARAMETERS : NONE
+ *
+ * RETURN     : reprocess_type_t : type of reprocess
+ *==========================================================================*/
+reprocess_type_t QCamera3RegularChannel::getReprocessType()
+{
+    // Regular (preview/video) streams reprocess through the private format.
+    return REPROCESS_TYPE_PRIVATE;
+}
+
+
+// Constructor: metadata channel carries no image data, so memory is
+// allocated lazily in getStreamBufs().
+QCamera3MetadataChannel::QCamera3MetadataChannel(uint32_t cam_handle,
+                    uint32_t channel_handle,
+                    mm_camera_ops_t *cam_ops,
+                    channel_cb_routine cb_routine,
+                    cam_padding_info_t *paddingInfo,
+                    cam_feature_mask_t postprocess_mask,
+                    void *userData, uint32_t numBuffers) :
+                        QCamera3Channel(cam_handle, channel_handle, cam_ops,
+                                cb_routine, paddingInfo, postprocess_mask,
+                                userData, numBuffers),
+                        mMemory(NULL)
+{
+}
+
+// Destructor: tear down the channel, then release any metadata heap memory
+// still owned (putStreamBufs may not have run).
+QCamera3MetadataChannel::~QCamera3MetadataChannel()
+{
+    destroy();
+
+    if (mMemory) {
+        mMemory->deallocate();
+        delete mMemory;
+        mMemory = NULL;
+    }
+}
+
+int32_t QCamera3MetadataChannel::initialize(cam_is_type_t isType)
+{
+    ATRACE_CALL();
+
+    // Refuse double initialization: memory or a stream already exists.
+    if ((mMemory != NULL) || (m_numStreams > 0)) {
+        LOGE("metadata channel already initialized");
+        return -EINVAL;
+    }
+
+    // Metadata travels as a single-row "image" whose width equals the size
+    // of the metadata structure.
+    cam_dimension_t streamDim;
+    streamDim.width = (int32_t)sizeof(metadata_buffer_t);
+    streamDim.height = 1;
+
+    mIsType = isType;
+    int32_t rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_METADATA,
+            CAM_FORMAT_MAX, streamDim, ROTATE_0, (uint8_t)mNumBuffers,
+            mPostProcMask, mIsType);
+    if (rc < 0) {
+        LOGE("addStream failed");
+    }
+    return rc;
+}
+
+int32_t QCamera3MetadataChannel::request(buffer_handle_t * /*buffer*/,
+                                                uint32_t /*frameNumber*/)
+{
+    // Metadata has no framework buffer: the first request merely starts
+    // streaming, subsequent requests are no-ops.
+    return m_bIsActive ? 0 : start();
+}
+
+// Per-frame callback: forward the metadata super buffer to the channel
+// callback. The request number is always 0 because metadata is not tied to
+// a specific framework request here.
+void QCamera3MetadataChannel::streamCbRoutine(
+                        mm_camera_super_buf_t *super_frame,
+                        QCamera3Stream * /*stream*/)
+{
+    ATRACE_NAME("metadata_stream_cb_routine");
+    uint32_t requestNumber = 0;
+    if (super_frame == NULL || super_frame->num_bufs != 1) {
+        LOGE("super_frame is not valid");
+        return;
+    }
+    if (mChannelCB) {
+        mChannelCB(super_frame, NULL, requestNumber, false, mUserData);
+    }
+}
+
+// Allocate the metadata stream buffers. Returns NULL if len is too small
+// for a metadata_buffer_t or allocation fails; on success the first buffer
+// is cleared and the memory object (owned by this channel) is returned.
+QCamera3StreamMem* QCamera3MetadataChannel::getStreamBufs(uint32_t len)
+{
+    int rc;
+    if (len < sizeof(metadata_buffer_t)) {
+        // Use %u/%zu: the original "%d vs %d" passed a size_t through %d,
+        // which is undefined behavior on LP64 targets.
+        LOGE("Metadata buffer size less than structure %u vs %zu",
+                len,
+                sizeof(metadata_buffer_t));
+        return NULL;
+    }
+    mMemory = new QCamera3StreamMem(MIN_STREAMING_BUFFER_NUM);
+    if (!mMemory) {
+        LOGE("unable to create metadata memory");
+        return NULL;
+    }
+    rc = mMemory->allocateAll(len);
+    if (rc < 0) {
+        LOGE("unable to allocate metadata memory");
+        delete mMemory;
+        mMemory = NULL;
+        return NULL;
+    }
+    clear_metadata_buffer((metadata_buffer_t*)mMemory->getPtr(0));
+    return mMemory;
+}
+
+// Release the metadata stream memory. NOTE(review): assumes getStreamBufs()
+// succeeded first (mMemory non-NULL) — confirm the interface guarantees
+// put is only called after a successful get.
+void QCamera3MetadataChannel::putStreamBufs()
+{
+    mMemory->deallocate();
+    delete mMemory;
+    mMemory = NULL;
+}
+/*************************************************************************************/
+// RAW Channel related functions
+// Constructor: a RAW stream is a regular channel fixed to
+// CAM_STREAM_TYPE_RAW. raw_16 selects in-place conversion to RAW16 in the
+// stream callback; frame dumping is controlled by a debug property.
+QCamera3RawChannel::QCamera3RawChannel(uint32_t cam_handle,
+                    uint32_t channel_handle,
+                    mm_camera_ops_t *cam_ops,
+                    channel_cb_routine cb_routine,
+                    cam_padding_info_t *paddingInfo,
+                    void *userData,
+                    camera3_stream_t *stream,
+                    cam_feature_mask_t postprocess_mask,
+                    QCamera3Channel *metadataChannel,
+                    bool raw_16, uint32_t numBuffers) :
+                        QCamera3RegularChannel(cam_handle, channel_handle, cam_ops,
+                                cb_routine, paddingInfo, userData, stream,
+                                CAM_STREAM_TYPE_RAW, postprocess_mask,
+                                metadataChannel, numBuffers),
+                        mIsRaw16(raw_16)
+{
+    // Enable raw frame dumping via the persist.camera.raw.debug.dump prop.
+    char prop[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.raw.debug.dump", prop, "0");
+    mRawDump = atoi(prop);
+}
+
+// Destructor: base class destructors handle all teardown.
+QCamera3RawChannel::~QCamera3RawChannel()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : initialize
+ *
+ * DESCRIPTION: Initialize and add camera channel & stream
+ *
+ * PARAMETERS :
+ * @isType    : image stabilization type on the stream
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+
+int32_t QCamera3RawChannel::initialize(cam_is_type_t isType)
+{
+    // RAW streams need no extra setup beyond the regular channel path.
+    return QCamera3RegularChannel::initialize(isType);
+}
+
+// Per-frame callback for the RAW stream: optionally dump the frame,
+// optionally convert it in place to RAW16, flush the CPU cache (the
+// conversion writes through the CPU), then defer to the regular path.
+void QCamera3RawChannel::streamCbRoutine(
+                        mm_camera_super_buf_t *super_frame,
+                        QCamera3Stream * stream)
+{
+    ATRACE_CALL();
+    // Validate before dereferencing bufs[0]; the metadata and raw-dump
+    // streamCbRoutines perform the same guard.
+    if (super_frame == NULL || super_frame->num_bufs != 1) {
+        LOGE("super_frame is not valid");
+        return;
+    }
+
+    /* Move this back down once verified */
+    if (mRawDump)
+        dumpRawSnapshot(super_frame->bufs[0]);
+
+    if (mIsRaw16) {
+        if (getStreamDefaultFormat(CAM_STREAM_TYPE_RAW) ==
+                CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG)
+            convertMipiToRaw16(super_frame->bufs[0]);
+        else
+            convertLegacyToRaw16(super_frame->bufs[0]);
+    }
+
+    //Make sure cache coherence because extra processing is done
+    mMemory.cleanInvalidateCache(super_frame->bufs[0]->buf_idx);
+
+    QCamera3RegularChannel::streamCbRoutine(super_frame, stream);
+    return;
+}
+
+// Debug helper: write one raw frame to QCAMERA_DUMP_FRM_LOCATION, named by
+// frame index and the stream's stride/scanline.
+void QCamera3RawChannel::dumpRawSnapshot(mm_camera_buf_def_t *frame)
+{
+   QCamera3Stream *stream = getStreamByIndex(0);
+   if (stream != NULL) {
+       char buf[FILENAME_MAX];
+       memset(buf, 0, sizeof(buf));
+       cam_dimension_t dim;
+       memset(&dim, 0, sizeof(dim));
+       stream->getFrameDimension(dim);
+
+       cam_frame_len_offset_t offset;
+       memset(&offset, 0, sizeof(cam_frame_len_offset_t));
+       stream->getFrameOffset(offset);
+       // File name encodes stride x scanline (not width x height) so the
+       // dump can be reinterpreted with the correct padding.
+       snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"r_%d_%dx%d.raw",
+                frame->frame_idx, offset.mp[0].stride, offset.mp[0].scanline);
+
+       int file_fd = open(buf, O_RDWR| O_CREAT, 0644);
+       if (file_fd >= 0) {
+          ssize_t written_len = write(file_fd, frame->buffer, frame->frame_len);
+          LOGD("written number of bytes %zd", written_len);
+          close(file_fd);
+       } else {
+          LOGE("failed to open file to dump image");
+       }
+   } else {
+       LOGE("Could not find stream");
+   }
+
+}
+
+void QCamera3RawChannel::convertLegacyToRaw16(mm_camera_buf_def_t *frame)
+{
+    // Convert image buffer from Opaque raw format to RAW16 format
+    // 10bit Opaque raw is stored in the format of:
+    // 0000 - p5 - p4 - p3 - p2 - p1 - p0
+    // where p0 to p5 are 6 pixels (each is 10bit)_and most significant
+    // 4 bits are 0s. Each 64bit word contains 6 pixels.
+
+  QCamera3Stream *stream = getStreamByIndex(0);
+  if (stream != NULL) {
+      cam_dimension_t dim;
+      memset(&dim, 0, sizeof(dim));
+      stream->getFrameDimension(dim);
+
+      cam_frame_len_offset_t offset;
+      memset(&offset, 0, sizeof(cam_frame_len_offset_t));
+      stream->getFrameOffset(offset);
+
+      // RAW16 rows are padded to a 16-pixel stride.
+      uint32_t raw16_stride = ((uint32_t)dim.width + 15U) & ~15U;
+      uint16_t* raw16_buffer = (uint16_t *)frame->buffer;
+
+      // In-place format conversion.
+      // Raw16 format always occupy more memory than opaque raw10.
+      // Convert to Raw16 by iterating through all pixels from bottom-right
+      // to top-left of the image.
+      // One special notes:
+      // 1. Cross-platform raw16's stride is 16 pixels.
+      // 2. Opaque raw10's stride is 6 pixels, and aligned to 16 bytes.
+      for (int32_t ys = dim.height - 1; ys >= 0; ys--) {
+          uint32_t y = (uint32_t)ys;
+          uint64_t* row_start = (uint64_t *)frame->buffer +
+                  y * (uint32_t)offset.mp[0].stride_in_bytes / 8;
+          for (int32_t xs = dim.width - 1; xs >= 0; xs--) {
+              uint32_t x = (uint32_t)xs;
+              // Extract 10-bit pixel x from its 64-bit word (6 pixels/word).
+              uint16_t raw16_pixel = 0x3FF & (row_start[x/6] >> (10*(x%6)));
+              raw16_buffer[y*raw16_stride+x] = raw16_pixel;
+          }
+      }
+  } else {
+      LOGE("Could not find stream");
+  }
+
+}
+
+void QCamera3RawChannel::convertMipiToRaw16(mm_camera_buf_def_t *frame)
+{
+    // Convert image buffer from mipi10 raw format to RAW16 format
+    // mipi10 opaque raw is stored in the format of:
+    // P3(1:0) P2(1:0) P1(1:0) P0(1:0) P3(9:2) P2(9:2) P1(9:2) P0(9:2)
+    // 4 pixels occupy 5 bytes, no padding needed
+
+    QCamera3Stream *stream = getStreamByIndex(0);
+    if (stream != NULL) {
+        cam_dimension_t dim;
+        memset(&dim, 0, sizeof(dim));
+        stream->getFrameDimension(dim);
+
+        cam_frame_len_offset_t offset;
+        memset(&offset, 0, sizeof(cam_frame_len_offset_t));
+        stream->getFrameOffset(offset);
+
+        // RAW16 rows are padded to a 16-pixel stride.
+        uint32_t raw16_stride = ((uint32_t)dim.width + 15U) & ~15U;
+        uint16_t* raw16_buffer = (uint16_t *)frame->buffer;
+
+        // In-place format conversion.
+        // Raw16 format always occupy more memory than opaque raw10.
+        // Convert to Raw16 by iterating through all pixels from bottom-right
+        // to top-left of the image.
+        // One special notes:
+        // 1. Cross-platform raw16's stride is 16 pixels.
+        // 2. mipi raw10's stride is 4 pixels, and aligned to 16 bytes.
+        for (int32_t ys = dim.height - 1; ys >= 0; ys--) {
+            uint32_t y = (uint32_t)ys;
+            uint8_t* row_start = (uint8_t *)frame->buffer +
+                    y * (uint32_t)offset.mp[0].stride_in_bytes;
+            for (int32_t xs = dim.width - 1; xs >= 0; xs--) {
+                uint32_t x = (uint32_t)xs;
+                // Upper 8 bits come from the pixel's own byte; the low 2
+                // bits are packed into the 5th byte of each 4-pixel group.
+                uint8_t upper_8bit = row_start[5*(x/4)+x%4];
+                uint8_t lower_2bit = ((row_start[5*(x/4)+4] >> (x%4)) & 0x3);
+                uint16_t raw16_pixel =
+                        (uint16_t)(((uint16_t)upper_8bit)<<2 |
+                        (uint16_t)lower_2bit);
+                raw16_buffer[y*raw16_stride+x] = raw16_pixel;
+            }
+        }
+    } else {
+        LOGE("Could not find stream");
+    }
+
+}
+
+/*===========================================================================
+ * FUNCTION   : getReprocessType
+ *
+ * DESCRIPTION: get the type of reprocess output supported by this channel
+ *
+ * PARAMETERS : NONE
+ *
+ * RETURN     : reprocess_type_t : type of reprocess
+ *==========================================================================*/
+reprocess_type_t QCamera3RawChannel::getReprocessType()
+{
+    // RAW channels reprocess in the raw domain.
+    return REPROCESS_TYPE_RAW;
+}
+
+
+/*************************************************************************************/
+// RAW Dump Channel related functions
+
+/*===========================================================================
+ * FUNCTION   : QCamera3RawDumpChannel
+ *
+ * DESCRIPTION: Constructor for RawDumpChannel
+ *
+ * PARAMETERS :
+ *   @cam_handle    : Handle for Camera
+ *   @cam_ops       : Function pointer table
+ *   @rawDumpSize   : Dimensions for the Raw stream
+ *   @paddinginfo   : Padding information for stream
+ *   @userData      : Cookie for parent
+ *   @pp mask       : PP feature mask for this stream
+ *   @numBuffers    : number of max dequeued buffers
+ *
+ * RETURN           : NA
+ *==========================================================================*/
+QCamera3RawDumpChannel::QCamera3RawDumpChannel(uint32_t cam_handle,
+                    uint32_t channel_handle,
+                    mm_camera_ops_t *cam_ops,
+                    cam_dimension_t rawDumpSize,
+                    cam_padding_info_t *paddingInfo,
+                    void *userData,
+                    cam_feature_mask_t postprocess_mask, uint32_t numBuffers) :
+                        // Internal channel: no framework callback (NULL).
+                        QCamera3Channel(cam_handle, channel_handle, cam_ops, NULL,
+                                paddingInfo, postprocess_mask,
+                                userData, numBuffers),
+                        mDim(rawDumpSize),
+                        mMemory(NULL)
+{
+    // Dumping to disk is gated by the persist.camera.raw.dump property.
+    char prop[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.raw.dump", prop, "0");
+    mRawDump = atoi(prop);
+}
+
+/*===========================================================================
+ * FUNCTION   : QCamera3RawDumpChannel
+ *
+ * DESCRIPTION: Destructor for RawDumpChannel
+ *
+ * PARAMETERS :
+ *
+ * RETURN           : NA
+ *==========================================================================*/
+
+QCamera3RawDumpChannel::~QCamera3RawDumpChannel()
+{
+    // Tear down the underlying mm-camera channel and its streams.
+    destroy();
+}
+
+/*===========================================================================
+ * FUNCTION   : dumpRawSnapshot
+ *
+ * DESCRIPTION: Helper function to dump Raw frames
+ *
+ * PARAMETERS :
+ *  @frame      : stream buf frame to be dumped
+ *
+ *  RETURN      : NA
+ *==========================================================================*/
+void QCamera3RawDumpChannel::dumpRawSnapshot(mm_camera_buf_def_t *frame)
+{
+    QCamera3Stream *stream = getStreamByIndex(0);
+    if (stream != NULL) {
+        char buf[FILENAME_MAX];
+        struct timeval tv;
+        struct tm timeinfo_data;
+        struct tm *timeinfo;
+
+        cam_dimension_t dim;
+        memset(&dim, 0, sizeof(dim));
+        stream->getFrameDimension(dim);
+
+        cam_frame_len_offset_t offset;
+        memset(&offset, 0, sizeof(cam_frame_len_offset_t));
+        stream->getFrameOffset(offset);
+
+        // Timestamp the file name so successive dumps never collide.
+        gettimeofday(&tv, NULL);
+        timeinfo = localtime_r(&tv.tv_sec, &timeinfo_data);
+
+        if (NULL != timeinfo) {
+            memset(buf, 0, sizeof(buf));
+            snprintf(buf, sizeof(buf),
+                    QCAMERA_DUMP_FRM_LOCATION
+                    "%04d-%02d-%02d-%02d-%02d-%02d-%06ld_%d_%dx%d.raw",
+                    timeinfo->tm_year + 1900, timeinfo->tm_mon + 1,
+                    timeinfo->tm_mday, timeinfo->tm_hour,
+                    timeinfo->tm_min, timeinfo->tm_sec,tv.tv_usec,
+                    frame->frame_idx, dim.width, dim.height);
+
+            // NOTE(review): 0777 is unusually permissive for a debug dump;
+            // the sibling dump path uses 0644 — confirm intent.
+            int file_fd = open(buf, O_RDWR| O_CREAT, 0777);
+            if (file_fd >= 0) {
+                ssize_t written_len =
+                        write(file_fd, frame->buffer, offset.frame_len);
+                LOGD("written number of bytes %zd", written_len);
+                close(file_fd);
+            } else {
+                LOGE("failed to open file to dump image");
+            }
+        } else {
+            LOGE("localtime_r() error");
+        }
+    } else {
+        LOGE("Could not find stream");
+    }
+
+}
+
+/*===========================================================================
+ * FUNCTION   : streamCbRoutine
+ *
+ * DESCRIPTION: Callback routine invoked for each frame generated for
+ *              Rawdump channel
+ *
+ * PARAMETERS :
+ *   @super_frame  : stream buf frame generated
+ *   @stream       : Underlying Stream object cookie
+ *
+ * RETURN          : NA
+ *==========================================================================*/
+// Per-frame callback for the internal raw-dump stream: optionally dump the
+// frame, then immediately recycle the buffer back to the stream. The super
+// buffer container itself is heap-allocated by the caller and freed here.
+void QCamera3RawDumpChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
+                                                __unused QCamera3Stream *stream)
+{
+    LOGD("E");
+    if (super_frame == NULL || super_frame->num_bufs != 1) {
+        LOGE("super_frame is not valid");
+        return;
+    }
+
+    if (mRawDump)
+        dumpRawSnapshot(super_frame->bufs[0]);
+
+    bufDone(super_frame);
+    free(super_frame);
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamBufs
+ *
+ * DESCRIPTION: Callback function provided to interface to get buffers.
+ *
+ * PARAMETERS :
+ *   @len       : Length of each buffer to be allocated
+ *
+ * RETURN     : NULL on buffer allocation failure
+ *              QCamera3StreamMem object on sucess
+ *==========================================================================*/
+QCamera3StreamMem* QCamera3RawDumpChannel::getStreamBufs(uint32_t len)
+{
+    int rc;
+    // Heap-backed buffers; this internal stream never touches gralloc.
+    mMemory = new QCamera3StreamMem(mNumBuffers);
+
+    if (!mMemory) {
+        LOGE("unable to create heap memory");
+        return NULL;
+    }
+    rc = mMemory->allocateAll((size_t)len);
+    if (rc < 0) {
+        LOGE("unable to allocate heap memory");
+        delete mMemory;
+        mMemory = NULL;
+        return NULL;
+    }
+    // Ownership of mMemory stays with this channel; see putStreamBufs().
+    return mMemory;
+}
+
+/*===========================================================================
+ * FUNCTION   : putStreamBufs
+ *
+ * DESCRIPTION: Callback function provided to interface to return buffers.
+ *              Although no handles are actually returned, implicitl assumption
+ *              that interface will no longer use buffers and channel can
+ *              deallocated if necessary.
+ *
+ * PARAMETERS : NA
+ *
+ * RETURN     : NA
+ *==========================================================================*/
+// Release the heap memory handed out by getStreamBufs(). NOTE(review):
+// assumes a successful getStreamBufs() preceded this call (mMemory != NULL).
+void QCamera3RawDumpChannel::putStreamBufs()
+{
+    mMemory->deallocate();
+    delete mMemory;
+    mMemory = NULL;
+}
+
+/*===========================================================================
+ * FUNCTION : request
+ *
+ * DESCRIPTION: Request function used as trigger
+ *
+ * PARAMETERS :
+ * @recvd_frame : buffer- this will be NULL since this is internal channel
+ * @frameNumber : Undefined again since this is internal stream
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3RawDumpChannel::request(buffer_handle_t * /*buffer*/,
+                                                uint32_t /*frameNumber*/)
+{
+    // Internal channel: a request only acts as a trigger to start
+    // streaming; once active it is a no-op.
+    return m_bIsActive ? 0 : QCamera3Channel::start();
+}
+
+/*===========================================================================
+ * FUNCTION : intialize
+ *
+ * DESCRIPTION: Initializes channel params and creates underlying stream
+ *
+ * PARAMETERS :
+ *    @isType : type of image stabilization required on this stream
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3RawDumpChannel::initialize(cam_is_type_t isType)
+{
+    int32_t rc;
+
+    mIsType = isType;
+    // Fixed MIPI 10bpp GBRG format at the dimension chosen at construction.
+    rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_RAW,
+        CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG, mDim, ROTATE_0, (uint8_t)mNumBuffers,
+        mPostProcMask, mIsType);
+    if (rc < 0) {
+        LOGE("addStream failed");
+    }
+    return rc;
+}
+/*************************************************************************************/
+
+/* QCamera3YUVChannel methods */
+
+/*===========================================================================
+ * FUNCTION   : QCamera3YUVChannel
+ *
+ * DESCRIPTION: constructor of QCamera3YUVChannel
+ *
+ * PARAMETERS :
+ *   @cam_handle : camera handle
+ *   @cam_ops    : ptr to camera ops table
+ *   @cb_routine : callback routine to frame aggregator
+ *   @paddingInfo : padding information for the stream
+ *   @stream     : camera3_stream_t structure
+ *   @stream_type: Channel stream type
+ *   @postprocess_mask: the postprocess mask for streams of this channel
+ *   @metadataChannel: handle to the metadataChannel
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3YUVChannel::QCamera3YUVChannel(uint32_t cam_handle,
+        uint32_t channel_handle,
+        mm_camera_ops_t *cam_ops,
+        channel_cb_routine cb_routine,
+        cam_padding_info_t *paddingInfo,
+        void *userData,
+        camera3_stream_t *stream,
+        cam_stream_type_t stream_type,
+        cam_feature_mask_t postprocess_mask,
+        QCamera3Channel *metadataChannel) :
+            QCamera3ProcessingChannel(cam_handle, channel_handle, cam_ops,
+                    cb_routine, paddingInfo, userData, stream, stream_type,
+                    postprocess_mask, metadataChannel)
+{
+
+    // With no postprocess features requested, frames bypass the PP block.
+    mBypass = (postprocess_mask == CAM_QCOM_FEATURE_NONE);
+    mFrameLen = 0;
+    // Default edge/noise-reduction state: both off until set per-request.
+    mEdgeMode.edge_mode = CAM_EDGE_MODE_OFF;
+    mEdgeMode.sharpness = 0;
+    mNoiseRedMode = CAM_NOISE_REDUCTION_MODE_OFF;
+    memset(&mCropRegion, 0, sizeof(mCropRegion));
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera3YUVChannel
+ *
+ * DESCRIPTION: destructor of QCamera3YUVChannel
+ *
+ * PARAMETERS : none
+ *
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3YUVChannel::~QCamera3YUVChannel()
+{
+   // Deallocation of heap buffers allocated in mMemory is freed
+   // automatically by its destructor
+}
+
+/*===========================================================================
+ * FUNCTION   : initialize
+ *
+ * DESCRIPTION: Initialize and add camera channel & stream
+ *
+ * PARAMETERS :
+ * @isType    : the image stabilization type
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3YUVChannel::initialize(cam_is_type_t isType)
+{
+    ATRACE_CALL();
+    int32_t rc = NO_ERROR;
+    cam_dimension_t streamDim;
+
+    if (NULL == mCamera3Stream) {
+        LOGE("Camera stream uninitialized");
+        return NO_INIT;
+    }
+
+    if (1 <= m_numStreams) {
+        // Only one stream per channel supported in v3 Hal
+        return NO_ERROR;
+    }
+
+    mIsType  = isType;
+    mStreamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_CALLBACK);
+    streamDim.width = mCamera3Stream->width;
+    streamDim.height = mCamera3Stream->height;
+
+    rc = QCamera3Channel::addStream(mStreamType,
+            mStreamFormat,
+            streamDim,
+            ROTATE_0,
+            mNumBufs,
+            mPostProcMask,
+            mIsType);
+    if (rc < 0) {
+        LOGE("addStream failed");
+        return rc;
+    }
+
+    cam_stream_buf_plane_info_t buf_planes;
+    cam_padding_info_t paddingInfo = mPaddingInfo;
+
+    memset(&buf_planes, 0, sizeof(buf_planes));
+    //to ensure a big enough buffer size set the height and width
+    //padding to max(height padding, width padding)
+    paddingInfo.width_padding = MAX(paddingInfo.width_padding, paddingInfo.height_padding);
+    paddingInfo.height_padding = paddingInfo.width_padding;
+
+    // Compute per-plane offsets/length for the snapshot-style layout; the
+    // resulting frame_len sizes the heap buffers allocated in request().
+    rc = mm_stream_calc_offset_snapshot(mStreamFormat, &streamDim, &paddingInfo,
+            &buf_planes);
+    if (rc != NO_ERROR) {
+        // BUG FIX: log previously referenced mm_stream_calc_offset_preview,
+        // but the call above is mm_stream_calc_offset_snapshot. Also folded
+        // in the redundant second (NO_ERROR != rc) check that followed.
+        LOGE("mm_stream_calc_offset_snapshot failed, rc = %d", rc);
+        return rc;
+    }
+
+    mFrameLen = buf_planes.plane_info.frame_len;
+
+    /* initialize offline meta memory for input reprocess */
+    rc = QCamera3ProcessingChannel::initialize(isType);
+    if (NO_ERROR != rc) {
+        LOGE("Processing Channel initialize failed, rc = %d",
+                 rc);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : request
+ *
+ * DESCRIPTION: entry function for a request on a YUV stream. This function
+ *              has the logic to service a request based on its type
+ *
+ * PARAMETERS :
+ * @buffer          : pointer to the output buffer
+ * @frameNumber     : frame number of the request
+ * @pInputBuffer    : pointer to input buffer if an input request
+ * @metadata        : parameters associated with the request
+ *
+ * RETURN     : 0 on a success start of capture
+ *              -EINVAL on invalid input
+ *              -ENODEV on serious error
+ *==========================================================================*/
+int32_t QCamera3YUVChannel::request(buffer_handle_t *buffer,
+        uint32_t frameNumber,
+        camera3_stream_buffer_t* pInputBuffer,
+        metadata_buffer_t* metadata, bool &needMetadata)
+{
+    int32_t rc = NO_ERROR;
+    // Serializes mOfflinePpInfoList against streamCbRoutine() and
+    // handleOfflinePpCallback().
+    Mutex::Autolock lock(mOfflinePpLock);
+
+    LOGD("pInputBuffer is %p frame number %d", pInputBuffer, frameNumber);
+    if (NULL == buffer || NULL == metadata) {
+        LOGE("Invalid buffer/metadata in channel request");
+        return BAD_VALUE;
+    }
+
+    // Record this request so later callbacks can be matched and ordered by
+    // frame number.
+    PpInfo ppInfo;
+    memset(&ppInfo, 0, sizeof(ppInfo));
+    ppInfo.frameNumber = frameNumber;
+    ppInfo.offlinePpFlag = false;
+    // Offline postprocessing is only considered in bypass mode for
+    // non-input (regular capture) requests.
+    if (mBypass && !pInputBuffer ) {
+        ppInfo.offlinePpFlag = needsFramePostprocessing(metadata);
+        ppInfo.output = buffer;
+        mOfflinePpInfoList.push_back(ppInfo);
+    }
+
+    LOGD("offlinePpFlag is %d", ppInfo.offlinePpFlag);
+    // Tell the caller whether it must later supply metadata for reprocess.
+    needMetadata = ppInfo.offlinePpFlag;
+    if (!ppInfo.offlinePpFlag) {
+        // regular request
+        return QCamera3ProcessingChannel::request(buffer, frameNumber,
+                pInputBuffer, metadata);
+    } else {
+        if(!m_bIsActive) {
+            rc = start();
+            if (NO_ERROR != rc)
+                return rc;
+        } else {
+            LOGD("Request on an existing stream");
+        }
+
+        //we need to send this frame through the CPP
+        //Allocate heap memory, then buf done on the buffer
+        uint32_t bufIdx;
+        if (mFreeHeapBufferList.empty()) {
+            // allocateOne() returns the new buffer index (>= 0) on success.
+            rc = mMemory.allocateOne(mFrameLen);
+            if (rc < 0) {
+                LOGE("Failed allocating heap buffer. Fatal");
+                return BAD_VALUE;
+            } else {
+                bufIdx = (uint32_t)rc;
+            }
+        } else {
+            // Reuse a previously returned heap buffer.
+            bufIdx = *(mFreeHeapBufferList.begin());
+            mFreeHeapBufferList.erase(mFreeHeapBufferList.begin());
+        }
+
+        /* Configure and start postproc if necessary */
+        reprocess_config_t reproc_cfg;
+        cam_dimension_t dim;
+        memset(&reproc_cfg, 0, sizeof(reprocess_config_t));
+        memset(&dim, 0, sizeof(dim));
+        mStreams[0]->getFrameDimension(dim);
+        setReprocConfig(reproc_cfg, NULL, metadata, mStreamFormat, dim);
+
+        // Start postprocessor without input buffer
+        startPostProc(reproc_cfg);
+
+        LOGD("erasing %d", bufIdx);
+
+        // Associate the heap buffer with this frame number and queue it to
+        // the stream to be filled.
+        mMemory.markFrameNumber(bufIdx, frameNumber);
+        mStreams[0]->bufDone(bufIdx);
+
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : streamCbRoutine
+ *
+ * DESCRIPTION:
+ *
+ * PARAMETERS :
+ * @super_frame : the super frame with filled buffer
+ * @stream      : stream on which the buffer was requested and filled
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3YUVChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
+        QCamera3Stream *stream)
+{
+    ATRACE_CALL();
+    uint8_t frameIndex;
+    int32_t resultFrameNumber;
+
+    if (checkStreamCbErrors(super_frame, stream) != NO_ERROR) {
+        LOGE("Error with the stream callback");
+        return;
+    }
+
+    frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
+    if(frameIndex >= mNumBufs) {
+         LOGE("Error, Invalid index for buffer");
+         stream->bufDone(frameIndex);
+         return;
+    }
+
+    if (mBypass) {
+        List<PpInfo>::iterator ppInfo;
+
+        Mutex::Autolock lock(mOfflinePpLock);
+        // Match this buffer back to the pending entry recorded in request().
+        resultFrameNumber = mMemory.getFrameNumber(frameIndex);
+        for (ppInfo = mOfflinePpInfoList.begin();
+                ppInfo != mOfflinePpInfoList.end(); ppInfo++) {
+            if (ppInfo->frameNumber == (uint32_t)resultFrameNumber) {
+                break;
+            }
+        }
+        LOGD("frame index %d, frame number %d", frameIndex, resultFrameNumber);
+        //check the reprocessing required flag against the frame number
+        if (ppInfo == mOfflinePpInfoList.end()) {
+            LOGE("Error, request for frame number is a reprocess.");
+            stream->bufDone(frameIndex);
+            return;
+        }
+
+        if (ppInfo->offlinePpFlag) {
+            // Frame needs offline postprocessing: hand a heap copy of the
+            // super-buffer descriptor to the postprocessor (it takes
+            // ownership of the copy).
+            mm_camera_super_buf_t *frame =
+                    (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+            if (frame == NULL) {
+                LOGE("Error allocating memory to save received_frame structure.");
+                if(stream) {
+                    stream->bufDone(frameIndex);
+                }
+                return;
+            }
+
+            *frame = *super_frame;
+            m_postprocessor.processData(frame, ppInfo->output, resultFrameNumber);
+            free(super_frame);
+            return;
+        } else {
+            if (ppInfo != mOfflinePpInfoList.begin()) {
+                // There is pending reprocess buffer, cache current buffer
+                // so callbacks are delivered to the framework in order.
+                if (ppInfo->callback_buffer != NULL) {
+                    LOGE("Fatal: cached callback_buffer is already present");
+                }
+                ppInfo->callback_buffer = super_frame;
+                return;
+            } else {
+                // Head of the list and no reprocess needed: fall through to
+                // the regular callback path below.
+                mOfflinePpInfoList.erase(ppInfo);
+            }
+        }
+    }
+
+    QCamera3ProcessingChannel::streamCbRoutine(super_frame, stream);
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : reprocessCbRoutine
+ *
+ * DESCRIPTION: callback function for the reprocessed frame. This frame now
+ *              should be returned to the framework. This same callback is
+ *              used during input reprocessing or offline postprocessing
+ *
+ * PARAMETERS :
+ * @resultBuffer      : buffer containing the reprocessed data
+ * @resultFrameNumber : frame number on which the buffer was requested
+ *
+ * RETURN     : NONE
+ *
+ *==========================================================================*/
+void QCamera3YUVChannel::reprocessCbRoutine(buffer_handle_t *resultBuffer,
+        uint32_t resultFrameNumber)
+{
+    LOGD("E: frame number %d", resultFrameNumber);
+    Vector<mm_camera_super_buf_t *> pendingCbs;
+
+    /* release the input buffer and input metadata buffer if used */
+    // A negative heap-buffer index means this frame did not come from the
+    // internal heap, i.e. it was an input (framework) reprocess request.
+    if (0 > mMemory.getHeapBufferIndex(resultFrameNumber)) {
+        /* mOfflineMemory and mOfflineMetaMemory used only for input reprocessing */
+        int32_t rc = releaseOfflineMemory(resultFrameNumber);
+        if (NO_ERROR != rc) {
+            LOGE("Error releasing offline memory rc = %d", rc);
+        }
+        /* Since reprocessing is done, send the callback to release the input buffer */
+        if (mChannelCB) {
+            mChannelCB(NULL, NULL, resultFrameNumber, true, mUserData);
+        }
+    }
+
+    if (mBypass) {
+        // Recycle the heap buffer and collect any ordered callbacks that
+        // were cached behind this reprocess.
+        int32_t rc = handleOfflinePpCallback(resultFrameNumber, pendingCbs);
+        if (rc != NO_ERROR) {
+            return;
+        }
+    }
+
+    issueChannelCb(resultBuffer, resultFrameNumber);
+
+    // Call all pending callbacks to return buffers
+    for (size_t i = 0; i < pendingCbs.size(); i++) {
+        QCamera3ProcessingChannel::streamCbRoutine(
+                pendingCbs[i], mStreams[0]);
+    }
+
+}
+
+/*===========================================================================
+ * FUNCTION   : needsFramePostprocessing
+ *
+ * DESCRIPTION:
+ *
+ * PARAMETERS :
+ *
+ * RETURN     :
+ *  TRUE if frame needs to be postprocessed
+ *  FALSE is frame does not need to be postprocessed
+ *
+ *==========================================================================*/
+bool QCamera3YUVChannel::needsFramePostprocessing(metadata_buffer_t *meta)
+{
+    // Refresh the cached per-request settings from the incoming metadata;
+    // previously cached values are retained when a tag is absent.
+
+    //sharpness
+    IF_META_AVAILABLE(cam_edge_application_t, edgeMode,
+            CAM_INTF_META_EDGE_MODE, meta) {
+        mEdgeMode = *edgeMode;
+    }
+
+    //wnr
+    IF_META_AVAILABLE(uint32_t, noiseRedMode,
+            CAM_INTF_META_NOISE_REDUCTION_MODE, meta) {
+        mNoiseRedMode = *noiseRedMode;
+    }
+
+    //crop region
+    IF_META_AVAILABLE(cam_crop_region_t, scalerCropRegion,
+            CAM_INTF_META_SCALER_CROP_REGION, meta) {
+        mCropRegion = *scalerCropRegion;
+    }
+
+    // Any edge mode other than OFF / ZSL requires postprocessing.
+    const bool edgeNeedsPp =
+            (CAM_EDGE_MODE_OFF != mEdgeMode.edge_mode) &&
+            (CAM_EDGE_MODE_ZERO_SHUTTER_LAG != mEdgeMode.edge_mode);
+
+    // Any noise reduction mode other than OFF / MINIMAL / ZSL requires it.
+    const bool wnrNeedsPp =
+            (CAM_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG != mNoiseRedMode) &&
+            (CAM_NOISE_REDUCTION_MODE_OFF != mNoiseRedMode) &&
+            (CAM_NOISE_REDUCTION_MODE_MINIMAL != mNoiseRedMode);
+
+    // A crop region smaller than the stream needs cropping via reprocess.
+    const bool cropNeedsPp =
+            (mCropRegion.width < (int32_t)mCamera3Stream->width) ||
+            (mCropRegion.height < (int32_t)mCamera3Stream->height);
+
+    return edgeNeedsPp || wnrNeedsPp || cropNeedsPp;
+}
+
+/*===========================================================================
+ * FUNCTION   : handleOfflinePpCallback
+ *
+ * DESCRIPTION: callback function for the reprocessed frame from offline
+ *              postprocessing.
+ *
+ * PARAMETERS :
+ * @resultFrameNumber : frame number on which the buffer was requested
+ * @pendingCbs        : pending buffers to be returned first
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3YUVChannel::handleOfflinePpCallback(uint32_t resultFrameNumber,
+            Vector<mm_camera_super_buf_t *>& pendingCbs)
+{
+    Mutex::Autolock lock(mOfflinePpLock);
+    List<PpInfo>::iterator ppInfo;
+
+    // Find the pending entry for this frame number (list is kept in
+    // request order by request()).
+    for (ppInfo = mOfflinePpInfoList.begin();
+            ppInfo != mOfflinePpInfoList.end(); ppInfo++) {
+        if (ppInfo->frameNumber == resultFrameNumber) {
+            break;
+        }
+    }
+
+    if (ppInfo == mOfflinePpInfoList.end()) {
+        LOGI("Request of frame number %d is reprocessing",
+                 resultFrameNumber);
+        return NO_ERROR;
+    } else if (ppInfo != mOfflinePpInfoList.begin()) {
+        // Reprocess results must come back in order; anything else is a bug.
+        LOGE("callback for frame number %d should be head of list",
+                 resultFrameNumber);
+        return BAD_VALUE;
+    }
+
+    if (ppInfo->offlinePpFlag) {
+        // Need to get the input buffer frame index from the
+        // mMemory object and add that to the free heap buffers list.
+        int32_t bufferIndex =
+                mMemory.getHeapBufferIndex(resultFrameNumber);
+        if (bufferIndex < 0) {
+            LOGE("Fatal %d: no buffer index for frame number %d",
+                     bufferIndex, resultFrameNumber);
+            return BAD_VALUE;
+        }
+        mFreeHeapBufferList.push_back(bufferIndex);
+        ppInfo = mOfflinePpInfoList.erase(ppInfo);
+
+        // Return pending buffer callbacks
+        // Drain consecutive non-reprocess entries whose buffers were cached
+        // by streamCbRoutine() while this reprocess was in flight.
+        while (ppInfo != mOfflinePpInfoList.end() &&
+                !ppInfo->offlinePpFlag && ppInfo->callback_buffer) {
+
+            // Call stream callbacks for cached buffers
+            pendingCbs.push_back(ppInfo->callback_buffer);
+
+            ppInfo = mOfflinePpInfoList.erase(ppInfo);
+        }
+
+    } else {
+        LOGE("Fatal: request of frame number %d doesn't need"
+                " offline postprocessing. However there is"
+                " reprocessing callback.",
+                resultFrameNumber);
+        return BAD_VALUE;
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getReprocessType
+ *
+ * DESCRIPTION: get the type of reprocess output supported by this channel
+ *
+ * PARAMETERS : NONE
+ *
+ * RETURN     : reprocess_type_t : type of reprocess
+ *==========================================================================*/
+reprocess_type_t QCamera3YUVChannel::getReprocessType()
+{
+    // YUV channel output is reprocessed as YUV (as opposed to JPEG).
+    return REPROCESS_TYPE_YUV;
+}
+
+/* QCamera3PicChannel methods */
+
+/*===========================================================================
+ * FUNCTION   : jpegEvtHandle
+ *
+ * DESCRIPTION: Function registerd to mm-jpeg-interface to handle jpeg events.
+                Construct result payload and call mChannelCb to deliver buffer
+                to framework.
+ *
+ * PARAMETERS :
+ *   @status    : status of jpeg job
+ *   @client_hdl: jpeg client handle
+ *   @jobId     : jpeg job Id
+ *   @p_ouput   : ptr to jpeg output result struct
+ *   @userdata  : user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3PicChannel::jpegEvtHandle(jpeg_job_status_t status,
+                                              uint32_t /*client_hdl*/,
+                                              uint32_t jobId,
+                                              mm_jpeg_output_t *p_output,
+                                              void *userdata)
+{
+    ATRACE_CALL();
+    buffer_handle_t *resultBuffer = NULL;
+    buffer_handle_t *jpegBufferHandle = NULL;
+    int resultStatus = CAMERA3_BUFFER_STATUS_OK;
+    camera3_stream_buffer_t result;
+    camera3_jpeg_blob_t jpegHeader;
+
+    KPI_ATRACE_INT("SNAPSHOT", 0);
+    QCamera3PicChannel *obj = (QCamera3PicChannel *)userdata;
+    if (obj) {
+        //Construct payload for process_capture_result. Call mChannelCb
+
+        qcamera_hal3_jpeg_data_t *job = obj->m_postprocessor.findJpegJobByJobId(jobId);
+
+        // Report the buffer as errored on encode failure or unknown job, but
+        // still try to release resources below when the job exists.
+        if ((job == NULL) || (status == JPEG_JOB_STATUS_ERROR)) {
+            LOGE("Error in jobId: (%d) with status: %d", jobId, status);
+            resultStatus = CAMERA3_BUFFER_STATUS_ERROR;
+        }
+
+        if (NULL != job) {
+            uint32_t bufIdx = (uint32_t)job->jpeg_settings->out_buf_index;
+            LOGD("jpeg out_buf_index: %d", bufIdx);
+
+            //Construct jpeg transient header of type camera3_jpeg_blob_t
+            //Append at the end of jpeg image of buf_filled_len size
+
+            jpegHeader.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
+            if (JPEG_JOB_STATUS_DONE == status) {
+                jpegHeader.jpeg_size = (uint32_t)p_output->buf_filled_len;
+                char* jpeg_buf = (char *)p_output->buf_vaddr;
+
+                ssize_t maxJpegSize = -1;
+
+                // Gralloc buffer may have additional padding for 4K page size
+                // Follow size guidelines based on spec since framework relies
+                // on that to reach end of buffer and with it the header
+
+                //Handle same as resultBuffer, but for readablity
+                jpegBufferHandle =
+                        (buffer_handle_t *)obj->mMemory.getBufferHandle(bufIdx);
+
+                if (NULL != jpegBufferHandle) {
+                    // NOTE(review): 'width' of the blob gralloc handle is
+                    // used as the usable byte size here — presumably the
+                    // blob-stream convention; confirm against gralloc.
+                    maxJpegSize = ((private_handle_t*)(*jpegBufferHandle))->width;
+                    if (maxJpegSize > obj->mMemory.getSize(bufIdx)) {
+                        maxJpegSize = obj->mMemory.getSize(bufIdx);
+                    }
+
+                    // Write the transient header at the very end of the
+                    // usable buffer so the framework can locate it.
+                    size_t jpeg_eof_offset =
+                            (size_t)(maxJpegSize - (ssize_t)sizeof(jpegHeader));
+                    char *jpeg_eof = &jpeg_buf[jpeg_eof_offset];
+                    memcpy(jpeg_eof, &jpegHeader, sizeof(jpegHeader));
+                    obj->mMemory.cleanInvalidateCache(bufIdx);
+                } else {
+                    LOGE("JPEG buffer not found and index: %d",
+                            bufIdx);
+                    resultStatus = CAMERA3_BUFFER_STATUS_ERROR;
+                }
+            }
+
+            //Use below data to issue framework callback
+            resultBuffer =
+                    (buffer_handle_t *)obj->mMemory.getBufferHandle(bufIdx);
+            int32_t resultFrameNumber = obj->mMemory.getFrameNumber(bufIdx);
+            int32_t rc = obj->mMemory.unregisterBuffer(bufIdx);
+            if (NO_ERROR != rc) {
+                LOGE("Error %d unregistering stream buffer %d",
+                     rc, bufIdx);
+            }
+
+            result.stream = obj->mCamera3Stream;
+            result.buffer = resultBuffer;
+            result.status = resultStatus;
+            result.acquire_fence = -1;
+            result.release_fence = -1;
+
+            // Release any snapshot buffers before calling
+            // the user callback. The callback can potentially
+            // unblock pending requests to snapshot stream.
+            int32_t snapshotIdx = -1;
+            mm_camera_super_buf_t* src_frame = NULL;
+
+            if (job->src_reproc_frame)
+                src_frame = job->src_reproc_frame;
+            else
+                src_frame = job->src_frame;
+
+            if (src_frame) {
+                if (obj->mStreams[0]->getMyHandle() ==
+                        src_frame->bufs[0]->stream_id) {
+                    snapshotIdx = (int32_t)src_frame->bufs[0]->buf_idx;
+                } else {
+                    LOGE("Snapshot stream id %d and source frame %d don't match!",
+                             obj->mStreams[0]->getMyHandle(),
+                            src_frame->bufs[0]->stream_id);
+                }
+            }
+            if (0 <= snapshotIdx) {
+                Mutex::Autolock lock(obj->mFreeBuffersLock);
+                obj->mFreeBufferList.push_back((uint32_t)snapshotIdx);
+            } else {
+                LOGE("Snapshot buffer not found!");
+            }
+
+            LOGI("Issue Jpeg Callback frameNumber = %d status = %d",
+                    resultFrameNumber, resultStatus);
+            if (obj->mChannelCB) {
+                obj->mChannelCB(NULL,
+                        &result,
+                        (uint32_t)resultFrameNumber,
+                        false,
+                        obj->mUserData);
+            }
+
+            // release internal data for jpeg job
+            if ((NULL != job->fwk_frame) || (NULL != job->fwk_src_buffer)) {
+                /* unregister offline input buffer */
+                int32_t inputBufIndex =
+                        obj->mOfflineMemory.getGrallocBufferIndex((uint32_t)resultFrameNumber);
+                if (0 <= inputBufIndex) {
+                    rc = obj->mOfflineMemory.unregisterBuffer(inputBufIndex);
+                } else {
+                    LOGE("could not find the input buf index, frame number %d",
+                             resultFrameNumber);
+                }
+                if (NO_ERROR != rc) {
+                    LOGE("Error %d unregistering input buffer %d",
+                             rc, bufIdx);
+                }
+
+                /* unregister offline meta buffer */
+                int32_t metaBufIndex =
+                        obj->mOfflineMetaMemory.getHeapBufferIndex((uint32_t)resultFrameNumber);
+                if (0 <= metaBufIndex) {
+                    Mutex::Autolock lock(obj->mFreeOfflineMetaBuffersLock);
+                    obj->mFreeOfflineMetaBuffersList.push_back((uint32_t)metaBufIndex);
+                } else {
+                    LOGE("could not find the input meta buf index, frame number %d",
+                             resultFrameNumber);
+                }
+            }
+            obj->m_postprocessor.releaseOfflineBuffers(false);
+            obj->m_postprocessor.releaseJpegJobData(job);
+            free(job);
+        }
+
+        return;
+    } else {
+        LOGE("Null userdata in jpeg callback");
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : QCamera3PicChannel
+ *
+ * DESCRIPTION: constructor of QCamera3PicChannel (snapshot/JPEG channel)
+ *
+ * PARAMETERS :
+ *   @cam_handle     : camera handle
+ *   @channel_handle : channel handle
+ *   @cam_ops        : ptr to camera ops table
+ *   @cb_routine     : callback routine to frame aggregator
+ *   @paddingInfo    : padding information for the stream
+ *   @userData       : opaque pointer to the HAL instance
+ *   @stream         : camera3_stream_t structure
+ *   @postprocess_mask : the postprocess mask for streams of this channel
+ *   @is4KVideo      : unused
+ *   @isInputStreamConfigured : whether an input (reprocess) stream exists
+ *   @metadataChannel: handle to the metadata channel
+ *   @numBuffers     : number of buffers for this channel
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3PicChannel::QCamera3PicChannel(uint32_t cam_handle,
+                    uint32_t channel_handle,
+                    mm_camera_ops_t *cam_ops,
+                    channel_cb_routine cb_routine,
+                    cam_padding_info_t *paddingInfo,
+                    void *userData,
+                    camera3_stream_t *stream,
+                    cam_feature_mask_t postprocess_mask,
+                    __unused bool is4KVideo,
+                    bool isInputStreamConfigured,
+                    QCamera3Channel *metadataChannel,
+                    uint32_t numBuffers) :
+                        QCamera3ProcessingChannel(cam_handle, channel_handle,
+                                cam_ops, cb_routine, paddingInfo, userData,
+                                stream, CAM_STREAM_TYPE_SNAPSHOT,
+                                postprocess_mask, metadataChannel, numBuffers),
+                        mNumSnapshotBufs(0),
+                        mInputBufferHint(isInputStreamConfigured),
+                        mYuvMemory(NULL),
+                        mFrameLen(0)
+{
+    QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)mUserData;
+    m_max_pic_dim = hal_obj->calcMaxJpegDim();
+    mYuvWidth = stream->width;
+    mYuvHeight = stream->height;
+    mStreamType = CAM_STREAM_TYPE_SNAPSHOT;
+    // Use same pixelformat for 4K video case
+    mStreamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_SNAPSHOT);
+    // Register the static jpegEvtHandle so the encoder can deliver results
+    // back to this channel instance.
+    int32_t rc = m_postprocessor.initJpeg(jpegEvtHandle, &m_max_pic_dim, this);
+    if (rc != 0) {
+        LOGE("Init Postprocessor failed");
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : flush
+ *
+ * DESCRIPTION: flush pic channel, which will stop all processing within, including
+ *              the reprocessing channel in postprocessor and YUV stream.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3PicChannel::flush()
+{
+    int32_t rc = NO_ERROR;
+    if(!m_bIsActive) {
+        LOGE("Attempt to flush inactive channel");
+        return NO_INIT;
+    }
+
+    rc = m_postprocessor.flush();
+    // BUG FIX: the failure check was inverted (rc == 0). Per the NO_ERROR(0)
+    // == success convention used throughout this file, the old code logged
+    // "flush failed" and returned early on SUCCESS, skipping the offline
+    // memory cleanup and the free-buffer-list reset below.
+    if (rc != NO_ERROR) {
+        LOGE("Postprocessor flush failed, rc = %d", rc);
+        return rc;
+    }
+
+    // Release any offline reprocess buffers still held.
+    if (0 < mOfflineMetaMemory.getCnt()) {
+        mOfflineMetaMemory.deallocate();
+    }
+    if (0 < mOfflineMemory.getCnt()) {
+        mOfflineMemory.unregisterBuffers();
+    }
+    // Reset the free buffer list to all buffer indices.
+    Mutex::Autolock lock(mFreeBuffersLock);
+    mFreeBufferList.clear();
+    for (uint32_t i = 0; i < mCamera3Stream->max_buffers; i++) {
+        mFreeBufferList.push_back(i);
+    }
+    return rc;
+}
+
+
+QCamera3PicChannel::~QCamera3PicChannel()
+{
+    // No pic-channel-specific cleanup; base class teardown applies.
+}
+
+/*===========================================================================
+ * FUNCTION   : initialize
+ *
+ * DESCRIPTION: add the snapshot stream to this channel and initialize the
+ *              base processing channel (offline meta memory for reprocess)
+ *
+ * PARAMETERS :
+ * @isType    : the image stabilization type
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3PicChannel::initialize(cam_is_type_t isType)
+{
+    int32_t rc = NO_ERROR;
+    cam_dimension_t streamDim;
+    cam_stream_type_t streamType;
+    cam_format_t streamFormat;
+
+    if (NULL == mCamera3Stream) {
+        LOGE("Camera stream uninitialized");
+        return NO_INIT;
+    }
+
+    if (1 <= m_numStreams) {
+        // Only one stream per channel supported in v3 Hal
+        return NO_ERROR;
+    }
+
+    mIsType = isType;
+    streamType = mStreamType;
+    streamFormat = mStreamFormat;
+    // mYuvWidth/mYuvHeight may differ from the framework stream dimensions
+    // when the YUV size is overridden (see request()).
+    streamDim.width = (int32_t)mYuvWidth;
+    streamDim.height = (int32_t)mYuvHeight;
+
+    mNumSnapshotBufs = mCamera3Stream->max_buffers;
+    rc = QCamera3Channel::addStream(streamType, streamFormat, streamDim,
+            ROTATE_0, (uint8_t)mCamera3Stream->max_buffers, mPostProcMask,
+            mIsType);
+
+    if (NO_ERROR != rc) {
+        LOGE("Initialize failed, rc = %d", rc);
+        return rc;
+    }
+
+    /* initialize offline meta memory for input reprocess */
+    rc = QCamera3ProcessingChannel::initialize(isType);
+    if (NO_ERROR != rc) {
+        LOGE("Processing Channel initialize failed, rc = %d",
+                 rc);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : request
+ *
+ * DESCRIPTION: handle the request - either with an input buffer or a direct
+ *              output request
+ *
+ * PARAMETERS :
+ * @buffer       : pointer to the output buffer
+ * @frameNumber  : frame number of the request
+ * @pInputBuffer : pointer to input buffer if an input request
+ * @metadata     : parameters associated with the request
+ *
+ * RETURN     : 0 on a success start of capture
+ *              -EINVAL on invalid input
+ *              -ENODEV on serious error
+ *==========================================================================*/
+int32_t QCamera3PicChannel::request(buffer_handle_t *buffer,
+        uint32_t frameNumber,
+        camera3_stream_buffer_t *pInputBuffer,
+        metadata_buffer_t *metadata)
+{
+    ATRACE_CALL();
+    //FIX ME: Return buffer back in case of failures below.
+
+    int32_t rc = NO_ERROR;
+
+    reprocess_config_t reproc_cfg;
+    cam_dimension_t dim;
+    memset(&reproc_cfg, 0, sizeof(reprocess_config_t));
+    //make sure to set the correct input stream dim in case of YUV size override
+    //and recalculate the plane info
+    dim.width = (int32_t)mYuvWidth;
+    dim.height = (int32_t)mYuvHeight;
+    setReprocConfig(reproc_cfg, pInputBuffer, metadata, mStreamFormat, dim);
+
+    // Picture stream has already been started before any request comes in
+    if (!m_bIsActive) {
+        LOGE("Channel not started!!");
+        return NO_INIT;
+    }
+
+    // Register the framework output buffer on the fly if it has not been
+    // seen before.
+    int index = mMemory.getMatchBufIndex((void*)buffer);
+
+    if(index < 0) {
+        rc = registerBuffer(buffer, mIsType);
+        if (NO_ERROR != rc) {
+            LOGE("On-the-fly buffer registration failed %d",
+                     rc);
+            return rc;
+        }
+
+        index = mMemory.getMatchBufIndex((void*)buffer);
+        if (index < 0) {
+            LOGE("Could not find object among registered buffers");
+            return DEAD_OBJECT;
+        }
+    }
+    LOGD("buffer index %d, frameNumber: %u", index, frameNumber);
+
+    // NOTE(review): rc from markFrameNumber and queueJpegSetting below is
+    // overwritten/unchecked before use — confirm failures here are benign.
+    rc = mMemory.markFrameNumber((uint32_t)index, frameNumber);
+
+    // Start postprocessor
+    startPostProc(reproc_cfg);
+
+    // Queue jpeg settings
+    rc = queueJpegSetting((uint32_t)index, metadata);
+
+    if (pInputBuffer == NULL) {
+        // Regular capture: queue an internal YUV buffer to the snapshot
+        // stream; the filled frame is later fed to the JPEG encoder.
+        Mutex::Autolock lock(mFreeBuffersLock);
+        uint32_t bufIdx;
+        if (mFreeBufferList.empty()) {
+            rc = mYuvMemory->allocateOne(mFrameLen);
+            if (rc < 0) {
+                LOGE("Failed to allocate heap buffer. Fatal");
+                return rc;
+            } else {
+                bufIdx = (uint32_t)rc;
+            }
+        } else {
+            List<uint32_t>::iterator it = mFreeBufferList.begin();
+            bufIdx = *it;
+            mFreeBufferList.erase(it);
+        }
+        mYuvMemory->markFrameNumber(bufIdx, frameNumber);
+        mStreams[0]->bufDone(bufIdx);
+    } else {
+        // Input reprocess: wrap the framework input buffer and metadata and
+        // hand it directly to the postprocessor.
+        qcamera_fwk_input_pp_data_t *src_frame = NULL;
+        src_frame = (qcamera_fwk_input_pp_data_t *)calloc(1,
+                sizeof(qcamera_fwk_input_pp_data_t));
+        if (src_frame == NULL) {
+            LOGE("No memory for src frame");
+            return NO_MEMORY;
+        }
+        rc = setFwkInputPPData(src_frame, pInputBuffer, &reproc_cfg, metadata,
+                NULL /*fwk output buffer*/, frameNumber);
+        if (NO_ERROR != rc) {
+            LOGE("Error %d while setting framework input PP data", rc);
+            free(src_frame);
+            return rc;
+        }
+        LOGH("Post-process started");
+        m_postprocessor.processData(src_frame);
+    }
+    return rc;
+}
+
+
+
+/*===========================================================================
+ * FUNCTION   : dataNotifyCB
+ *
+ * DESCRIPTION: Channel Level callback used for super buffer data notify.
+ *              This function is registered with mm-camera-interface to handle
+ *              data notify
+ *
+ * PARAMETERS :
+ *   @recvd_frame   : stream frame received
+ *   userdata       : user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3PicChannel::dataNotifyCB(mm_camera_super_buf_t *recvd_frame,
+                                 void *userdata)
+{
+    ATRACE_CALL();
+    LOGD("E\n");
+    QCamera3PicChannel *channel = (QCamera3PicChannel *)userdata;
+
+    if (channel == NULL) {
+        LOGE("invalid channel pointer");
+        return;
+    }
+
+    if(channel->m_numStreams != 1) {
+        LOGE("Error: Bug: This callback assumes one stream per channel");
+        return;
+    }
+
+
+    if(channel->mStreams[0] == NULL) {
+        LOGE("Error: Invalid Stream object");
+        return;
+    }
+
+    // Explicitly qualified call to avoid virtual dispatch to a subclass
+    // override.
+    channel->QCamera3PicChannel::streamCbRoutine(recvd_frame, channel->mStreams[0]);
+
+    LOGD("X\n");
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : streamCbRoutine
+ *
+ * DESCRIPTION:
+ *
+ * PARAMETERS :
+ * @super_frame : the super frame with filled buffer
+ * @stream      : stream on which the buffer was requested and filled
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3PicChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
+                            QCamera3Stream *stream)
+{
+    ATRACE_CALL();
+    //TODO
+    //Used only for getting YUV. Jpeg callback will be sent back from channel
+    //directly to HWI. Refer to func jpegEvtHandle
+
+    //Got the yuv callback. Calling yuv callback handler in PostProc
+    uint8_t frameIndex;
+    mm_camera_super_buf_t* frame = NULL;
+    cam_dimension_t dim;
+    cam_frame_len_offset_t offset;
+
+    memset(&dim, 0, sizeof(dim));
+    memset(&offset, 0, sizeof(cam_frame_len_offset_t));
+
+    if (checkStreamCbErrors(super_frame, stream) != NO_ERROR) {
+        LOGE("Error with the stream callback");
+        return;
+    }
+
+    frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
+    LOGD("recvd buf_idx: %u for further processing",
+         (uint32_t)frameIndex);
+    if(frameIndex >= mNumSnapshotBufs) {
+         LOGE("Error, Invalid index for buffer");
+         if(stream) {
+             // Recycle the buffer rather than leaking it.
+             Mutex::Autolock lock(mFreeBuffersLock);
+             mFreeBufferList.push_back(frameIndex);
+             stream->bufDone(frameIndex);
+         }
+         return;
+    }
+
+    // Copy the super-buffer descriptor to the heap; the postprocessor takes
+    // ownership of the copy.
+    frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+    if (frame == NULL) {
+       LOGE("Error allocating memory to save received_frame structure.");
+       if(stream) {
+           Mutex::Autolock lock(mFreeBuffersLock);
+           mFreeBufferList.push_back(frameIndex);
+           stream->bufDone(frameIndex);
+       }
+       return;
+    }
+    *frame = *super_frame;
+    stream->getFrameDimension(dim);
+    stream->getFrameOffset(offset);
+    // Optional debug dump of the YUV input to reprocess.
+    dumpYUV(frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_INPUT_REPROCESS);
+
+    m_postprocessor.processData(frame);
+    free(super_frame);
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamBufs
+ *
+ * DESCRIPTION: allocate the YUV stream memory for this picture channel
+ *
+ * PARAMETERS :
+ *   @len : length (in bytes) of one stream buffer
+ *
+ * RETURN     : QCamera3StreamMem ptr on success, NULL on failure
+ *==========================================================================*/
+QCamera3StreamMem* QCamera3PicChannel::getStreamBufs(uint32_t len)
+{
+    mYuvMemory = new QCamera3StreamMem(mCamera3Stream->max_buffers, false);
+    if (!mYuvMemory) {
+        // Fixed copy-paste: this is the YUV snapshot memory, not metadata.
+        LOGE("unable to create YUV memory");
+        return NULL;
+    }
+    mFrameLen = len;
+
+    return mYuvMemory;
+}
+
+/*===========================================================================
+ * FUNCTION   : putStreamBufs
+ *
+ * DESCRIPTION: release the YUV stream memory and reset the free buffer list
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3PicChannel::putStreamBufs()
+{
+    QCamera3ProcessingChannel::putStreamBufs();
+
+    // Guard against a failed or absent getStreamBufs() allocation.
+    if (mYuvMemory != NULL) {
+        mYuvMemory->deallocate();
+        delete mYuvMemory;
+        mYuvMemory = NULL;
+    }
+    mFreeBufferList.clear();
+}
+
+/*===========================================================================
+ * FUNCTION   : queueJpegSetting
+ *
+ * DESCRIPTION: collect the JPEG encode settings (orientation, quality,
+ *              thumbnail, GPS fields and image description) from request
+ *              metadata and queue them to the postprocessor
+ *
+ * PARAMETERS :
+ *   @index    : output buffer index these settings apply to
+ *   @metadata : request metadata to read the JPEG tags from
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3PicChannel::queueJpegSetting(uint32_t index, metadata_buffer_t *metadata)
+{
+    QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)mUserData;
+    jpeg_settings_t *settings =
+            (jpeg_settings_t *)malloc(sizeof(jpeg_settings_t));
+
+    if (!settings) {
+        LOGE("out of memory allocating jpeg_settings");
+        return -ENOMEM;
+    }
+
+    memset(settings, 0, sizeof(jpeg_settings_t));
+
+    settings->out_buf_index = index;
+
+    // Defaults below are overridden when the metadata tag is present.
+    settings->jpeg_orientation = 0;
+    IF_META_AVAILABLE(int32_t, orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
+        settings->jpeg_orientation = *orientation;
+    }
+
+    settings->jpeg_quality = 85;
+    IF_META_AVAILABLE(uint32_t, quality1, CAM_INTF_META_JPEG_QUALITY, metadata) {
+        settings->jpeg_quality = (uint8_t) *quality1;
+    }
+
+    IF_META_AVAILABLE(uint32_t, quality2, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
+        settings->jpeg_thumb_quality = (uint8_t) *quality2;
+    }
+
+    IF_META_AVAILABLE(cam_dimension_t, dimension, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
+        settings->thumbnail_size = *dimension;
+    }
+
+    settings->gps_timestamp_valid = 0;
+    IF_META_AVAILABLE(int64_t, timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
+        settings->gps_timestamp = *timestamp;
+        settings->gps_timestamp_valid = 1;
+    }
+
+    settings->gps_coordinates_valid = 0;
+    IF_META_AVAILABLE(double, coordinates, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
+        memcpy(settings->gps_coordinates, coordinates, 3*sizeof(double));
+        settings->gps_coordinates_valid = 1;
+    }
+
+    IF_META_AVAILABLE(uint8_t, proc_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
+        memset(settings->gps_processing_method, 0,
+                sizeof(settings->gps_processing_method));
+        strlcpy(settings->gps_processing_method, (const char *)proc_methods,
+                sizeof(settings->gps_processing_method));
+    }
+
+    // Image description: "M:<eeprom version> L:<ldaf calib0>-<calib1>"
+    const char *eepromVersion = hal_obj->getEepromVersionInfo();
+    const uint32_t *ldafCalib = hal_obj->getLdafCalib();
+    if ((eepromVersion && strlen(eepromVersion)) ||
+            ldafCalib) {
+        int len = 0;
+        settings->image_desc_valid = true;
+        if (eepromVersion && strlen(eepromVersion)) {
+            len = snprintf(settings->image_desc, sizeof(settings->image_desc),
+                    "M:%s ", eepromVersion);
+            // snprintf returns a negative value on error, or the length that
+            // *would* have been written on truncation; clamp so the offset
+            // arithmetic below never points past the buffer.
+            if (len < 0) {
+                len = 0;
+            } else if (len >= (int)sizeof(settings->image_desc)) {
+                len = (int)sizeof(settings->image_desc) - 1;
+            }
+        }
+        if (ldafCalib) {
+            snprintf(settings->image_desc + len,
+                    sizeof(settings->image_desc) - len, "L:%u-%u",
+                    ldafCalib[0], ldafCalib[1]);
+        }
+    }
+
+    // Ownership of 'settings' passes to the postprocessor.
+    return m_postprocessor.processJpegSettingData(settings);
+}
+
+
+/*===========================================================================
+ * FUNCTION   : overrideYuvSize
+ *
+ * DESCRIPTION: override the YUV dimensions used by this picture channel
+ *
+ * PARAMETERS :
+ *   @width  : new YUV width
+ *   @height : new YUV height
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3PicChannel::overrideYuvSize(uint32_t width, uint32_t height)
+{
+   mYuvWidth = width;
+   mYuvHeight = height;
+}
+
+/*===========================================================================
+ * FUNCTION   : getReprocessType
+ *
+ * DESCRIPTION: get the type of reprocess output supported by this channel
+ *
+ * PARAMETERS : NONE
+ *
+ * RETURN     : reprocess_type_t : type of reprocess
+ *==========================================================================*/
+reprocess_type_t QCamera3PicChannel::getReprocessType()
+{
+    // A picture channel uses the postprocessor either for reprocess+jpeg
+    // or for reprocess only.
+    reprocess_type_t expectedReprocess =
+            (mPostProcMask == CAM_QCOM_FEATURE_NONE || mInputBufferHint) ?
+            REPROCESS_TYPE_JPEG : REPROCESS_TYPE_NONE;
+
+    LOGH("expectedReprocess from Pic Channel is %d", expectedReprocess);
+    return expectedReprocess;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : QCamera3ReprocessChannel
+ *
+ * DESCRIPTION: constructor of QCamera3ReprocessChannel
+ *
+ * PARAMETERS :
+ *   @cam_handle       : camera handle
+ *   @channel_handle   : channel handle
+ *   @cam_ops          : ptr to camera ops table
+ *   @cb_routine       : channel callback routine
+ *   @paddingInfo      : stream padding requirements
+ *   @postprocess_mask : post-proccess feature mask
+ *   @userData         : user data ptr (HAL interface object)
+ *   @ch_hdl           : handle of the input (source) processing channel
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3ReprocessChannel::QCamera3ReprocessChannel(uint32_t cam_handle,
+                                                 uint32_t channel_handle,
+                                                 mm_camera_ops_t *cam_ops,
+                                                 channel_cb_routine cb_routine,
+                                                 cam_padding_info_t *paddingInfo,
+                                                 cam_feature_mask_t postprocess_mask,
+                                                 void *userData, void *ch_hdl) :
+    /* In case of framework reprocessing, pproc and jpeg operations could be
+     * parallelized by allowing 1 extra buffer for reprocessing output:
+     * ch_hdl->getNumBuffers() + 1 */
+    QCamera3Channel(cam_handle, channel_handle, cam_ops, cb_routine, paddingInfo,
+                    postprocess_mask, userData,
+                    ((QCamera3ProcessingChannel *)ch_hdl)->getNumBuffers()
+                              + (MAX_REPROCESS_PIPELINE_STAGES - 1)),
+    inputChHandle(ch_hdl),
+    mOfflineBuffersIndex(-1),
+    mFrameLen(0),
+    mReprocessType(REPROCESS_TYPE_NONE),
+    m_pSrcChannel(NULL),
+    m_pMetaChannel(NULL),
+    mMemory(NULL),
+    mGrallocMemory(0),
+    mReprocessPerfMode(false)
+{
+    memset(mSrcStreamHandles, 0, sizeof(mSrcStreamHandles));
+    // Last buffer slot is reserved for offline input; the slot after the
+    // regular range is reserved for offline metadata.
+    mOfflineBuffersIndex = mNumBuffers -1;
+    mOfflineMetaIndex = (int32_t) (2*mNumBuffers -1);
+}
+
+
+/*===========================================================================
+ * FUNCTION   : initialize
+ *
+ * DESCRIPTION: initialize the reprocess channel by registering it with the
+ *              camera backend through mm-camera-interface
+ *
+ * PARAMETERS :
+ *   @isType : image stabilization type for this channel
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ReprocessChannel::initialize(cam_is_type_t isType)
+{
+    int32_t rc = NO_ERROR;
+    mm_camera_channel_attr_t attr;
+
+    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
+    attr.max_unmatched_frames = 1;
+
+    m_handle = m_camOps->add_channel(m_camHandle,
+                                      &attr,
+                                      NULL,
+                                      this);
+    if (m_handle == 0) {
+        LOGE("Add channel failed");
+        return UNKNOWN_ERROR;
+    }
+
+    mIsType = isType;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : registerBuffer
+ *
+ * DESCRIPTION: register streaming buffer to the channel object
+ *
+ * PARAMETERS :
+ *   @buffer     : buffer to be registered
+ *   @isType     : the image stabilization type for the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ReprocessChannel::registerBuffer(buffer_handle_t *buffer,
+        cam_is_type_t isType)
+{
+    ATRACE_CALL();
+    int rc = 0;
+    mIsType = isType;
+    cam_stream_type_t streamType;
+
+    if (buffer == NULL) {
+        LOGE("Error: Cannot register a NULL buffer");
+        return BAD_VALUE;
+    }
+
+    // One slot is reserved (see constructor), so at most mNumBuffers - 1
+    // gralloc buffers may be registered.
+    if ((uint32_t)mGrallocMemory.getCnt() > (mNumBuffers - 1)) {
+        LOGE("Trying to register more buffers than initially requested");
+        return BAD_VALUE;
+    }
+
+    // Lazily initialize the channel on first registration.
+    if (0 == m_numStreams) {
+        rc = initialize(mIsType);
+        if (rc != NO_ERROR) {
+            LOGE("Couldn't initialize camera stream %d",
+                     rc);
+            return rc;
+        }
+    }
+
+    streamType = mStreams[0]->getMyType();
+    rc = mGrallocMemory.registerBuffer(buffer, streamType);
+    // Re-registering an already-known buffer is not an error.
+    if (ALREADY_EXISTS == rc) {
+        return NO_ERROR;
+    } else if (NO_ERROR != rc) {
+        LOGE("Buffer %p couldn't be registered %d", buffer, rc);
+        return rc;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : streamCbRoutine
+ *
+ * DESCRIPTION: callback invoked when a reprocessed buffer is ready. For
+ *              JPEG-type reprocess the frame is forwarded to the
+ *              postprocessor for encoding; otherwise the gralloc output
+ *              buffer is returned directly to the framework.
+ *
+ * PARAMETERS :
+ *   @super_frame : the super frame with filled buffer
+ *   @stream      : stream on which the buffer was filled
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3ReprocessChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
+                                  QCamera3Stream *stream)
+{
+    //Got the pproc data callback. Now send to jpeg encoding
+    uint8_t frameIndex;
+    uint32_t resultFrameNumber;
+    mm_camera_super_buf_t* frame = NULL;
+    QCamera3ProcessingChannel *obj = (QCamera3ProcessingChannel *)inputChHandle;
+    cam_dimension_t dim;
+    cam_frame_len_offset_t offset;
+
+    memset(&dim, 0, sizeof(dim));
+    memset(&offset, 0, sizeof(cam_frame_len_offset_t));
+    if(!super_frame) {
+         LOGE("Invalid Super buffer");
+         return;
+    }
+
+    if(super_frame->num_bufs != 1) {
+         LOGE("Multiple streams are not supported");
+         return;
+    }
+    if(super_frame->bufs[0] == NULL ) {
+         LOGE("Error, Super buffer frame does not contain valid buffer");
+         return;
+    }
+    frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
+
+
+    if (mReprocessType == REPROCESS_TYPE_JPEG) {
+        resultFrameNumber =  mMemory->getFrameNumber(frameIndex);
+        // Heap-copy the super buffer; the postprocessor consumes it
+        // asynchronously after this callback returns.
+        frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+        if (frame == NULL) {
+           LOGE("Error allocating memory to save received_frame structure.");
+           if(stream) {
+               stream->bufDone(frameIndex);
+           }
+           return;
+        }
+        LOGI("bufIndex: %u recvd from post proc",
+                 (uint32_t)frameIndex);
+        *frame = *super_frame;
+
+        stream->getFrameDimension(dim);
+        stream->getFrameOffset(offset);
+        dumpYUV(frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_SNAPSHOT);
+        /* Since reprocessing is done, send the callback to release the input buffer */
+        if (mChannelCB) {
+            mChannelCB(NULL, NULL, resultFrameNumber, true, mUserData);
+        }
+        obj->m_postprocessor.processPPData(frame);
+    } else {
+        // YUV-to-YUV path: hand the gralloc output buffer back to the
+        // framework and release/unregister the stream-side resources.
+        buffer_handle_t *resultBuffer;
+        frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
+        resultBuffer = (buffer_handle_t *)mGrallocMemory.getBufferHandle(frameIndex);
+        resultFrameNumber = mGrallocMemory.getFrameNumber(frameIndex);
+        int32_t rc = stream->bufRelease(frameIndex);
+        if (NO_ERROR != rc) {
+            LOGE("Error %d releasing stream buffer %d",
+                     rc, frameIndex);
+        }
+        rc = mGrallocMemory.unregisterBuffer(frameIndex);
+        if (NO_ERROR != rc) {
+            LOGE("Error %d unregistering stream buffer %d",
+                     rc, frameIndex);
+        }
+        obj->reprocessCbRoutine(resultBuffer, resultFrameNumber);
+
+        obj->m_postprocessor.releaseOfflineBuffers(false);
+        // NOTE(review): releasePPJobData appears to free the job's internal
+        // resources while free() releases the job struct itself — confirm
+        // against QCamera3PostProcessor to rule out a double free.
+        qcamera_hal3_pp_data_t *pp_job = obj->m_postprocessor.dequeuePPJob(resultFrameNumber);
+        if (pp_job != NULL) {
+            obj->m_postprocessor.releasePPJobData(pp_job);
+        }
+        free(pp_job);
+        resetToCamPerfNormal(resultFrameNumber);
+    }
+    free(super_frame);
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : resetToCamPerfNormal
+ *
+ * DESCRIPTION: Set the perf mode to normal if all the priority frames
+ *              have been reprocessed
+ *
+ * PARAMETERS :
+ *      @frameNumber: Frame number of the reprocess completed frame
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ReprocessChannel::resetToCamPerfNormal(uint32_t frameNumber)
+{
+    int32_t rc = NO_ERROR;
+    bool resetToPerfNormal = false;
+    {
+        Mutex::Autolock lock(mPriorityFramesLock);
+        /* remove the priority frame number from the list */
+        for (size_t i = 0; i < mPriorityFrames.size(); i++) {
+            if (mPriorityFrames[i] == frameNumber) {
+                mPriorityFrames.removeAt(i);
+                // removeAt shifts later entries down one slot; step the
+                // index back so the shifted element is not skipped.
+                i--;
+            }
+        }
+        /* reset the perf mode if pending priority frame list is empty */
+        if (mReprocessPerfMode && mPriorityFrames.empty()) {
+            resetToPerfNormal = true;
+        }
+    }
+    if (resetToPerfNormal) {
+        QCamera3Stream *pStream = mStreams[0];
+        cam_stream_parm_buffer_t param;
+        memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+
+        param.type = CAM_STREAM_PARAM_TYPE_REQUEST_OPS_MODE;
+        param.perf_mode = CAM_PERF_NORMAL;
+        rc = pStream->setParameter(param);
+        {
+            Mutex::Autolock lock(mPriorityFramesLock);
+            mReprocessPerfMode = false;
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamBufs
+ *
+ * DESCRIPTION: register the buffers of the reprocess channel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : QCamera3StreamMem *
+ *==========================================================================*/
+QCamera3StreamMem* QCamera3ReprocessChannel::getStreamBufs(uint32_t len)
+{
+    if (mReprocessType != REPROCESS_TYPE_JPEG) {
+        // Non-JPEG reprocess writes straight into framework gralloc buffers.
+        return &mGrallocMemory;
+    }
+
+    // JPEG reprocess needs its own intermediate output memory.
+    mMemory = new QCamera3StreamMem(mNumBuffers, false);
+    if (!mMemory) {
+        LOGE("unable to create reproc memory");
+        return NULL;
+    }
+    mFrameLen = len;
+    return mMemory;
+}
+
+/*===========================================================================
+ * FUNCTION   : putStreamBufs
+ *
+ * DESCRIPTION: release the reprocess channel buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3ReprocessChannel::putStreamBufs()
+{
+   if (mReprocessType == REPROCESS_TYPE_JPEG) {
+       // Guard against a failed or absent getStreamBufs() allocation.
+       if (mMemory != NULL) {
+           mMemory->deallocate();
+           delete mMemory;
+           mMemory = NULL;
+       }
+       mFreeBufferList.clear();
+   } else {
+       mGrallocMemory.unregisterBuffers();
+   }
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera3ReprocessChannel
+ *
+ * DESCRIPTION: destructor of QCamera3ReprocessChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3ReprocessChannel::~QCamera3ReprocessChannel()
+{
+    // Tear down streams first, then remove the channel from the backend.
+    destroy();
+
+    if (m_handle) {
+        m_camOps->delete_channel(m_camHandle, m_handle);
+        LOGD("deleting channel %d", m_handle);
+        m_handle = 0;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : start
+ *
+ * DESCRIPTION: start reprocess channel.
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ReprocessChannel::start()
+{
+    ATRACE_CALL();
+
+    int32_t rc = QCamera3Channel::start();
+    if (rc != NO_ERROR) {
+        return rc;
+    }
+
+    rc = m_camOps->start_channel(m_camHandle, m_handle);
+    if (rc != NO_ERROR) {
+        LOGE("start_channel failed %d", rc);
+        // Roll back the base-class start so the channel is left stopped.
+        QCamera3Channel::stop();
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop
+ *
+ * DESCRIPTION: stop reprocess channel.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ReprocessChannel::stop()
+{
+    ATRACE_CALL();
+
+    // Stop both layers and report a failure if either failed.
+    int32_t channelRc = QCamera3Channel::stop();
+    int32_t opsRc = m_camOps->stop_channel(m_camHandle, m_handle);
+
+    // Unmapping the buffers
+    unmapOfflineBuffers(true);
+    return channelRc | opsRc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamBySrcHandle
+ *
+ * DESCRIPTION: find reprocess stream by its source stream handle
+ *
+ * PARAMETERS :
+ *   @srcHandle : source stream handle
+ *
+ * RETURN     : ptr to reprocess stream if found. NULL if not found
+ *==========================================================================*/
+QCamera3Stream * QCamera3ReprocessChannel::getStreamBySrcHandle(uint32_t srcHandle)
+{
+    // The source-handle table is parallel to mStreams: a matching slot
+    // index identifies the corresponding reprocess stream.
+    for (uint32_t idx = 0; idx < m_numStreams; idx++) {
+        if (mSrcStreamHandles[idx] == srcHandle) {
+            return mStreams[idx];
+        }
+    }
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : getSrcStreamBySrcHandle
+ *
+ * DESCRIPTION: find source stream by source stream handle
+ *
+ * PARAMETERS :
+ *   @srcHandle : source stream handle
+ *
+ * RETURN     : ptr to source stream if found. NULL if not found
+ *==========================================================================*/
+QCamera3Stream * QCamera3ReprocessChannel::getSrcStreamBySrcHandle(uint32_t srcHandle)
+{
+    if (NULL == m_pSrcChannel) {
+        return NULL;
+    }
+
+    // A matching slot in the source-handle table indexes the stream on
+    // the source channel.
+    for (uint32_t idx = 0; idx < m_numStreams; idx++) {
+        if (mSrcStreamHandles[idx] == srcHandle) {
+            return m_pSrcChannel->getStreamByIndex(idx);
+        }
+    }
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : unmapOfflineBuffers
+ *
+ * DESCRIPTION: Unmaps offline input and metadata buffers. With all=false
+ *              only the oldest entry of each list is unmapped and erased;
+ *              with all=true both lists are drained.
+ *
+ * PARAMETERS :
+ *   @all : unmap every tracked buffer instead of just the first one
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ReprocessChannel::unmapOfflineBuffers(bool all)
+{
+    int rc = NO_ERROR;
+    if (!mOfflineBuffers.empty()) {
+        QCamera3Stream *stream = NULL;
+        List<OfflineBuffer>::iterator it = mOfflineBuffers.begin();
+        for (; it != mOfflineBuffers.end(); it++) {
+           stream = (*it).stream;
+           if (NULL != stream) {
+               rc = stream->unmapBuf((*it).type,
+                                     (*it).index,
+                                        -1);
+               if (NO_ERROR != rc) {
+                   LOGE("Error during offline buffer unmap %d",
+                          rc);
+               }
+               LOGD("Unmapped buffer with index %d", (*it).index);
+           }
+           // Single-buffer mode: erase the entry just unmapped and stop.
+           if (!all) {
+               mOfflineBuffers.erase(it);
+               break;
+           }
+        }
+        if (all) {
+           mOfflineBuffers.clear();
+        }
+    }
+
+    // Same policy for the offline metadata buffers.
+    if (!mOfflineMetaBuffers.empty()) {
+        QCamera3Stream *stream = NULL;
+        List<OfflineBuffer>::iterator it = mOfflineMetaBuffers.begin();
+        for (; it != mOfflineMetaBuffers.end(); it++) {
+           stream = (*it).stream;
+           if (NULL != stream) {
+               rc = stream->unmapBuf((*it).type,
+                                     (*it).index,
+                                        -1);
+               if (NO_ERROR != rc) {
+                   LOGE("Error during offline buffer unmap %d",
+                          rc);
+               }
+               LOGD("Unmapped meta buffer with index %d", (*it).index);
+           }
+           if (!all) {
+               mOfflineMetaBuffers.erase(it);
+               break;
+           }
+        }
+        if (all) {
+           mOfflineMetaBuffers.clear();
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : bufDone
+ *
+ * DESCRIPTION: Return reprocess stream buffer to free buffer list.
+ *              Note that this function doesn't queue buffer back to kernel.
+ *              It's up to doReprocessOffline to do that instead.
+ * PARAMETERS :
+ *   @recvd_frame  : stream buf frame to be returned
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ReprocessChannel::bufDone(mm_camera_super_buf_t *recvd_frame)
+{
+    if ((recvd_frame == NULL) || (recvd_frame->num_bufs != 1)) {
+        LOGE("Fatal. Not supposed to be here");
+        return BAD_VALUE;
+    }
+
+    // Recycle the buffer index so doReprocessOffline can reuse it.
+    Mutex::Autolock lock(mFreeBuffersLock);
+    uint32_t buf_idx = recvd_frame->bufs[0]->buf_idx;
+    mFreeBufferList.push_back(buf_idx);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : overrideMetadata
+ *
+ * DESCRIPTION: Override metadata entry such as rotation, crop, and CDS info
+ *              so they target the reprocess stream, then fill in fwk_frame.
+ *
+ * PARAMETERS :
+ *   @pp_buffer     : post-proc buffer holding the input frame and output
+ *   @meta_buffer   : metadata buffer
+ *   @jpeg_settings : JPEG settings (orientation) or NULL
+ *   @fwk_frame     : [out] framework input frame to populate
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ReprocessChannel::overrideMetadata(qcamera_hal3_pp_buffer_t *pp_buffer,
+        mm_camera_buf_def_t *meta_buffer, jpeg_settings_t *jpeg_settings,
+        qcamera_fwk_input_pp_data_t &fwk_frame)
+{
+    int32_t rc = NO_ERROR;
+    QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)mUserData;
+    if ((NULL == meta_buffer) || (NULL == pp_buffer) || (NULL == pp_buffer->input) ||
+            (NULL == hal_obj)) {
+        return BAD_VALUE;
+    }
+
+    metadata_buffer_t *meta = (metadata_buffer_t *)meta_buffer->buffer;
+    mm_camera_super_buf_t *frame = pp_buffer->input;
+    if (NULL == meta) {
+        return BAD_VALUE;
+    }
+
+    for (uint32_t i = 0; i < frame->num_bufs; i++) {
+        QCamera3Stream *pStream = getStreamBySrcHandle(frame->bufs[i]->stream_id);
+        QCamera3Stream *pSrcStream = getSrcStreamBySrcHandle(frame->bufs[i]->stream_id);
+
+        if (pStream != NULL && pSrcStream != NULL) {
+            if (jpeg_settings) {
+                // Find rotation info for reprocess stream
+                cam_rotation_info_t rotation_info;
+                memset(&rotation_info, 0, sizeof(rotation_info));
+                if (jpeg_settings->jpeg_orientation == 0) {
+                   rotation_info.rotation = ROTATE_0;
+                } else if (jpeg_settings->jpeg_orientation == 90) {
+                   rotation_info.rotation = ROTATE_90;
+                } else if (jpeg_settings->jpeg_orientation == 180) {
+                   rotation_info.rotation = ROTATE_180;
+                } else if (jpeg_settings->jpeg_orientation == 270) {
+                   rotation_info.rotation = ROTATE_270;
+                }
+                rotation_info.streamId = mStreams[0]->getMyServerID();
+                ADD_SET_PARAM_ENTRY_TO_BATCH(meta, CAM_INTF_PARM_ROTATION, rotation_info);
+            }
+
+            // Find and insert crop info for reprocess stream
+            IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, meta) {
+                if (MAX_NUM_STREAMS > crop_data->num_of_streams) {
+                    for (int j = 0; j < crop_data->num_of_streams; j++) {
+                        if (crop_data->crop_info[j].stream_id ==
+                                pSrcStream->getMyServerID()) {
+
+                            // Store crop/roi information for offline reprocess
+                            // in the reprocess stream slot
+                            crop_data->crop_info[crop_data->num_of_streams].crop =
+                                    crop_data->crop_info[j].crop;
+                            crop_data->crop_info[crop_data->num_of_streams].roi_map =
+                                    crop_data->crop_info[j].roi_map;
+                            crop_data->crop_info[crop_data->num_of_streams].stream_id =
+                                    mStreams[0]->getMyServerID();
+                            crop_data->num_of_streams++;
+
+                            LOGD("Reprocess stream server id: %d",
+                                     mStreams[0]->getMyServerID());
+                            LOGD("Found offline reprocess crop %dx%d %dx%d",
+                                    crop_data->crop_info[j].crop.left,
+                                    crop_data->crop_info[j].crop.top,
+                                    crop_data->crop_info[j].crop.width,
+                                    crop_data->crop_info[j].crop.height);
+                            LOGD("Found offline reprocess roimap %dx%d %dx%d",
+                                    crop_data->crop_info[j].roi_map.left,
+                                    crop_data->crop_info[j].roi_map.top,
+                                    crop_data->crop_info[j].roi_map.width,
+                                    crop_data->crop_info[j].roi_map.height);
+
+                            break;
+                        }
+                    }
+                } else {
+                    LOGE("No space to add reprocess stream crop/roi information");
+                }
+            }
+
+            IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, meta) {
+                uint8_t cnt = cdsInfo->num_of_streams;
+                if (cnt <= MAX_NUM_STREAMS) {
+                    cam_stream_cds_info_t repro_cds_info;
+                    memset(&repro_cds_info, 0, sizeof(repro_cds_info));
+                    repro_cds_info.stream_id = mStreams[0]->getMyServerID();
+                    // 'j' (was 'i') no longer shadows the outer loop index.
+                    for (size_t j = 0; j < cnt; j++) {
+                        if (cdsInfo->cds_info[j].stream_id ==
+                                pSrcStream->getMyServerID()) {
+                            repro_cds_info.cds_enable =
+                                    cdsInfo->cds_info[j].cds_enable;
+                            break;
+                        }
+                    }
+                    // Collapse CDS info to the single reprocess stream entry.
+                    cdsInfo->num_of_streams = 1;
+                    cdsInfo->cds_info[0] = repro_cds_info;
+                } else {
+                    LOGE("No space to add reprocess stream cds information");
+                }
+            }
+
+            fwk_frame.input_buffer = *frame->bufs[i];
+            fwk_frame.metadata_buffer = *meta_buffer;
+            fwk_frame.output_buffer = pp_buffer->output;
+            break;
+        } else {
+            LOGE("Source/Re-process streams are invalid");
+            rc |= BAD_VALUE;
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+* FUNCTION : overrideFwkMetadata
+*
+* DESCRIPTION: Override frameworks metadata such as rotation, crop, and CDS data.
+*
+* PARAMETERS :
+* @frame : input frame for reprocessing
+*
+* RETURN : int32_t type of status
+* NO_ERROR -- success
+* none-zero failure code
+*==========================================================================*/
+int32_t QCamera3ReprocessChannel::overrideFwkMetadata(
+        qcamera_fwk_input_pp_data_t *frame)
+{
+    if (NULL == frame) {
+        LOGE("Incorrect input frame");
+        return BAD_VALUE;
+    }
+
+    if (NULL == frame->metadata_buffer.buffer) {
+        LOGE("No metadata available");
+        return BAD_VALUE;
+    }
+    metadata_buffer_t *meta = (metadata_buffer_t *) frame->metadata_buffer.buffer;
+
+    // Not doing rotation at all for YUV to YUV reprocess
+    if (mReprocessType != REPROCESS_TYPE_JPEG) {
+        LOGD("Override rotation to 0 for channel reprocess type %d",
+                mReprocessType);
+        cam_rotation_info_t rotation_info;
+        memset(&rotation_info, 0, sizeof(rotation_info));
+        rotation_info.rotation = ROTATE_0;
+        rotation_info.streamId = mStreams[0]->getMyServerID();
+        ADD_SET_PARAM_ENTRY_TO_BATCH(meta, CAM_INTF_PARM_ROTATION, rotation_info);
+    }
+
+    // Find and insert crop info for reprocess stream. Framework reprocess
+    // carries exactly one source entry; the copy in the next slot is
+    // retargeted at the reprocess stream's server id.
+    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, meta) {
+        if (1 == crop_data->num_of_streams) {
+            // Store crop/roi information for offline reprocess
+            // in the reprocess stream slot
+            crop_data->crop_info[crop_data->num_of_streams].crop =
+                    crop_data->crop_info[0].crop;
+            crop_data->crop_info[crop_data->num_of_streams].roi_map =
+                    crop_data->crop_info[0].roi_map;
+            crop_data->crop_info[crop_data->num_of_streams].stream_id =
+                    mStreams[0]->getMyServerID();
+            crop_data->num_of_streams++;
+
+            LOGD("Reprocess stream server id: %d",
+                     mStreams[0]->getMyServerID());
+            LOGD("Found offline reprocess crop %dx%d %dx%d",
+                    crop_data->crop_info[0].crop.left,
+                    crop_data->crop_info[0].crop.top,
+                    crop_data->crop_info[0].crop.width,
+                    crop_data->crop_info[0].crop.height);
+            LOGD("Found offline reprocess roi map %dx%d %dx%d",
+                    crop_data->crop_info[0].roi_map.left,
+                    crop_data->crop_info[0].roi_map.top,
+                    crop_data->crop_info[0].roi_map.width,
+                    crop_data->crop_info[0].roi_map.height);
+        } else {
+            LOGE("Incorrect number of offline crop data entries %d",
+                    crop_data->num_of_streams);
+            return BAD_VALUE;
+        }
+    } else {
+        LOGW("Crop data not present");
+    }
+
+    // Retarget the (single) CDS entry at the reprocess stream as well.
+    IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, meta) {
+        if (1 == cdsInfo->num_of_streams) {
+            cdsInfo->cds_info[0].stream_id = mStreams[0]->getMyServerID();
+        } else {
+            LOGE("Incorrect number of offline cds info entries %d",
+                     cdsInfo->num_of_streams);
+            return BAD_VALUE;
+        }
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : doReprocessOffline
+ *
+ * DESCRIPTION: request to do a reprocess on the frame
+ *
+ * PARAMETERS :
+ *   @frame     : input frame for reprocessing
+ *   @isPriorityFrame: Hint that this frame is of priority, equivalent to
+ *              real time, even though it is processed in offline mechanism
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+ int32_t  QCamera3ReprocessChannel::doReprocessOffline(
+        qcamera_fwk_input_pp_data_t *frame, bool isPriorityFrame)
+{
+    int32_t rc = 0;
+    int index;
+    OfflineBuffer mappedBuffer;
+
+    // Validate preconditions: a reprocess stream must exist and the frame
+    // must carry both an input image buffer and a metadata buffer.
+    if (m_numStreams < 1) {
+        LOGE("No reprocess stream is created");
+        return -1;
+    }
+
+    if (NULL == frame) {
+        LOGE("Incorrect input frame");
+        return BAD_VALUE;
+    }
+
+    if (NULL == frame->metadata_buffer.buffer) {
+        LOGE("No metadata available");
+        return BAD_VALUE;
+    }
+
+    if (NULL == frame->input_buffer.buffer) {
+        LOGE("No input buffer available");
+        return BAD_VALUE;
+    }
+
+    // NOTE(review): m_numStreams was already checked above (< 1); this second
+    // (0 == m_numStreams) check is redundant, though the mStreams[0] NULL
+    // check is still meaningful.
+    if ((0 == m_numStreams) || (NULL == mStreams[0])) {
+        LOGE("Reprocess stream not initialized!");
+        return NO_INIT;
+    }
+
+    QCamera3Stream *pStream = mStreams[0];
+
+    //qbuf the output buffer if it was allocated by the framework
+    if (mReprocessType != REPROCESS_TYPE_JPEG && frame->output_buffer != NULL) {
+        if(!m_bIsActive) {
+            // First buffer on an inactive channel: register it on the fly
+            // and start the channel before queueing.
+            rc = registerBuffer(frame->output_buffer, mIsType);
+            if (NO_ERROR != rc) {
+                LOGE("On-the-fly buffer registration failed %d",
+                         rc);
+                return rc;
+            }
+
+            rc = start();
+            if (NO_ERROR != rc) {
+                return rc;
+            }
+        }
+        // Look up (or register, then look up) the framework buffer in the
+        // gralloc memory pool to obtain its stream buffer index.
+        index = mGrallocMemory.getMatchBufIndex((void*)frame->output_buffer);
+        if(index < 0) {
+            rc = registerBuffer(frame->output_buffer, mIsType);
+            if (NO_ERROR != rc) {
+                LOGE("On-the-fly buffer registration failed %d",
+                         rc);
+                return rc;
+            }
+
+            index = mGrallocMemory.getMatchBufIndex((void*)frame->output_buffer);
+            if (index < 0) {
+                LOGE("Could not find object among registered buffers");
+                return DEAD_OBJECT;
+            }
+        }
+        rc = pStream->bufDone(index);
+        if(rc != NO_ERROR) {
+            LOGE("Failed to Q new buffer to stream");
+            return rc;
+        }
+        // Remember which capture request this output buffer belongs to.
+        rc = mGrallocMemory.markFrameNumber(index, frame->frameNumber);
+
+    } else if (mReprocessType == REPROCESS_TYPE_JPEG) {
+        // JPEG path: the output goes into an internally allocated heap
+        // buffer; reuse a free one or grow the pool by one.
+        Mutex::Autolock lock(mFreeBuffersLock);
+        uint32_t bufIdx;
+        if (mFreeBufferList.empty()) {
+            rc = mMemory->allocateOne(mFrameLen);
+            if (rc < 0) {
+                LOGE("Failed allocating heap buffer. Fatal");
+                return BAD_VALUE;
+            } else {
+                bufIdx = (uint32_t)rc;
+            }
+        } else {
+            bufIdx = *(mFreeBufferList.begin());
+            mFreeBufferList.erase(mFreeBufferList.begin());
+        }
+
+        mMemory->markFrameNumber(bufIdx, frame->frameNumber);
+        rc = pStream->bufDone(bufIdx);
+        if (rc != NO_ERROR) {
+            LOGE("Failed to queue new buffer to stream");
+            return rc;
+        }
+    }
+
+    // Map the offline input buffer. Input indices cycle through
+    // [0, mNumBuffers - 1].
+    int32_t max_idx = (int32_t) (mNumBuffers - 1);
+    //loop back the indices if max burst count reached
+    if (mOfflineBuffersIndex == max_idx) {
+       mOfflineBuffersIndex = -1;
+    }
+    uint32_t buf_idx = (uint32_t)(mOfflineBuffersIndex + 1);
+    rc = pStream->mapBuf(
+            CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
+            buf_idx, -1,
+            frame->input_buffer.fd, frame->input_buffer.buffer,
+            frame->input_buffer.frame_len);
+    if (NO_ERROR == rc) {
+        mappedBuffer.index = buf_idx;
+        mappedBuffer.stream = pStream;
+        mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF;
+        mOfflineBuffers.push_back(mappedBuffer);
+        mOfflineBuffersIndex = (int32_t)buf_idx;
+        LOGD("Mapped buffer with index %d", mOfflineBuffersIndex);
+    }
+
+    // Map the offline metadata buffer. Meta indices use the second half of
+    // the index space, [mNumBuffers, 2*mNumBuffers - 1].
+    max_idx = (int32_t) ((mNumBuffers * 2) - 1);
+    //loop back the indices if max burst count reached
+    if (mOfflineMetaIndex == max_idx) {
+       mOfflineMetaIndex = (int32_t) (mNumBuffers - 1);
+    }
+    uint32_t meta_buf_idx = (uint32_t)(mOfflineMetaIndex + 1);
+    // NOTE(review): rc is OR-combined here, so a failure from the input
+    // mapBuf above propagates; if only this meta mapBuf fails, the input
+    // mapping already pushed to mOfflineBuffers is not unmapped here —
+    // presumably cleaned up with the rest of mOfflineBuffers elsewhere;
+    // confirm.
+    rc |= pStream->mapBuf(
+            CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF,
+            meta_buf_idx, -1,
+            frame->metadata_buffer.fd, frame->metadata_buffer.buffer,
+            frame->metadata_buffer.frame_len);
+    if (NO_ERROR == rc) {
+        mappedBuffer.index = meta_buf_idx;
+        mappedBuffer.stream = pStream;
+        mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF;
+        mOfflineMetaBuffers.push_back(mappedBuffer);
+        mOfflineMetaIndex = (int32_t)meta_buf_idx;
+        LOGD("Mapped meta buffer with index %d", mOfflineMetaIndex);
+    }
+
+    if (rc == NO_ERROR) {
+        cam_stream_parm_buffer_t param;
+        uint32_t numPendingPriorityFrames = 0;
+
+        if(isPriorityFrame && (mReprocessType != REPROCESS_TYPE_JPEG)) {
+            Mutex::Autolock lock(mPriorityFramesLock);
+            /* read the length before pushing the frame number to check if
+             * vector is empty */
+            numPendingPriorityFrames = mPriorityFrames.size();
+            mPriorityFrames.push(frame->frameNumber);
+        }
+
+        // Only the first pending priority frame switches the stream into
+        // high-performance mode; later ones find it already enabled.
+        if(isPriorityFrame && !numPendingPriorityFrames &&
+            (mReprocessType != REPROCESS_TYPE_JPEG)) {
+            memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+            param.type = CAM_STREAM_PARAM_TYPE_REQUEST_OPS_MODE;
+            param.perf_mode = CAM_PERF_HIGH_PERFORMANCE;
+            rc = pStream->setParameter(param);
+            if (rc != NO_ERROR) {
+                ALOGE("%s: setParameter for CAM_PERF_HIGH_PERFORMANCE failed",
+                    __func__);
+            }
+            {
+                Mutex::Autolock lock(mPriorityFramesLock);
+                mReprocessPerfMode = true;
+            }
+        }
+
+        // Issue the actual reprocess request, referencing the buffer and
+        // metadata indices mapped above.
+        memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+        param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
+        param.reprocess.buf_index = buf_idx;
+        param.reprocess.frame_idx = frame->input_buffer.frame_idx;
+        param.reprocess.meta_present = 1;
+        param.reprocess.meta_buf_index = meta_buf_idx;
+
+        LOGI("Offline reprocessing id = %d buf Id = %d meta index = %d",
+                    param.reprocess.frame_idx, param.reprocess.buf_index,
+                    param.reprocess.meta_buf_index);
+        rc = pStream->setParameter(param);
+        if (rc != NO_ERROR) {
+            LOGE("stream setParameter for reprocess failed");
+            // Undo the perf-mode boost for this frame on failure.
+            resetToCamPerfNormal(frame->frameNumber);
+        }
+    } else {
+        LOGE("Input buffer memory map failed: %d", rc);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : doReprocess
+ *
+ * DESCRIPTION: request to do a reprocess on the frame
+ *
+ * PARAMETERS :
+ *   @buf_fd     : fd to the input buffer that needs reprocess
+ *   @buffer     : Buffer ptr
+ *   @buf_length : length of the input buffer
+ *   @ret_val    : result of reprocess.
+ *                 Example: Could be faceID in case of register face image.
+ *   @meta_frame : metadata frame.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ReprocessChannel::doReprocess(int buf_fd, void *buffer, size_t buf_length,
+        int32_t &ret_val, mm_camera_super_buf_t *meta_frame)
+{
+    int32_t rc = 0;
+    if (m_numStreams < 1) {
+        LOGE("No reprocess stream is created");
+        return -1;
+    }
+    if (meta_frame == NULL) {
+        LOGE("Did not get corresponding metadata in time");
+        return -1;
+    }
+
+    // Online reprocess: map the caller's buffer as offline input on every
+    // reprocess stream, submit the request synchronously, then unmap.
+    uint8_t buf_idx = 0;
+    for (uint32_t i = 0; i < m_numStreams; i++) {
+        rc = mStreams[i]->mapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
+                                 buf_idx, -1,
+                                 buf_fd, buffer, buf_length);
+
+        if (rc == NO_ERROR) {
+            cam_stream_parm_buffer_t param;
+            memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+            param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
+            param.reprocess.buf_index = buf_idx;
+            param.reprocess.meta_present = 1;
+            // Metadata comes from the companion metadata channel's stream.
+            param.reprocess.meta_stream_handle = m_pMetaChannel->mStreams[0]->getMyServerID();
+            param.reprocess.meta_buf_index = meta_frame->bufs[0]->buf_idx;
+
+            // NOTE(review): param.reprocess.frame_idx is never assigned after
+            // the memset above, so this log always prints id = 0.
+            LOGI("Online reprocessing id = %d buf Id = %d meta index = %d",
+                    param.reprocess.frame_idx, param.reprocess.buf_index,
+                    param.reprocess.meta_buf_index);
+            rc = mStreams[i]->setParameter(param);
+            if (rc == NO_ERROR) {
+                // e.g. faceID when reprocess registers a face image.
+                ret_val = param.reprocess.ret_val;
+            }
+            mStreams[i]->unmapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
+                                  buf_idx, -1);
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addReprocStreamsFromSource
+ *
+ * DESCRIPTION: add reprocess streams from input source channel
+ *
+ * PARAMETERS :
+ *   @config         : pp feature configuration
+ *   @src_config     : source reprocess configuration
+ *   @isType         : type of image stabilization required on this stream
+ *   @pMetaChannel   : ptr to metadata channel to get corresp. metadata
+ *
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ReprocessChannel::addReprocStreamsFromSource(cam_pp_feature_config_t &pp_config,
+        const reprocess_config_t &src_config , cam_is_type_t is_type,
+        QCamera3Channel *pMetaChannel)
+{
+    int32_t rc = 0;
+    cam_stream_reproc_config_t reprocess_config;
+    cam_stream_type_t streamType;
+
+    cam_dimension_t streamDim = src_config.output_stream_dim;
+
+    // Record the source stream handle when reprocessing from a live channel.
+    if (NULL != src_config.src_channel) {
+        QCamera3Stream *pSrcStream = src_config.src_channel->getStreamByIndex(0);
+        if (pSrcStream == NULL) {
+           LOGE("source channel doesn't have a stream");
+           return BAD_VALUE;
+        }
+        mSrcStreamHandles[m_numStreams] = pSrcStream->getMyHandle();
+    }
+
+    // Build the offline reprocess stream configuration from the source.
+    streamType = CAM_STREAM_TYPE_OFFLINE_PROC;
+    reprocess_config.pp_type = CAM_OFFLINE_REPROCESS_TYPE;
+
+    reprocess_config.offline.input_fmt = src_config.stream_format;
+    reprocess_config.offline.input_dim = src_config.input_stream_dim;
+    reprocess_config.offline.input_buf_planes.plane_info =
+            src_config.input_stream_plane_info.plane_info;
+    reprocess_config.offline.num_of_bufs = (uint8_t)mNumBuffers;
+    reprocess_config.offline.input_type = src_config.stream_type;
+
+    reprocess_config.pp_feature_config = pp_config;
+    QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
+            m_handle,
+            m_camOps,
+            &mPaddingInfo,
+            (QCamera3Channel*)this);
+    // NOTE(review): standard operator new throws rather than returning NULL;
+    // this check only matters if the HAL is built without exceptions —
+    // confirm build flags.
+    if (pStream == NULL) {
+        LOGE("No mem for Stream");
+        return NO_MEMORY;
+    }
+
+    rc = pStream->init(streamType, src_config.stream_format,
+            streamDim, ROTATE_0, &reprocess_config,
+            (uint8_t)mNumBuffers,
+            reprocess_config.pp_feature_config.feature_mask,
+            is_type,
+            0,/* batchSize */
+            QCamera3Channel::streamCbRoutine, this);
+
+    if (rc == 0) {
+        mStreams[m_numStreams] = pStream;
+        m_numStreams++;
+    } else {
+        LOGE("failed to create reprocess stream");
+        delete pStream;
+    }
+
+    // NOTE(review): NO_ERROR is 0, so this duplicates the rc == 0 check above;
+    // the two bodies could be merged.
+    if (rc == NO_ERROR) {
+        m_pSrcChannel = src_config.src_channel;
+        m_pMetaChannel = pMetaChannel;
+        mReprocessType = src_config.reprocess_type;
+        LOGD("mReprocessType is %d", mReprocessType);
+    }
+    // Prime the pipeline with a single super-buffer request.
+    mm_camera_req_buf_t buf;
+    memset(&buf, 0x0, sizeof(buf));
+    buf.type = MM_CAMERA_REQ_SUPER_BUF;
+    buf.num_buf_requested = 1;
+    if(m_camOps->request_super_buf(m_camHandle,m_handle, &buf) < 0) {
+        LOGE("Request for super buffer failed");
+    }
+    return rc;
+}
+
+/* QCamera3SupportChannel methods */
+
+// Default (fixed) dimension used by the support channel's internal stream.
+cam_dimension_t QCamera3SupportChannel::kDim = {640, 480};
+
+// Constructor: internal (non-framework) channel used for auxiliary streams
+// such as analysis. Passes a NULL channel callback to the base class since
+// its buffers are never returned to the framework.
+QCamera3SupportChannel::QCamera3SupportChannel(uint32_t cam_handle,
+                    uint32_t channel_handle,
+                    mm_camera_ops_t *cam_ops,
+                    cam_padding_info_t *paddingInfo,
+                    cam_feature_mask_t postprocess_mask,
+                    cam_stream_type_t streamType,
+                    cam_dimension_t *dim,
+                    cam_format_t streamFormat,
+                    uint8_t hw_analysis_supported,
+                    cam_color_filter_arrangement_t color_arrangement,
+                    void *userData, uint32_t numBuffers) :
+                        QCamera3Channel(cam_handle, channel_handle, cam_ops,
+                                NULL, paddingInfo, postprocess_mask,
+                                userData, numBuffers),
+                        mMemory(NULL)
+{
+    memcpy(&mDim, dim, sizeof(cam_dimension_t));
+    mStreamType = streamType;
+    mStreamFormat = streamFormat;
+   // Make Analysis same as Preview format
+   // (only when HW analysis is unsupported and the sensor is not mono/Y-only)
+   if (!hw_analysis_supported && mStreamType == CAM_STREAM_TYPE_ANALYSIS &&
+           color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
+        mStreamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_PREVIEW);
+   }
+}
+
+// Destructor: tears down the channel streams, then releases the internally
+// allocated heap memory if it is still held.
+QCamera3SupportChannel::~QCamera3SupportChannel()
+{
+    destroy();
+
+    if (mMemory) {
+        mMemory->deallocate();
+        delete mMemory;
+        mMemory = NULL;
+    }
+}
+
+// One-time setup: adds the single internal stream for this channel.
+// Fails with -EINVAL if called twice (memory or a stream already exists).
+int32_t QCamera3SupportChannel::initialize(cam_is_type_t isType)
+{
+    int32_t rc;
+
+    if (mMemory || m_numStreams > 0) {
+        // NOTE(review): message says "metadata channel" but this is the
+        // support channel — likely copied from QCamera3MetadataChannel.
+        LOGE("metadata channel already initialized");
+        return -EINVAL;
+    }
+
+    mIsType = isType;
+    rc = QCamera3Channel::addStream(mStreamType,
+            mStreamFormat, mDim, ROTATE_0, MIN_STREAMING_BUFFER_NUM,
+            mPostProcMask, mIsType);
+    if (rc < 0) {
+        LOGE("addStream failed");
+    }
+    return rc;
+}
+
+// No-op: the support channel streams internally and takes no per-request
+// framework buffers.
+int32_t QCamera3SupportChannel::request(buffer_handle_t * /*buffer*/,
+                                                uint32_t /*frameNumber*/)
+{
+    return NO_ERROR;
+}
+
+// Stream callback: frames from this channel are consumed internally, so each
+// buffer is immediately returned to the stream and the super-buffer wrapper
+// (allocated by the camera interface) is freed.
+void QCamera3SupportChannel::streamCbRoutine(
+                        mm_camera_super_buf_t *super_frame,
+                        QCamera3Stream * /*stream*/)
+{
+    if (super_frame == NULL || super_frame->num_bufs != 1) {
+        LOGE("super_frame is not valid");
+        return;
+    }
+    bufDone(super_frame);
+    free(super_frame);
+}
+
+// Allocates the heap-backed buffer pool (mNumBuffers buffers of 'len' bytes)
+// for this channel's stream. Returns NULL on allocation failure, leaving
+// mMemory NULL.
+QCamera3StreamMem* QCamera3SupportChannel::getStreamBufs(uint32_t len)
+{
+    int rc;
+    mMemory = new QCamera3StreamMem(mNumBuffers);
+    // NOTE(review): standard operator new throws; this NULL check is only
+    // effective if built without exceptions — confirm build flags.
+    if (!mMemory) {
+        LOGE("unable to create heap memory");
+        return NULL;
+    }
+    rc = mMemory->allocateAll(len);
+    if (rc < 0) {
+        LOGE("unable to allocate heap memory");
+        delete mMemory;
+        mMemory = NULL;
+        return NULL;
+    }
+    return mMemory;
+}
+
+// Releases the buffer pool created by getStreamBufs().
+// NOTE(review): dereferences mMemory unconditionally — presumably only
+// called after a successful getStreamBufs(); confirm against callers.
+void QCamera3SupportChannel::putStreamBufs()
+{
+    mMemory->deallocate();
+    delete mMemory;
+    mMemory = NULL;
+}
+
+}; // namespace qcamera
diff --git a/msmcobalt/QCamera2/HAL3/QCamera3Channel.h b/msmcobalt/QCamera2/HAL3/QCamera3Channel.h
new file mode 100644
index 0000000..84c1679
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL3/QCamera3Channel.h
@@ -0,0 +1,628 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA3_CHANNEL_H__
+#define __QCAMERA3_CHANNEL_H__
+
+// System dependencies
+#include <utils/List.h>
+#include <utils/Mutex.h>
+#include <utils/Vector.h>
+#include "gralloc_priv.h"
+
+// Camera dependencies
+#include "cam_intf.h"
+#include "cam_types.h"
+#include "hardware/camera3.h"
+#include "QCamera3HALHeader.h"
+#include "QCamera3Mem.h"
+#include "QCamera3PostProc.h"
+#include "QCamera3Stream.h"
+#include "QCamera3StreamMem.h"
+
+extern "C" {
+#include "mm_camera_interface.h"
+#include "mm_jpeg_interface.h"
+}
+
+using namespace android;
+
+#define MIN_STREAMING_BUFFER_NUM 7+11
+
+#define QCAMERA_DUMP_FRM_PREVIEW          1
+#define QCAMERA_DUMP_FRM_VIDEO            (1<<1)
+#define QCAMERA_DUMP_FRM_SNAPSHOT         (1<<2)
+#define QCAMERA_DUMP_FRM_CALLBACK         (1<<3)
+#define QCAMERA_DUMP_FRM_INPUT_REPROCESS  (1<<6)
+
+typedef int64_t nsecs_t;
+
+namespace qcamera {
+
+typedef void (*channel_cb_routine)(mm_camera_super_buf_t *metadata,
+                                camera3_stream_buffer_t *buffer,
+                                uint32_t frame_number, bool isInputBuffer,
+                                void *userdata);
+// Abstract base for all HAL3 channels. A channel owns up to
+// MAX_STREAM_NUM_IN_BUNDLE streams and routes stream callbacks either to an
+// internal consumer or back to the framework via mChannelCB.
+class QCamera3Channel
+{
+public:
+    QCamera3Channel(uint32_t cam_handle,
+                   uint32_t channel_handle,
+                   mm_camera_ops_t *cam_ops,
+                   channel_cb_routine cb_routine,
+                   cam_padding_info_t *paddingInfo,
+                   cam_feature_mask_t postprocess_mask,
+                   void *userData, uint32_t numBuffers);
+    virtual ~QCamera3Channel();
+
+    virtual int32_t start();
+    virtual int32_t stop();
+    virtual int32_t setBatchSize(uint32_t);
+    virtual int32_t queueBatchBuf();
+    virtual int32_t setPerFrameMapUnmap(bool enable);
+    int32_t bufDone(mm_camera_super_buf_t *recvd_frame);
+    int32_t setBundleInfo(const cam_bundle_config_t &bundleInfo);
+
+    virtual uint32_t getStreamTypeMask();
+    uint32_t getStreamID(uint32_t streamMask);
+    void destroy();
+    // Pure virtuals each concrete channel must implement.
+    virtual int32_t initialize(cam_is_type_t isType) = 0;
+    // Default request() overloads are no-ops; channels that accept framework
+    // buffers override them.
+    virtual int32_t request(buffer_handle_t * /*buffer*/,
+                uint32_t /*frameNumber*/){ return 0;};
+    virtual int32_t request(buffer_handle_t * /*buffer*/,
+                uint32_t /*frameNumber*/,
+                camera3_stream_buffer_t* /*pInputBuffer*/,
+                metadata_buffer_t* /*metadata*/){ return 0;};
+    virtual void streamCbRoutine(mm_camera_super_buf_t *super_frame,
+                            QCamera3Stream *stream) = 0;
+
+    virtual int32_t registerBuffer(buffer_handle_t *buffer, cam_is_type_t isType) = 0;
+    virtual QCamera3StreamMem *getStreamBufs(uint32_t len) = 0;
+    virtual void putStreamBufs() = 0;
+    virtual int32_t flush();
+
+    QCamera3Stream *getStreamByHandle(uint32_t streamHandle);
+    uint32_t getMyHandle() const {return m_handle;};
+    uint32_t getNumOfStreams() const {return m_numStreams;};
+    uint32_t getNumBuffers() const {return mNumBuffers;};
+    QCamera3Stream *getStreamByIndex(uint32_t index);
+
+    // Static trampoline: dispatches the C-style stream callback to the
+    // owning channel instance passed through 'userdata'.
+    static void streamCbRoutine(mm_camera_super_buf_t *super_frame,
+                QCamera3Stream *stream, void *userdata);
+    void dumpYUV(mm_camera_buf_def_t *frame, cam_dimension_t dim,
+            cam_frame_len_offset_t offset, uint8_t name);
+    bool isUBWCEnabled();
+    cam_format_t getStreamDefaultFormat(cam_stream_type_t type);
+
+    void *mUserData;
+    cam_padding_info_t mPaddingInfo;
+    QCamera3Stream *mStreams[MAX_STREAM_NUM_IN_BUNDLE];
+    uint32_t m_numStreams;
+protected:
+
+    int32_t addStream(cam_stream_type_t streamType,
+                      cam_format_t streamFormat,
+                      cam_dimension_t streamDim,
+                      cam_rotation_t streamRotation,
+                      uint8_t minStreamBufnum,
+                      cam_feature_mask_t postprocessMask,
+                      cam_is_type_t isType,
+                      uint32_t batchSize = 0);
+
+    int32_t allocateStreamInfoBuf(camera3_stream_t *stream);
+
+    uint32_t m_camHandle;
+    mm_camera_ops_t *m_camOps;
+    bool m_bIsActive;
+
+    uint32_t m_handle;
+
+
+    mm_camera_buf_notify_t mDataCB;
+
+
+    QCamera3HeapMemory *mStreamInfoBuf;
+    channel_cb_routine mChannelCB;
+    //cam_padding_info_t *mPaddingInfo;
+    cam_feature_mask_t mPostProcMask;
+    uint32_t mYUVDump;
+    cam_is_type_t mIsType;
+    uint32_t mNumBuffers;
+    /* Enable unmapping of buffer before issuing buffer callback. Default value
+     * for this flag is true and is selectively set to false for the usecases
+     * such as HFR to avoid any performance hit due to mapping/unmapping */
+    bool    mPerFrameMapUnmapEnable;
+    uint32_t mFrmNum;
+    uint32_t mDumpFrmCnt;
+    uint32_t mSkipMode;
+    uint32_t mDumpSkipCnt;
+};
+
+/* QCamera3ProcessingChannel is used to handle all streams that are directly
+ * generated by hardware and given to frameworks without any postprocessing at HAL.
+ * It also handles input streams that require reprocessing by hardware and then
+ * returned to frameworks. */
+class QCamera3ProcessingChannel : public QCamera3Channel
+{
+public:
+   QCamera3ProcessingChannel(uint32_t cam_handle,
+           uint32_t channel_handle,
+           mm_camera_ops_t *cam_ops,
+           channel_cb_routine cb_routine,
+           cam_padding_info_t *paddingInfo,
+           void *userData,
+           camera3_stream_t *stream,
+           cam_stream_type_t stream_type,
+           cam_feature_mask_t postprocess_mask,
+           QCamera3Channel *metadataChannel,
+           uint32_t numBuffers = MAX_INFLIGHT_REQUESTS);
+
+    ~QCamera3ProcessingChannel();
+
+    virtual int32_t initialize(cam_is_type_t isType);
+    virtual int32_t request(buffer_handle_t *buffer,
+            uint32_t frameNumber,
+            camera3_stream_buffer_t* pInputBuffer,
+            metadata_buffer_t* metadata);
+    virtual void streamCbRoutine(mm_camera_super_buf_t *super_frame,
+            QCamera3Stream *stream);
+    virtual QCamera3StreamMem *getStreamBufs(uint32_t len);
+    virtual void putStreamBufs();
+    virtual int32_t registerBuffer(buffer_handle_t *buffer, cam_is_type_t isType);
+
+    virtual int32_t stop();
+
+    // Each concrete subclass declares which reprocess path it uses.
+    virtual reprocess_type_t getReprocessType() = 0;
+
+    // Invoked by the postprocessor when an offline reprocess result is ready.
+    virtual void reprocessCbRoutine(buffer_handle_t *resultBuffer,
+            uint32_t resultFrameNumber);
+
+    int32_t queueReprocMetadata(mm_camera_super_buf_t *metadata);
+    int32_t metadataBufDone(mm_camera_super_buf_t *recvd_frame);
+    int32_t translateStreamTypeAndFormat(camera3_stream_t *stream,
+            cam_stream_type_t &streamType,
+            cam_format_t &streamFormat);
+    int32_t setReprocConfig(reprocess_config_t &reproc_cfg,
+            camera3_stream_buffer_t *pInputBuffer,
+            metadata_buffer_t *metadata,
+            cam_format_t streamFormat, cam_dimension_t dim);
+    int32_t setFwkInputPPData(qcamera_fwk_input_pp_data_t *src_frame,
+            camera3_stream_buffer_t *pInputBuffer,
+            reprocess_config_t *reproc_cfg,
+            metadata_buffer_t *metadata,
+            buffer_handle_t *output_buffer,
+            uint32_t frameNumber);
+    int32_t checkStreamCbErrors(mm_camera_super_buf_t *super_frame,
+            QCamera3Stream *stream);
+    int32_t getStreamSize(cam_dimension_t &dim);
+
+    QCamera3PostProcessor m_postprocessor; // post processor
+    void showDebugFPS(int32_t streamType);
+
+protected:
+    // FPS debug instrumentation state.
+    uint8_t mDebugFPS;
+    int mFrameCount;
+    int mLastFrameCount;
+    nsecs_t mLastFpsTime;
+    bool isWNREnabled() {return m_bWNROn;};
+    void startPostProc(const reprocess_config_t &reproc_cfg);
+    void issueChannelCb(buffer_handle_t *resultBuffer,
+            uint32_t resultFrameNumber);
+    int32_t releaseOfflineMemory(uint32_t resultFrameNumber);
+
+    QCamera3StreamMem mMemory; //output buffer allocated by fwk
+    camera3_stream_t *mCamera3Stream;
+    uint32_t mNumBufs;
+    cam_stream_type_t mStreamType;
+    cam_format_t mStreamFormat;
+    uint8_t mIntent;
+
+    bool mPostProcStarted;
+    bool mInputBufferConfig;   // Set when the processing channel is configured
+                               // for processing input(framework) buffers
+
+    QCamera3Channel *m_pMetaChannel;
+    mm_camera_super_buf_t *mMetaFrame;
+    QCamera3StreamMem mOfflineMemory;      //reprocessing input buffer
+    QCamera3StreamMem mOfflineMetaMemory; //reprocessing metadata buffer
+    List<uint32_t> mFreeOfflineMetaBuffersList;
+    Mutex mFreeOfflineMetaBuffersLock;
+
+private:
+
+    bool m_bWNROn;
+};
+
+/* QCamera3RegularChannel is used to handle all streams that are directly
+ * generated by hardware and given to frameworks without any postprocessing at HAL.
+ * Examples are: all IMPLEMENTATION_DEFINED streams, CPU_READ streams. */
+class QCamera3RegularChannel : public QCamera3ProcessingChannel
+{
+public:
+    QCamera3RegularChannel(uint32_t cam_handle,
+                    uint32_t channel_handle,
+                    mm_camera_ops_t *cam_ops,
+                    channel_cb_routine cb_routine,
+                    cam_padding_info_t *paddingInfo,
+                    void *userData,
+                    camera3_stream_t *stream,
+                    cam_stream_type_t stream_type,
+                    cam_feature_mask_t postprocess_mask,
+                    QCamera3Channel *metadataChannel,
+                    uint32_t numBuffers = MAX_INFLIGHT_REQUESTS);
+
+    virtual ~QCamera3RegularChannel();
+
+    virtual int32_t setBatchSize(uint32_t batchSize);
+    virtual uint32_t getStreamTypeMask();
+    virtual int32_t queueBatchBuf();
+    virtual int32_t initialize(cam_is_type_t isType);
+    using QCamera3ProcessingChannel::request;
+    virtual int32_t request(buffer_handle_t *buffer, uint32_t frameNumber);
+    virtual reprocess_type_t getReprocessType();
+
+private:
+    // Internal second-phase init driven by the gralloc private handle.
+    int32_t initialize(struct private_handle_t *priv_handle);
+
+    uint32_t mBatchSize;   // >0 enables batch mode (e.g. HFR)
+    cam_rotation_t mRotation;
+};
+
+/* QCamera3MetadataChannel is for metadata stream generated by camera daemon. */
+class QCamera3MetadataChannel : public QCamera3Channel
+{
+public:
+    QCamera3MetadataChannel(uint32_t cam_handle,
+                    uint32_t channel_handle,
+                    mm_camera_ops_t *cam_ops,
+                    channel_cb_routine cb_routine,
+                    cam_padding_info_t *paddingInfo,
+                    cam_feature_mask_t postprocess_mask,
+                    void *userData,
+                    uint32_t numBuffers = MIN_STREAMING_BUFFER_NUM);
+    virtual ~QCamera3MetadataChannel();
+
+    virtual int32_t initialize(cam_is_type_t isType);
+
+    virtual int32_t request(buffer_handle_t *buffer, uint32_t frameNumber);
+    virtual void streamCbRoutine(mm_camera_super_buf_t *super_frame,
+                            QCamera3Stream *stream);
+
+    virtual QCamera3StreamMem *getStreamBufs(uint32_t le);
+    virtual void putStreamBufs();
+    // No gralloc buffers flow through this channel, so registration is a
+    // no-op.
+    virtual int32_t registerBuffer(buffer_handle_t * /*buffer*/, cam_is_type_t /*isType*/)
+            { return NO_ERROR; };
+
+private:
+    QCamera3StreamMem *mMemory;   // internally allocated metadata buffers
+};
+
+/* QCamera3RawChannel is for opaque/cross-platform raw stream containing
+ * vendor specific bayer data or 16-bit unpacked bayer data */
+class QCamera3RawChannel : public QCamera3RegularChannel
+{
+public:
+    QCamera3RawChannel(uint32_t cam_handle,
+                    uint32_t channel_handle,
+                    mm_camera_ops_t *cam_ops,
+                    channel_cb_routine cb_routine,
+                    cam_padding_info_t *paddingInfo,
+                    void *userData,
+                    camera3_stream_t *stream,
+                    cam_feature_mask_t postprocess_mask,
+                    QCamera3Channel *metadataChannel,
+                    bool raw_16 = false,
+                    uint32_t numBuffers = MAX_INFLIGHT_REQUESTS);
+
+    virtual ~QCamera3RawChannel();
+
+    virtual int32_t initialize(cam_is_type_t isType);
+
+    virtual void streamCbRoutine(mm_camera_super_buf_t *super_frame,
+                            QCamera3Stream *stream);
+
+    virtual reprocess_type_t getReprocessType();
+
+private:
+    bool mRawDump;    // dump raw frames to file when enabled
+    bool mIsRaw16;    // stream delivers 16-bit unpacked bayer
+
+    void dumpRawSnapshot(mm_camera_buf_def_t *frame);
+    // In-place conversions from vendor raw layouts to RAW16.
+    void convertLegacyToRaw16(mm_camera_buf_def_t *frame);
+    void convertMipiToRaw16(mm_camera_buf_def_t *frame);
+};
+
+/*
+ * QCamera3RawDumpChannel is for internal use only for Raw dump
+ */
+
+class QCamera3RawDumpChannel : public QCamera3Channel
+{
+public:
+    QCamera3RawDumpChannel(uint32_t cam_handle,
+                    uint32_t channel_handle,
+                    mm_camera_ops_t *cam_ops,
+                    cam_dimension_t rawDumpSize,
+                    cam_padding_info_t *paddingInfo,
+                    void *userData,
+                    cam_feature_mask_t postprocess_mask, uint32_t numBuffers = 3U);
+    virtual ~QCamera3RawDumpChannel();
+    virtual int32_t initialize(cam_is_type_t isType);
+    virtual void streamCbRoutine(mm_camera_super_buf_t *super_frame,
+                            QCamera3Stream *stream);
+    virtual QCamera3StreamMem *getStreamBufs(uint32_t le);
+    virtual void putStreamBufs();
+    // No framework buffers are registered on this internal channel.
+    virtual int32_t registerBuffer(buffer_handle_t * /*buffer*/, cam_is_type_t /*isType*/)
+            { return NO_ERROR; };
+    virtual int32_t request(buffer_handle_t *buffer, uint32_t frameNumber);
+    void dumpRawSnapshot(mm_camera_buf_def_t *frame);
+
+public:
+    cam_dimension_t mDim;
+
+private:
+    bool mRawDump;                // dump-to-file enable flag
+    QCamera3StreamMem *mMemory;   // internally allocated buffer pool
+};
+
+/* QCamera3YUVChannel is used to handle flexible YUV streams that are directly
+ * generated by hardware and given to frameworks without any postprocessing at HAL.
+ * It is also used to handle input buffers that generate YUV outputs */
+class QCamera3YUVChannel : public QCamera3ProcessingChannel
+{
+public:
+    QCamera3YUVChannel(uint32_t cam_handle,
+            uint32_t channel_handle,
+            mm_camera_ops_t *cam_ops,
+            channel_cb_routine cb_routine,
+            cam_padding_info_t *paddingInfo,
+            void *userData,
+            camera3_stream_t *stream,
+            cam_stream_type_t stream_type,
+            cam_feature_mask_t postprocess_mask,
+            QCamera3Channel *metadataChannel);
+    ~QCamera3YUVChannel();
+    virtual int32_t initialize(cam_is_type_t isType);
+    using QCamera3ProcessingChannel::request;
+    virtual int32_t request(buffer_handle_t *buffer,
+            uint32_t frameNumber,
+            camera3_stream_buffer_t* pInputBuffer,
+            metadata_buffer_t* metadata, bool &needMetadata);
+    virtual reprocess_type_t getReprocessType();
+    virtual void streamCbRoutine(mm_camera_super_buf_t *super_frame,
+            QCamera3Stream *stream);
+    virtual void putStreamBufs();
+    virtual void reprocessCbRoutine(buffer_handle_t *resultBuffer,
+        uint32_t resultFrameNumber);
+
+private:
+    typedef struct {
+        uint32_t frameNumber;
+        bool offlinePpFlag;
+        buffer_handle_t *output;
+        mm_camera_super_buf_t *callback_buffer;
+    } PpInfo;
+
+    // Whether offline postprocessing is required for this channel
+    bool mBypass;
+    uint32_t mFrameLen;
+
+    // Current edge, noise, and crop region setting
+    cam_edge_application_t mEdgeMode;
+    uint32_t mNoiseRedMode;
+    cam_crop_region_t mCropRegion;
+
+    // Mutex to protect mOfflinePpFlagMap and mFreeHeapBufferList
+    Mutex mOfflinePpLock;
+    // Map between frame number and whether the request needs to be
+    // postprocessed.
+    List<PpInfo> mOfflinePpInfoList;
+    // Heap buffer index list
+    List<uint32_t> mFreeHeapBufferList;
+
+private:
+    bool needsFramePostprocessing(metadata_buffer_t* meta);
+    int32_t handleOfflinePpCallback(uint32_t resultFrameNumber,
+            Vector<mm_camera_super_buf_t *>& pendingCbs);
+};
+
+/* QCamera3PicChannel is for JPEG stream, which contains a YUV stream generated
+ * by the hardware, and encoded to a JPEG stream */
+class QCamera3PicChannel : public QCamera3ProcessingChannel
+{
+public:
+    QCamera3PicChannel(uint32_t cam_handle,
+            uint32_t channel_handle,
+            mm_camera_ops_t *cam_ops,
+            channel_cb_routine cb_routine,
+            cam_padding_info_t *paddingInfo,
+            void *userData,
+            camera3_stream_t *stream,
+            cam_feature_mask_t postprocess_mask,
+            bool is4KVideo,
+            bool isInputStreamConfigured,
+            QCamera3Channel *metadataChannel,
+            uint32_t numBuffers = MAX_INFLIGHT_REQUESTS);
+    ~QCamera3PicChannel();
+
+    virtual int32_t initialize(cam_is_type_t isType);
+    virtual int32_t flush();
+    virtual int32_t request(buffer_handle_t *buffer,
+            uint32_t frameNumber,
+            camera3_stream_buffer_t* pInputBuffer,
+            metadata_buffer_t* metadata);
+    virtual void streamCbRoutine(mm_camera_super_buf_t *super_frame,
+            QCamera3Stream *stream);
+
+    virtual QCamera3StreamMem *getStreamBufs(uint32_t le);
+    virtual void putStreamBufs();
+    virtual reprocess_type_t getReprocessType();
+
+    QCamera3Exif *getExifData(metadata_buffer_t *metadata,
+            jpeg_settings_t *jpeg_settings);
+    void overrideYuvSize(uint32_t width, uint32_t height);
+    static void jpegEvtHandle(jpeg_job_status_t status,
+            uint32_t /*client_hdl*/,
+            uint32_t jobId,
+            mm_jpeg_output_t *p_output,
+            void *userdata);
+    static void dataNotifyCB(mm_camera_super_buf_t *recvd_frame,
+            void *userdata);
+
+private:
+    int32_t queueJpegSetting(uint32_t out_buf_index, metadata_buffer_t *metadata);
+
+public:
+    cam_dimension_t m_max_pic_dim;
+
+private:
+    uint32_t mNumSnapshotBufs;
+    uint32_t mYuvWidth, mYuvHeight;
+    int32_t mCurrentBufIndex;
+    bool mInputBufferHint;
+    QCamera3StreamMem *mYuvMemory;
+    // Keep a list of free buffers
+    Mutex mFreeBuffersLock;
+    List<uint32_t> mFreeBufferList;
+    uint32_t mFrameLen;
+};
+
+// reprocess channel class
+class QCamera3ReprocessChannel : public QCamera3Channel
+{
+public:
+    QCamera3ReprocessChannel(uint32_t cam_handle,
+                            uint32_t channel_handle,
+                            mm_camera_ops_t *cam_ops,
+                            channel_cb_routine cb_routine,
+                            cam_padding_info_t *paddingInfo,
+                            cam_feature_mask_t postprocess_mask,
+                            void *userData, void *ch_hdl);
+    QCamera3ReprocessChannel();
+    virtual ~QCamera3ReprocessChannel();
+    // offline reprocess
+    virtual int32_t start();
+    virtual int32_t stop();
+    int32_t doReprocessOffline(qcamera_fwk_input_pp_data_t *frame,
+            bool isPriorityFrame = false);
+    int32_t doReprocess(int buf_fd,void *buffer, size_t buf_length, int32_t &ret_val,
+                        mm_camera_super_buf_t *meta_buf);
+    int32_t overrideMetadata(qcamera_hal3_pp_buffer_t *pp_buffer,
+            mm_camera_buf_def_t *meta_buffer,
+            jpeg_settings_t *jpeg_settings,
+            qcamera_fwk_input_pp_data_t &fwk_frame);
+    int32_t overrideFwkMetadata(qcamera_fwk_input_pp_data_t *frame);
+    virtual QCamera3StreamMem *getStreamBufs(uint32_t len);
+    virtual void putStreamBufs();
+    virtual int32_t initialize(cam_is_type_t isType);
+    int32_t unmapOfflineBuffers(bool all);
+    int32_t bufDone(mm_camera_super_buf_t *recvd_frame);
+    virtual void streamCbRoutine(mm_camera_super_buf_t *super_frame,
+                            QCamera3Stream *stream);
+    static void dataNotifyCB(mm_camera_super_buf_t *recvd_frame,
+                                       void* userdata);
+    int32_t addReprocStreamsFromSource(cam_pp_feature_config_t &pp_config,
+           const reprocess_config_t &src_config,
+           cam_is_type_t is_type,
+           QCamera3Channel *pMetaChannel);
+    QCamera3Stream *getStreamBySrcHandle(uint32_t srcHandle);
+    QCamera3Stream *getSrcStreamBySrcHandle(uint32_t srcHandle);
+    virtual int32_t registerBuffer(buffer_handle_t * buffer, cam_is_type_t isType);
+
+public:
+    void *inputChHandle;
+
+private:
+    typedef struct {
+        QCamera3Stream *stream;
+        cam_mapping_buf_type type;
+        uint32_t index;
+    } OfflineBuffer;
+
+    int32_t resetToCamPerfNormal(uint32_t frameNumber);
+    android::List<OfflineBuffer> mOfflineBuffers;
+    android::List<OfflineBuffer> mOfflineMetaBuffers;
+    int32_t mOfflineBuffersIndex;
+    int32_t mOfflineMetaIndex;
+    uint32_t mFrameLen;
+    Mutex mFreeBuffersLock; // Lock for free heap buffers
+    List<int32_t> mFreeBufferList; // Free heap buffers list
+    reprocess_type_t mReprocessType;
+    uint32_t mSrcStreamHandles[MAX_STREAM_NUM_IN_BUNDLE];
+    QCamera3ProcessingChannel *m_pSrcChannel; // ptr to source channel for reprocess
+    QCamera3Channel *m_pMetaChannel;
+    QCamera3StreamMem *mMemory;
+    QCamera3StreamMem mGrallocMemory;
+    Vector<uint32_t> mPriorityFrames;
+    Mutex            mPriorityFramesLock;
+    bool             mReprocessPerfMode;
+};
+
+
+/* QCamera3SupportChannel is for HAL internal consumption only */
+class QCamera3SupportChannel : public QCamera3Channel
+{
+public:
+    QCamera3SupportChannel(uint32_t cam_handle,
+                    uint32_t channel_handle,
+                    mm_camera_ops_t *cam_ops,
+                    cam_padding_info_t *paddingInfo,
+                    cam_feature_mask_t postprocess_mask,
+                    cam_stream_type_t streamType,
+                    cam_dimension_t *dim,
+                    cam_format_t streamFormat,
+                    uint8_t hw_analysis_supported,
+                    cam_color_filter_arrangement_t color_arrangement,
+                    void *userData,
+                    uint32_t numBuffers = MIN_STREAMING_BUFFER_NUM
+                    );
+    virtual ~QCamera3SupportChannel();
+
+    virtual int32_t initialize(cam_is_type_t isType);
+
+    virtual int32_t request(buffer_handle_t *buffer, uint32_t frameNumber);
+    virtual void streamCbRoutine(mm_camera_super_buf_t *super_frame,
+                            QCamera3Stream *stream);
+
+    virtual QCamera3StreamMem *getStreamBufs(uint32_t le);
+    virtual void putStreamBufs();
+    virtual int32_t registerBuffer(buffer_handle_t * /*buffer*/, cam_is_type_t /*isType*/)
+            { return NO_ERROR; };
+
+    static cam_dimension_t kDim;
+private:
+    QCamera3StreamMem *mMemory;
+    cam_dimension_t mDim;
+    cam_stream_type_t mStreamType;
+    cam_format_t mStreamFormat;
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_CHANNEL_H__ */
diff --git a/msmcobalt/QCamera2/HAL3/QCamera3CropRegionMapper.cpp b/msmcobalt/QCamera2/HAL3/QCamera3CropRegionMapper.cpp
new file mode 100644
index 0000000..94a398b
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL3/QCamera3CropRegionMapper.cpp
@@ -0,0 +1,272 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+#define LOG_TAG "QCamera3CropRegionMapper"
+
+// Camera dependencies
+#include "QCamera3CropRegionMapper.h"
+#include "QCamera3HWI.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+using namespace android;
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : QCamera3CropRegionMapper
+ *
+ * DESCRIPTION: Constructor
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera3CropRegionMapper::QCamera3CropRegionMapper()
+        : mSensorW(0),
+          mSensorH(0),
+          mActiveArrayW(0),
+          mActiveArrayH(0)
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera3CropRegionMapper
+ *
+ * DESCRIPTION: destructor
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+
+QCamera3CropRegionMapper::~QCamera3CropRegionMapper()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : update
+ *
+ * DESCRIPTION: update sensor active array size and sensor output size
+ *
+ * PARAMETERS :
+ *   @active_array_w : active array width
+ *   @active_array_h : active array height
+ *   @sensor_w       : sensor output width
+ *   @sensor_h       : sensor output height
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3CropRegionMapper::update(uint32_t active_array_w,
+        uint32_t active_array_h, uint32_t sensor_w,
+        uint32_t sensor_h)
+{
+    // Sanity check
+    if (active_array_w == 0 || active_array_h == 0 ||
+            sensor_w == 0 || sensor_h == 0) {
+        LOGE("active_array size and sensor output size must be non zero");
+        return;
+    }
+    if (active_array_w < sensor_w || active_array_h < sensor_h) {
+        LOGE("invalid input: active_array [%d, %d], sensor size [%d, %d]",
+                 active_array_w, active_array_h, sensor_w, sensor_h);
+        return;
+    }
+    mSensorW = sensor_w;
+    mSensorH = sensor_h;
+    mActiveArrayW = active_array_w;
+    mActiveArrayH = active_array_h;
+
+    LOGH("active_array: %d x %d, sensor size %d x %d",
+            mActiveArrayW, mActiveArrayH, mSensorW, mSensorH);
+}
+
+/*===========================================================================
+ * FUNCTION   : toActiveArray
+ *
+ * DESCRIPTION: Map crop rectangle from sensor output space to active array space
+ *
+ * PARAMETERS :
+ *   @crop_left   : x coordinate of top left corner of rectangle
+ *   @crop_top    : y coordinate of top left corner of rectangle
+ *   @crop_width  : width of rectangle
+ *   @crop_height : height of rectangle
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3CropRegionMapper::toActiveArray(int32_t& crop_left, int32_t& crop_top,
+        int32_t& crop_width, int32_t& crop_height)
+{
+    if (mSensorW == 0 || mSensorH == 0 ||
+            mActiveArrayW == 0 || mActiveArrayH == 0) {
+        LOGE("sensor/active array sizes are not initialized!");
+        return;
+    }
+
+    crop_left = crop_left * mActiveArrayW / mSensorW;
+    crop_top = crop_top * mActiveArrayH / mSensorH;
+    crop_width = crop_width * mActiveArrayW / mSensorW;
+    crop_height = crop_height * mActiveArrayH / mSensorH;
+
+    boundToSize(crop_left, crop_top, crop_width, crop_height,
+            mActiveArrayW, mActiveArrayH);
+}
+
+/*===========================================================================
+ * FUNCTION   : toSensor
+ *
+ * DESCRIPTION: Map crop rectangle from active array space to sensor output space
+ *
+ * PARAMETERS :
+ *   @crop_left   : x coordinate of top left corner of rectangle
+ *   @crop_top    : y coordinate of top left corner of rectangle
+ *   @crop_width  : width of rectangle
+ *   @crop_height : height of rectangle
+ *
+ * RETURN     : none
+ *==========================================================================*/
+
+void QCamera3CropRegionMapper::toSensor(int32_t& crop_left, int32_t& crop_top,
+        int32_t& crop_width, int32_t& crop_height)
+{
+    if (mSensorW == 0 || mSensorH == 0 ||
+            mActiveArrayW == 0 || mActiveArrayH == 0) {
+        LOGE("sensor/active array sizes are not initialized!");
+        return;
+    }
+
+    crop_left = crop_left * mSensorW / mActiveArrayW;
+    crop_top = crop_top * mSensorH / mActiveArrayH;
+    crop_width = crop_width * mSensorW / mActiveArrayW;
+    crop_height = crop_height * mSensorH / mActiveArrayH;
+
+    LOGD("before bounding left %d, top %d, width %d, height %d",
+         crop_left, crop_top, crop_width, crop_height);
+    boundToSize(crop_left, crop_top, crop_width, crop_height,
+            mSensorW, mSensorH);
+    LOGD("after bounding left %d, top %d, width %d, height %d",
+         crop_left, crop_top, crop_width, crop_height);
+}
+
+/*===========================================================================
+ * FUNCTION   : boundToSize
+ *
+ * DESCRIPTION: Bound a particular rectangle inside a bounding box
+ *
+ * PARAMETERS :
+ *   @left    : x coordinate of top left corner of rectangle
+ *   @top     : y coordinate of top left corner of rectangle
+ *   @width   : width of rectangle
+ *   @height  : height of rectangle
+ *   @bound_w : width of bounding box
+ *   @bound_h : height of bounding box
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3CropRegionMapper::boundToSize(int32_t& left, int32_t& top,
+            int32_t& width, int32_t& height, int32_t bound_w, int32_t bound_h)
+{
+    if (left < 0) {
+        left = 0;
+    }
+    if (top < 0) {
+        top = 0;
+    }
+
+    if ((left + width) > bound_w) {
+        width = bound_w - left;
+    }
+    if ((top + height) > bound_h) {
+        height = bound_h - top;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : toActiveArray
+ *
+ * DESCRIPTION: Map co-ordinate from sensor output space to active array space
+ *
+ * PARAMETERS :
+ *   @x   : x coordinate
+ *   @y   : y coordinate
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3CropRegionMapper::toActiveArray(uint32_t& x, uint32_t& y)
+{
+    if (mSensorW == 0 || mSensorH == 0 ||
+            mActiveArrayW == 0 || mActiveArrayH == 0) {
+        LOGE("sensor/active array sizes are not initialized!");
+        return;
+    }
+    if ((x > static_cast<uint32_t>(mSensorW)) ||
+            (y > static_cast<uint32_t>(mSensorH))) {
+        LOGE("invalid co-ordinate (%d, %d) in (0, 0, %d, %d) space",
+                 x, y, mSensorW, mSensorH);
+        return;
+    }
+    x = x * mActiveArrayW / mSensorW;
+    y = y * mActiveArrayH / mSensorH;
+}
+
+/*===========================================================================
+ * FUNCTION   : toSensor
+ *
+ * DESCRIPTION: Map co-ordinate from active array space to sensor output space
+ *
+ * PARAMETERS :
+ *   @x   : x coordinate
+ *   @y   : y coordinate
+ *
+ * RETURN     : none
+ *==========================================================================*/
+
+void QCamera3CropRegionMapper::toSensor(uint32_t& x, uint32_t& y)
+{
+    if (mSensorW == 0 || mSensorH == 0 ||
+            mActiveArrayW == 0 || mActiveArrayH == 0) {
+        LOGE("sensor/active array sizes are not initialized!");
+        return;
+    }
+
+    if ((x > static_cast<uint32_t>(mActiveArrayW)) ||
+            (y > static_cast<uint32_t>(mActiveArrayH))) {
+        LOGE("invalid co-ordinate (%d, %d) in (0, 0, %d, %d) space",
+                 x, y, mSensorW, mSensorH);
+        return;
+    }
+    x = x * mSensorW / mActiveArrayW;
+    y = y * mSensorH / mActiveArrayH;
+}
+
+}; // end namespace qcamera
diff --git a/msmcobalt/QCamera2/HAL3/QCamera3CropRegionMapper.h b/msmcobalt/QCamera2/HAL3/QCamera3CropRegionMapper.h
new file mode 100644
index 0000000..31c8578
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL3/QCamera3CropRegionMapper.h
@@ -0,0 +1,65 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#ifndef __QCAMERA3CROPREGIONMAPPER_H__
+#define __QCAMERA3CROPREGIONMAPPER_H__
+
+// System dependencies
+#include <utils/Errors.h>
+
+using namespace android;
+
+namespace qcamera {
+
+class QCamera3CropRegionMapper {
+public:
+    QCamera3CropRegionMapper();
+    virtual ~QCamera3CropRegionMapper();
+
+    void update(uint32_t active_array_w, uint32_t active_array_h,
+            uint32_t sensor_w, uint32_t sensor_h);
+    void toActiveArray(int32_t& crop_left, int32_t& crop_top,
+            int32_t& crop_width, int32_t& crop_height);
+    void toSensor(int32_t& crop_left, int32_t& crop_top,
+            int32_t& crop_width, int32_t& crop_height);
+    void toActiveArray(uint32_t& x, uint32_t& y);
+    void toSensor(uint32_t& x, uint32_t& y);
+
+private:
+    /* sensor output size */
+    int32_t mSensorW, mSensorH;
+    int32_t mActiveArrayW, mActiveArrayH;
+
+    void boundToSize(int32_t& left, int32_t& top, int32_t& width,
+            int32_t& height, int32_t bound_w, int32_t bound_h);
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA3CROPREGIONMAPPER_H__ */
diff --git a/msmcobalt/QCamera2/HAL3/QCamera3HALHeader.h b/msmcobalt/QCamera2/HAL3/QCamera3HALHeader.h
new file mode 100644
index 0000000..062b14f
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL3/QCamera3HALHeader.h
@@ -0,0 +1,96 @@
+/* Copyright (c) 2013-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*	notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*	copyright notice, this list of conditions and the following
+*	disclaimer in the documentation and/or other materials provided
+*	with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*	contributors may be used to endorse or promote products derived
+*	from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+#ifndef __QCAMERA_HALHEADER_H__
+#define __QCAMERA_HALHEADER_H__
+
+// System dependencies
+#include "hardware/gralloc.h"
+
+// Camera dependencies
+#include "cam_types.h"
+
+using namespace android;
+
+namespace qcamera {
+
+#define MAX(a, b) ((a) > (b) ? (a) : (b))
+#define MIN(a, b) ((a) < (b) ? (a) : (b))
+
+#define IS_USAGE_ZSL(usage)  (((usage) & (GRALLOC_USAGE_HW_CAMERA_ZSL)) \
+        == (GRALLOC_USAGE_HW_CAMERA_ZSL))
+
+class QCamera3ProcessingChannel;
+
+    typedef enum {
+        INVALID,
+        VALID,
+    } stream_status_t;
+
+    typedef enum {
+       REPROCESS_TYPE_NONE,
+       REPROCESS_TYPE_JPEG,
+       REPROCESS_TYPE_YUV,
+       REPROCESS_TYPE_PRIVATE,
+       REPROCESS_TYPE_RAW
+    } reprocess_type_t;
+
+    typedef struct {
+        uint32_t out_buf_index;
+        int32_t jpeg_orientation;
+        uint8_t jpeg_quality;
+        uint8_t jpeg_thumb_quality;
+        cam_dimension_t thumbnail_size;
+        uint8_t gps_timestamp_valid;
+        int64_t gps_timestamp;
+        uint8_t gps_coordinates_valid;
+        double gps_coordinates[3];
+        char gps_processing_method[GPS_PROCESSING_METHOD_SIZE];
+        uint8_t image_desc_valid;
+        char image_desc[EXIF_IMAGE_DESCRIPTION_SIZE];
+    } jpeg_settings_t;
+
+    typedef struct {
+        int32_t iso_speed;
+        int64_t exposure_time;
+    } metadata_response_t;
+
+    typedef struct {
+        cam_stream_type_t stream_type;
+        cam_format_t stream_format;
+        cam_dimension_t input_stream_dim;
+        cam_stream_buf_plane_info_t input_stream_plane_info;
+        cam_dimension_t output_stream_dim;
+        cam_padding_info_t *padding;
+        reprocess_type_t reprocess_type;
+        QCamera3ProcessingChannel *src_channel;
+    } reprocess_config_t;
+
+};//namespace qcamera
+
+#endif
diff --git a/msmcobalt/QCamera2/HAL3/QCamera3HWI.cpp b/msmcobalt/QCamera2/HAL3/QCamera3HWI.cpp
new file mode 100644
index 0000000..9ce8d53
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL3/QCamera3HWI.cpp
@@ -0,0 +1,10641 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCamera3HWI"
+//#define LOG_NDEBUG 0
+
+#define __STDC_LIMIT_MACROS
+
+// To remove
+#include <cutils/properties.h>
+
+// System dependencies
+#include <dlfcn.h>
+#include <fcntl.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include "utils/Timers.h"
+#include "sys/ioctl.h"
+#include <sync/sync.h>
+#include "gralloc_priv.h"
+
+// Display dependencies
+#include "qdMetaData.h"
+
+// Camera dependencies
+#include "android/QCamera3External.h"
+#include "util/QCameraFlash.h"
+#include "QCamera3HWI.h"
+#include "QCamera3VendorTags.h"
+#include "QCameraTrace.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+using namespace android;
+
+namespace qcamera {
+
+#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
+
+// Pipeline bookkeeping constants (tuning knobs for request handling).
+#define EMPTY_PIPELINE_DELAY 2
+#define PARTIAL_RESULT_COUNT 2
+#define FRAME_SKIP_DELAY     0
+
+// Maximum representable values for 8/10/12-bit pixel data.
+#define MAX_VALUE_8BIT ((1<<8)-1)
+#define MAX_VALUE_10BIT ((1<<10)-1)
+#define MAX_VALUE_12BIT ((1<<12)-1)
+
+#define VIDEO_4K_WIDTH  3840
+#define VIDEO_4K_HEIGHT 2160
+
+// EIS (electronic image stabilization) size limits.
+#define MAX_EIS_WIDTH 1920
+#define MAX_EIS_HEIGHT 1080
+
+#define MAX_RAW_STREAMS        1
+#define MAX_STALLING_STREAMS   1
+#define MAX_PROCESSED_STREAMS  3
+/* Batch mode is enabled only if FPS set is equal to or greater than this */
+#define MIN_FPS_FOR_BATCH_MODE (120)
+#define PREVIEW_FPS_FOR_HFR    (30)
+#define DEFAULT_VIDEO_FPS      (30.0)
+#define MAX_HFR_BATCH_SIZE     (8)
+#define REGIONS_TUPLE_COUNT    5
+#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
+#define BURST_REPROCESS_PERF_TIME_OUT  (1000) // milliseconds
+// Set a threshold for detection of missing buffers //seconds
+#define MISSING_REQUEST_BUF_TIMEOUT 3
+#define FLUSH_TIMEOUT 3
+#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
+
+// Post-processing feature mask bundled for HAL3 (union of CAM_QCOM_FEATURE_* bits).
+#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
+                                              CAM_QCOM_FEATURE_CROP |\
+                                              CAM_QCOM_FEATURE_ROTATION |\
+                                              CAM_QCOM_FEATURE_SHARPNESS |\
+                                              CAM_QCOM_FEATURE_SCALE |\
+                                              CAM_QCOM_FEATURE_CAC |\
+                                              CAM_QCOM_FEATURE_CDS )
+
+#define TIMEOUT_NEVER -1
+
+// Global per-camera capability and static-metadata caches (indexed by camera id).
+cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
+const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
+extern pthread_mutex_t gCamLock;
+volatile uint32_t gCamHal3LogLevel = 1;
+extern uint8_t gNumCameraSessions;
+
+// Translation tables between Android camera_metadata enum values and the
+// HAL's cam_* enum values.
+const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
+    {"On",  CAM_CDS_MODE_ON},
+    {"Off", CAM_CDS_MODE_OFF},
+    {"Auto",CAM_CDS_MODE_AUTO}
+};
+
+const QCamera3HardwareInterface::QCameraMap<
+        camera_metadata_enum_android_control_effect_mode_t,
+        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
+    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
+    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
+    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
+    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
+    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
+    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
+    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
+    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
+    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
+};
+
+const QCamera3HardwareInterface::QCameraMap<
+        camera_metadata_enum_android_control_awb_mode_t,
+        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
+    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
+    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
+    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
+    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
+    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
+    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
+    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
+    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
+    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
+};
+
+const QCamera3HardwareInterface::QCameraMap<
+        camera_metadata_enum_android_control_scene_mode_t,
+        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
+    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
+    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
+    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
+    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
+    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
+    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
+    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
+    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
+    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
+    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
+    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
+    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
+    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
+    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
+    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
+    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
+};
+
+// NOTE(review): ANDROID_CONTROL_AF_MODE_OFF appears twice (OFF and FIXED);
+// presumably HAL->Android lookups resolve to the first match, mirroring the
+// ordering rule documented above REFERENCE_ILLUMINANT_MAP — confirm.
+const QCamera3HardwareInterface::QCameraMap<
+        camera_metadata_enum_android_control_af_mode_t,
+        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
+    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
+    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
+    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
+    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
+    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
+    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
+    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
+};
+
+const QCamera3HardwareInterface::QCameraMap<
+        camera_metadata_enum_android_color_correction_aberration_mode_t,
+        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
+    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
+            CAM_COLOR_CORRECTION_ABERRATION_OFF },
+    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
+            CAM_COLOR_CORRECTION_ABERRATION_FAST },
+    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
+            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
+};
+
+const QCamera3HardwareInterface::QCameraMap<
+        camera_metadata_enum_android_control_ae_antibanding_mode_t,
+        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
+    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
+    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
+    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
+    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
+};
+
+const QCamera3HardwareInterface::QCameraMap<
+        camera_metadata_enum_android_control_ae_mode_t,
+        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
+    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
+    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
+    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
+    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
+    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
+};
+
+const QCamera3HardwareInterface::QCameraMap<
+        camera_metadata_enum_android_flash_mode_t,
+        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
+    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
+    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
+    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
+};
+
+const QCamera3HardwareInterface::QCameraMap<
+        camera_metadata_enum_android_statistics_face_detect_mode_t,
+        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
+    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
+    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
+    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
+};
+
+const QCamera3HardwareInterface::QCameraMap<
+        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
+        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
+    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
+      CAM_FOCUS_UNCALIBRATED },
+    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
+      CAM_FOCUS_APPROXIMATE },
+    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
+      CAM_FOCUS_CALIBRATED }
+};
+
+const QCamera3HardwareInterface::QCameraMap<
+        camera_metadata_enum_android_lens_state_t,
+        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
+    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
+    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
+};
+
+// Flat (width, height) pairs of supported JPEG thumbnail sizes.
+const int32_t available_thumbnail_sizes[] = {0, 0,
+                                             176, 144,
+                                             240, 144,
+                                             256, 144,
+                                             240, 160,
+                                             256, 154,
+                                             240, 240,
+                                             320, 240};
+
+const QCamera3HardwareInterface::QCameraMap<
+        camera_metadata_enum_android_sensor_test_pattern_mode_t,
+        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
+    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
+    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
+    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
+    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
+    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
+    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
+};
+
+/* Since there is no mapping for all the options some Android enum are not listed.
+ * Also, the order in this list is important because while mapping from HAL to Android it will
+ * traverse from lower to higher index which means that for HAL values that are map to different
+ * Android values, the traverse logic will select the first one found.
+ */
+const QCamera3HardwareInterface::QCameraMap<
+        camera_metadata_enum_android_sensor_reference_illuminant1_t,
+        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
+};
+
+const QCamera3HardwareInterface::QCameraMap<
+        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
+    { 60, CAM_HFR_MODE_60FPS},
+    { 90, CAM_HFR_MODE_90FPS},
+    { 120, CAM_HFR_MODE_120FPS},
+    { 150, CAM_HFR_MODE_150FPS},
+    { 180, CAM_HFR_MODE_180FPS},
+    { 210, CAM_HFR_MODE_210FPS},
+    { 240, CAM_HFR_MODE_240FPS},
+    { 480, CAM_HFR_MODE_480FPS},
+};
+
+// camera3_device_ops vtable handed to the camera framework; unsupported
+// entries (register_stream_buffers, get_metadata_vendor_tag_ops) are NULL.
+camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
+    .initialize                         = QCamera3HardwareInterface::initialize,
+    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
+    .register_stream_buffers            = NULL,
+    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
+    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
+    .get_metadata_vendor_tag_ops        = NULL,
+    .dump                               = QCamera3HardwareInterface::dump,
+    .flush                              = QCamera3HardwareInterface::flush,
+    .reserved                           = {0},
+};
+
+// initialise to some default value
+uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
+
+/*===========================================================================
+ * FUNCTION   : QCamera3HardwareInterface
+ *
+ * DESCRIPTION: constructor of QCamera3HardwareInterface
+ *
+ * PARAMETERS :
+ *   @cameraId  : camera ID
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
+        const camera_module_callbacks_t *callbacks)
+    : mCameraId(cameraId),
+      mCameraHandle(NULL),
+      mCameraInitialized(false),
+      mCallbackOps(NULL),
+      mMetadataChannel(NULL),
+      mPictureChannel(NULL),
+      mRawChannel(NULL),
+      mSupportChannel(NULL),
+      mAnalysisChannel(NULL),
+      mRawDumpChannel(NULL),
+      mDummyBatchChannel(NULL),
+      m_perfLock(),
+      mCommon(),
+      mChannelHandle(0),
+      mFirstConfiguration(true),
+      mFlush(false),
+      mFlushPerf(false),
+      mParamHeap(NULL),
+      mParameters(NULL),
+      mPrevParameters(NULL),
+      m_bIsVideo(false),
+      m_bIs4KVideo(false),
+      m_bEisSupportedSize(false),
+      m_bEisEnable(false),
+      m_MobicatMask(0),
+      mMinProcessedFrameDuration(0),
+      mMinJpegFrameDuration(0),
+      mMinRawFrameDuration(0),
+      mMetaFrameCount(0U),
+      mUpdateDebugLevel(false),
+      mCallbacks(callbacks),
+      mCaptureIntent(0),
+      mCacMode(0),
+      mBatchSize(0),
+      mToBeQueuedVidBufs(0),
+      mHFRVideoFps(DEFAULT_VIDEO_FPS),
+      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
+      mFirstFrameNumberInBatch(0),
+      mNeedSensorRestart(false),
+      mLdafCalibExist(false),
+      mPowerHintEnabled(false),
+      mLastCustIntentFrmNum(-1),
+      mState(CLOSED),
+      mIsDeviceLinked(false),
+      mIsMainCamera(true),
+      mLinkedCameraId(0),
+      m_pRelCamSyncHeap(NULL),
+      m_pRelCamSyncBuf(NULL)
+{
+    getLogLevel();
+    m_perfLock.lock_init();
+    mCommon.init(gCamCapability[cameraId]);
+    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
+    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
+    mCameraDevice.common.close = close_camera_device;
+    mCameraDevice.ops = &mCameraOps;
+    mCameraDevice.priv = this;
+    gCamCapability[cameraId]->version = CAM_HAL_V3;
+    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
+    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
+    gCamCapability[cameraId]->min_num_pp_bufs = 3;
+
+    pthread_cond_init(&mBuffersCond, NULL);
+
+    pthread_cond_init(&mRequestCond, NULL);
+    mPendingLiveRequest = 0;
+    mCurrentRequestId = -1;
+    pthread_mutex_init(&mMutex, NULL);
+
+    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
+        mDefaultMetadata[i] = NULL;
+
+    // Getting system props of different kinds
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.raw.dump", prop, "0");
+    mEnableRawDump = atoi(prop);
+    if (mEnableRawDump)
+        LOGD("Raw dump from Camera HAL enabled");
+
+    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
+    memset(mLdafCalib, 0, sizeof(mLdafCalib));
+
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.tnr.preview", prop, "0");
+    m_bTnrPreview = (uint8_t)atoi(prop);
+
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.tnr.video", prop, "0");
+    m_bTnrVideo = (uint8_t)atoi(prop);
+
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.avtimer.debug", prop, "0");
+    m_debug_avtimer = (uint8_t)atoi(prop);
+
+    //Load and read GPU library.
+    lib_surface_utils = NULL;
+    LINK_get_surface_pixel_alignment = NULL;
+    mSurfaceStridePadding = CAM_PAD_TO_32;
+    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
+    if (lib_surface_utils) {
+        *(void **)&LINK_get_surface_pixel_alignment =
+                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
+         if (LINK_get_surface_pixel_alignment) {
+             mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
+         }
+         dlclose(lib_surface_utils);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera3HardwareInterface
+ *
+ * DESCRIPTION: destructor of QCamera3HardwareInterface
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3HardwareInterface::~QCamera3HardwareInterface()
+{
+    LOGD("E");
+
+    /* Turn off current power hint before acquiring perfLock in case they
+     * conflict with each other */
+    disablePowerHint();
+
+    m_perfLock.lock_acq();
+
+    /* We need to stop all streams before deleting any stream */
+    if (mRawDumpChannel) {
+        mRawDumpChannel->stop();
+    }
+
+    // NOTE: 'camera3_stream_t *' objects are already freed at
+    //        this stage by the framework
+    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
+        it != mStreamInfo.end(); it++) {
+        QCamera3ProcessingChannel *channel = (*it)->channel;
+        if (channel) {
+            channel->stop();
+        }
+    }
+    if (mSupportChannel)
+        mSupportChannel->stop();
+
+    if (mAnalysisChannel) {
+        mAnalysisChannel->stop();
+    }
+    if (mMetadataChannel) {
+        mMetadataChannel->stop();
+    }
+    if (mChannelHandle) {
+        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
+                mChannelHandle);
+        LOGD("stopping channel %d", mChannelHandle);
+    }
+
+    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
+        it != mStreamInfo.end(); it++) {
+        QCamera3ProcessingChannel *channel = (*it)->channel;
+        if (channel)
+            delete channel;
+        free (*it);
+    }
+    if (mSupportChannel) {
+        delete mSupportChannel;
+        mSupportChannel = NULL;
+    }
+
+    if (mAnalysisChannel) {
+        delete mAnalysisChannel;
+        mAnalysisChannel = NULL;
+    }
+    if (mRawDumpChannel) {
+        delete mRawDumpChannel;
+        mRawDumpChannel = NULL;
+    }
+    if (mDummyBatchChannel) {
+        delete mDummyBatchChannel;
+        mDummyBatchChannel = NULL;
+    }
+
+    mPictureChannel = NULL;
+
+    if (mMetadataChannel) {
+        delete mMetadataChannel;
+        mMetadataChannel = NULL;
+    }
+
+    /* Clean up all channels */
+    if (mCameraInitialized) {
+        if(!mFirstConfiguration){
+            //send the last unconfigure
+            cam_stream_size_info_t stream_config_info;
+            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
+            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
+            stream_config_info.buffer_info.max_buffers =
+                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
+            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
+                    stream_config_info);
+            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
+            if (rc < 0) {
+                LOGE("set_parms failed for unconfigure");
+            }
+        }
+        deinitParameters();
+    }
+
+    if (mChannelHandle) {
+        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
+                mChannelHandle);
+        LOGH("deleting channel %d", mChannelHandle);
+        mChannelHandle = 0;
+    }
+
+    if (mState != CLOSED)
+        closeCamera();
+
+    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
+        req.mPendingBufferList.clear();
+    }
+    mPendingBuffersMap.mPendingBuffersInRequest.clear();
+    mPendingReprocessResultList.clear();
+    for (pendingRequestIterator i = mPendingRequestsList.begin();
+            i != mPendingRequestsList.end();) {
+        i = erasePendingRequest(i);
+    }
+    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
+        if (mDefaultMetadata[i])
+            free_camera_metadata(mDefaultMetadata[i]);
+
+    m_perfLock.lock_rel();
+    m_perfLock.lock_deinit();
+
+    pthread_cond_destroy(&mRequestCond);
+
+    pthread_cond_destroy(&mBuffersCond);
+
+    pthread_mutex_destroy(&mMutex);
+    LOGD("X");
+}
+
+/*===========================================================================
+ * FUNCTION   : erasePendingRequest
+ *
+ * DESCRIPTION: function to erase a desired pending request after freeing any
+ *              allocated memory
+ *
+ * PARAMETERS :
+ *   @i       : iterator pointing to pending request to be erased
+ *
+ * RETURN     : iterator pointing to the next request
+ *==========================================================================*/
+QCamera3HardwareInterface::pendingRequestIterator
+        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
+{
+    if (i->input_buffer != NULL) {
+        free(i->input_buffer);
+        i->input_buffer = NULL;
+    }
+    if (i->settings != NULL)
+        free_camera_metadata((camera_metadata_t*)i->settings);
+    return mPendingRequestsList.erase(i);
+}
+
+/*===========================================================================
+ * FUNCTION   : camEvtHandle
+ *
+ * DESCRIPTION: Function registered to mm-camera-interface to handle events
+ *
+ * PARAMETERS :
+ *   @camera_handle : interface layer camera handle
+ *   @evt           : ptr to event
+ *   @user_data     : user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
+                                          mm_camera_event_t *evt,
+                                          void *user_data)
+{
+    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
+    if (obj && evt) {
+        switch(evt->server_event_type) {
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                pthread_mutex_lock(&obj->mMutex);
+                obj->mState = ERROR;
+                pthread_mutex_unlock(&obj->mMutex);
+                LOGE("Fatal, camera daemon died");
+                break;
+
+            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
+                LOGD("HAL got request pull from Daemon");
+                pthread_mutex_lock(&obj->mMutex);
+                obj->mWokenUpByDaemon = true;
+                obj->unblockRequestIfNecessary();
+                pthread_mutex_unlock(&obj->mMutex);
+                break;
+
+            default:
+                LOGW("Warning: Unhandled event %d",
+                        evt->server_event_type);
+                break;
+        }
+    } else {
+        LOGE("NULL user_data/evt");
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : openCamera
+ *
+ * DESCRIPTION: open camera
+ *
+ * PARAMETERS :
+ *   @hw_device  : double ptr for camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
+{
+    int rc = 0;
+    if (mState != CLOSED) {
+        *hw_device = NULL;
+        return PERMISSION_DENIED;
+    }
+
+    m_perfLock.lock_acq();
+    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
+             mCameraId);
+
+    rc = openCamera();
+    if (rc == 0) {
+        *hw_device = &mCameraDevice.common;
+    } else
+        *hw_device = NULL;
+
+    m_perfLock.lock_rel();
+    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
+             mCameraId, rc);
+
+    if (rc == NO_ERROR) {
+        mState = OPENED;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : openCamera
+ *
+ * DESCRIPTION: open camera
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera3HardwareInterface::openCamera()
+{
+    int rc = 0;
+    char value[PROPERTY_VALUE_MAX];
+
+    KPI_ATRACE_CALL();
+    if (mCameraHandle) {
+        LOGE("Failure: Camera already opened");
+        return ALREADY_EXISTS;
+    }
+
+    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
+    if (rc < 0) {
+        LOGE("Failed to reserve flash for camera id: %d",
+                mCameraId);
+        return UNKNOWN_ERROR;
+    }
+
+    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
+    if (rc) {
+        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
+        return rc;
+    }
+
+    if (!mCameraHandle) {
+        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
+        return -ENODEV;
+    }
+
+    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
+            camEvtHandle, (void *)this);
+
+    if (rc < 0) {
+        LOGE("Error, failed to register event callback");
+        /* Not closing camera here since it is already handled in destructor */
+        return FAILED_TRANSACTION;
+    }
+
+    mExifParams.debug_params =
+            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
+    if (mExifParams.debug_params) {
+        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
+    } else {
+        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
+        return NO_MEMORY;
+    }
+    mFirstConfiguration = true;
+
+    //Notify display HAL that a camera session is active.
+    //But avoid calling the same during bootup because camera service might open/close
+    //cameras at boot time during its initialization and display service will also internally
+    //wait for camera service to initialize first while calling this display API, resulting in a
+    //deadlock situation. Since boot time camera open/close calls are made only to fetch
+    //capabilities, no need of this display bw optimization.
+    //Use "service.bootanim.exit" property to know boot status.
+    property_get("service.bootanim.exit", value, "0");
+    if (atoi(value) == 1) {
+        pthread_mutex_lock(&gCamLock);
+        if (gNumCameraSessions++ == 0) {
+            setCameraLaunchStatus(true);
+        }
+        pthread_mutex_unlock(&gCamLock);
+    }
+
+    //fill the session id needed while linking dual cam
+    pthread_mutex_lock(&gCamLock);
+    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
+        &sessionId[mCameraId]);
+    pthread_mutex_unlock(&gCamLock);
+
+    if (rc < 0) {
+        LOGE("Error, failed to get sessiion id");
+        return UNKNOWN_ERROR;
+    } else {
+        //Allocate related cam sync buffer
+        //this is needed for the payload that goes along with bundling cmd for related
+        //camera use cases
+        m_pRelCamSyncHeap = new QCamera3HeapMemory(1);
+        rc = m_pRelCamSyncHeap->allocate(sizeof(cam_sync_related_sensors_event_info_t));
+        if(rc != OK) {
+            rc = NO_MEMORY;
+            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
+            return NO_MEMORY;
+        }
+
+        //Map memory for related cam sync buffer
+        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
+                CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF,
+                m_pRelCamSyncHeap->getFd(0),
+                sizeof(cam_sync_related_sensors_event_info_t),
+                m_pRelCamSyncHeap->getPtr(0));
+        if(rc < 0) {
+            LOGE("Dualcam: failed to map Related cam sync buffer");
+            rc = FAILED_TRANSACTION;
+            return NO_MEMORY;
+        }
+        m_pRelCamSyncBuf =
+                (cam_sync_related_sensors_event_info_t*) DATA_PTR(m_pRelCamSyncHeap,0);
+    }
+
+    LOGH("mCameraId=%d",mCameraId);
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : closeCamera
+ *
+ * DESCRIPTION: close camera
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera3HardwareInterface::closeCamera()
+{
+    KPI_ATRACE_CALL();
+    int rc = NO_ERROR;
+    char value[PROPERTY_VALUE_MAX];
+
+    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
+             mCameraId);
+    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
+    mCameraHandle = NULL;
+
+    //reset session id to some invalid id
+    pthread_mutex_lock(&gCamLock);
+    sessionId[mCameraId] = 0xDEADBEEF;
+    pthread_mutex_unlock(&gCamLock);
+
+    //Notify display HAL that there is no active camera session
+    //but avoid calling the same during bootup. Refer to openCamera
+    //for more details.
+    property_get("service.bootanim.exit", value, "0");
+    if (atoi(value) == 1) {
+        pthread_mutex_lock(&gCamLock);
+        if (--gNumCameraSessions == 0) {
+            setCameraLaunchStatus(false);
+        }
+        pthread_mutex_unlock(&gCamLock);
+    }
+
+    if (NULL != m_pRelCamSyncHeap) {
+        m_pRelCamSyncHeap->deallocate();
+        delete m_pRelCamSyncHeap;
+        m_pRelCamSyncHeap = NULL;
+        m_pRelCamSyncBuf = NULL;
+    }
+
+    if (mExifParams.debug_params) {
+        free(mExifParams.debug_params);
+        mExifParams.debug_params = NULL;
+    }
+    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
+        LOGW("Failed to release flash for camera id: %d",
+                mCameraId);
+    }
+    mState = CLOSED;
+    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
+         mCameraId, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : initialize
+ *
+ * DESCRIPTION: Initialize frameworks callback functions: validate the device
+ *              state, initialize HAL parameters, store the framework callback
+ *              ops and add the backend channel.
+ *
+ * PARAMETERS :
+ *   @callback_ops : callback function to frameworks
+ *
+ * RETURN     : 0 on success, -ENODEV/-ENOMEM or initParameters() error
+ *              otherwise
+ *
+ *==========================================================================*/
+int QCamera3HardwareInterface::initialize(
+        const struct camera3_callback_ops *callback_ops)
+{
+    ATRACE_CALL();
+    int rc;
+
+    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
+    pthread_mutex_lock(&mMutex);
+
+    // Validate current state: initialize() is only legal right after open
+    switch (mState) {
+        case OPENED:
+            /* valid state */
+            break;
+        default:
+            LOGE("Invalid state %d", mState);
+            rc = -ENODEV;
+            goto err1;
+    }
+
+    rc = initParameters();
+    if (rc < 0) {
+        LOGE("initParamters failed %d", rc);
+        goto err1;
+    }
+    mCallbackOps = callback_ops;
+
+    mChannelHandle = mCameraHandle->ops->add_channel(
+            mCameraHandle->camera_handle, NULL, NULL, this);
+    if (mChannelHandle == 0) {
+        LOGE("add_channel failed");
+        rc = -ENOMEM;
+        // Use the common unlock-and-return path instead of duplicating the
+        // unlock/return sequence inline (consistent with the other errors).
+        goto err1;
+    }
+
+    pthread_mutex_unlock(&mMutex);
+    mCameraInitialized = true;
+    mState = INITIALIZED;
+    LOGI("X");
+    return 0;
+
+err1:
+    pthread_mutex_unlock(&mMutex);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : validateStreamDimensions
+ *
+ * DESCRIPTION: Check that every requested stream size matches one advertised
+ *              by the sensor capability tables (raw table for RAW formats,
+ *              picture-size table for BLOB/YUV), taking 90/270-degree
+ *              rotation into account. Also rejects multiple input streams.
+ *
+ * PARAMETERS :
+ *   @stream_list : streams to be configured
+ *
+ * RETURN     : NO_ERROR on success, -EINVAL if any stream has an
+ *              unsupported size or multiple input streams are requested
+ *
+ *==========================================================================*/
+int QCamera3HardwareInterface::validateStreamDimensions(
+        camera3_stream_configuration_t *streamList)
+{
+    int rc = NO_ERROR;
+    size_t count = 0;
+
+    camera3_stream_t *inputStream = NULL;
+    /*
+     * Loop through all streams to find the input stream, if it exists.
+     * More than one input stream is an invalid configuration.
+     */
+    for (size_t i = 0; i< streamList->num_streams; i++) {
+        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
+            if (inputStream != NULL) {
+                LOGE("Error, Multiple input streams requested");
+                return -EINVAL;
+            }
+            inputStream = streamList->streams[i];
+        }
+    }
+    /*
+    * Loop through all streams requested in configuration
+    * Check if unsupported sizes have been requested on any of them
+    */
+    for (size_t j = 0; j < streamList->num_streams; j++) {
+        bool sizeFound = false;
+        camera3_stream_t *newStream = streamList->streams[j];
+
+        // For 90/270-degree rotation the validated dimensions are swapped,
+        // since the capability tables describe the un-rotated sensor output.
+        uint32_t rotatedHeight = newStream->height;
+        uint32_t rotatedWidth = newStream->width;
+        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
+                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
+            rotatedHeight = newStream->width;
+            rotatedWidth = newStream->height;
+        }
+
+        /*
+        * Sizes are different for each type of stream format check against
+        * appropriate table.
+        */
+        switch (newStream->format) {
+        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
+        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
+        case HAL_PIXEL_FORMAT_RAW10:
+            // RAW streams must exactly match an advertised raw dimension
+            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
+            for (size_t i = 0; i < count; i++) {
+                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
+                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
+                    sizeFound = true;
+                    break;
+                }
+            }
+            break;
+        case HAL_PIXEL_FORMAT_BLOB:
+            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
+            /* Verify set size against generated sizes table */
+            for (size_t i = 0; i < count; i++) {
+                if (((int32_t)rotatedWidth ==
+                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
+                        ((int32_t)rotatedHeight ==
+                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
+                    sizeFound = true;
+                    break;
+                }
+            }
+            break;
+        case HAL_PIXEL_FORMAT_YCbCr_420_888:
+        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+        default:
+            // ZSL/bidirectional/input streams may also match the full active
+            // array size in addition to the picture-size table below.
+            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
+                    || newStream->stream_type == CAMERA3_STREAM_INPUT
+                    || IS_USAGE_ZSL(newStream->usage)) {
+                if (((int32_t)rotatedWidth ==
+                                gCamCapability[mCameraId]->active_array_size.width) &&
+                                ((int32_t)rotatedHeight ==
+                                gCamCapability[mCameraId]->active_array_size.height)) {
+                    sizeFound = true;
+                    break;
+                }
+                /* We could potentially break here to enforce ZSL stream
+                 * set from frameworks always is full active array size
+                 * but it is not clear from the spc if framework will always
+                 * follow that, also we have logic to override to full array
+                 * size, so keeping the logic lenient at the moment
+                 */
+            }
+            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
+                    MAX_SIZES_CNT);
+            for (size_t i = 0; i < count; i++) {
+                if (((int32_t)rotatedWidth ==
+                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
+                            ((int32_t)rotatedHeight ==
+                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
+                    sizeFound = true;
+                    break;
+                }
+            }
+            break;
+        } /* End of switch(newStream->format) */
+
+        /* We error out even if a single stream has unsupported size set */
+        if (!sizeFound) {
+            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
+                    rotatedWidth, rotatedHeight, newStream->format,
+                    gCamCapability[mCameraId]->active_array_size.width,
+                    gCamCapability[mCameraId]->active_array_size.height);
+            rc = -EINVAL;
+            break;
+        }
+    } /* End of for each stream */
+    return rc;
+}
+
+/*==============================================================================
+ * FUNCTION   : isSupportChannelNeeded
+ *
+ * DESCRIPTION: Simple heuristic func to determine if support channels is needed
+ *
+ * PARAMETERS :
+ *   @stream_list : streams to be configured
+ *   @stream_config_info : the config info for streams to be configured
+ *
+ * RETURN     : Boolen true/false decision
+ *
+ *==========================================================================*/
+bool QCamera3HardwareInterface::isSupportChannelNeeded(
+        camera3_stream_configuration_t *streamList,
+        cam_stream_size_info_t stream_config_info)
+{
+    /* First check whether any non-analysis stream feeds the PProc pipeline */
+    bool ppStreamPresent = false;
+    for (uint32_t idx = 0; idx < stream_config_info.num_streams; idx++) {
+        bool isAnalysis =
+                (stream_config_info.type[idx] == CAM_STREAM_TYPE_ANALYSIS);
+        bool hasPPMask =
+                (stream_config_info.postprocess_mask[idx] != CAM_QCOM_FEATURE_NONE);
+        if (!isAnalysis && hasPPMask) {
+            ppStreamPresent = true;
+            break;
+        }
+    }
+
+    /* PProc pipeline has no stream attached: a support channel is needed */
+    if (!ppStreamPresent) {
+        return true;
+    }
+
+    /* A dummy stream is still needed when every requested stream is raw or
+     * JPEG; any other format means no support channel is required. */
+    for (uint32_t idx = 0; idx < streamList->num_streams; idx++) {
+        int fmt = streamList->streams[idx]->format;
+        if ((fmt != HAL_PIXEL_FORMAT_RAW_OPAQUE) &&
+                (fmt != HAL_PIXEL_FORMAT_RAW10) &&
+                (fmt != HAL_PIXEL_FORMAT_RAW16) &&
+                (fmt != HAL_PIXEL_FORMAT_BLOB)) {
+            return false;
+        }
+    }
+    return true;
+}
+
+/*==============================================================================
+ * FUNCTION   : getSensorOutputSize
+ *
+ * DESCRIPTION: Get sensor output size based on current stream configuration:
+ *              push the maximum configured stream dimension to the backend,
+ *              then query back the resulting raw (sensor) dimension.
+ *
+ * PARAMETERS :
+ *   @sensor_dim : sensor output dimension (output)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *
+ *==========================================================================*/
+int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
+{
+    int32_t rc = NO_ERROR;
+
+    // Max dimension is taken per-axis across all configured streams, so it
+    // may not equal any single stream's size.
+    cam_dimension_t max_dim = {0, 0};
+    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
+        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
+            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
+        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
+            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
+    }
+
+    // mParameters is reused as a scratch batch: clear, set, then query
+    clear_metadata_buffer(mParameters);
+
+    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
+            max_dim);
+    if (rc != NO_ERROR) {
+        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
+        return rc;
+    }
+
+    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
+    if (rc != NO_ERROR) {
+        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
+        return rc;
+    }
+
+    // Query the raw dimension the backend selected for the max dimension set
+    clear_metadata_buffer(mParameters);
+    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
+
+    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
+            mParameters);
+    if (rc != NO_ERROR) {
+        LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
+        return rc;
+    }
+
+    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
+    LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);
+
+    return rc;
+}
+
+/*==============================================================================
+ * FUNCTION   : enablePowerHint
+ *
+ * DESCRIPTION: enable single powerhint for preview and different video modes.
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : NULL
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::enablePowerHint()
+{
+    // Idempotent: only issue the hint on the enabled->disabled transition
+    if (mPowerHintEnabled) {
+        return;
+    }
+    m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, true);
+    mPowerHintEnabled = true;
+}
+
+/*==============================================================================
+ * FUNCTION   : disablePowerHint
+ *
+ * DESCRIPTION: disable current powerhint.
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : NULL
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::disablePowerHint()
+{
+    // Idempotent: nothing to do when no hint is currently active
+    if (!mPowerHintEnabled) {
+        return;
+    }
+    m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
+    mPowerHintEnabled = false;
+}
+
+/*==============================================================================
+ * FUNCTION   : addToPPFeatureMask
+ *
+ * DESCRIPTION: add additional features to pp feature mask based on
+ *              stream type and usecase
+ *
+ * PARAMETERS :
+ *   @stream_format : stream type for feature mask
+ *   @stream_idx : stream idx within postprocess_mask list to change
+ *
+ * RETURN     : NULL
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
+        uint32_t stream_idx)
+{
+    char feature_mask_value[PROPERTY_VALUE_MAX];
+    cam_feature_mask_t feature_mask;
+    int args_converted;
+    int property_len;
+
+    /* Get feature mask from property; accepts "0x..." hex or plain decimal */
+    property_len = property_get("persist.camera.hal3.feature",
+            feature_mask_value, "0");
+    // NOTE(review): "%llx"/"%lld" assume cam_feature_mask_t is exactly
+    // long-long sized; confirm against cam_types.h — a width mismatch here
+    // is undefined behavior. Also "%lld" parses signed into an unsigned-style
+    // bit mask; verify negative inputs are not expected.
+    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
+            (feature_mask_value[1] == 'x')) {
+        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
+    } else {
+        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
+    }
+    if (1 != args_converted) {
+        // Unparseable property: treat as no extra features requested
+        feature_mask = 0;
+        LOGE("Wrong feature mask %s", feature_mask_value);
+        return;
+    }
+
+    switch (stream_format) {
+    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
+        /* Add LLVD to pp feature mask only if video hint is enabled.
+         * SW TNR takes precedence over LLVD when both bits are set. */
+        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
+            mStreamConfigInfo.postprocess_mask[stream_idx]
+                    |= CAM_QTI_FEATURE_SW_TNR;
+            LOGH("Added SW TNR to pp feature mask");
+        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
+            mStreamConfigInfo.postprocess_mask[stream_idx]
+                    |= CAM_QCOM_FEATURE_LLVD;
+            LOGH("Added LLVD SeeMore to pp feature mask");
+        }
+        break;
+    }
+    default:
+        break;
+    }
+    LOGD("PP feature mask %llx",
+            mStreamConfigInfo.postprocess_mask[stream_idx]);
+}
+
+/*==============================================================================
+ * FUNCTION   : updateFpsInPreviewBuffer
+ *
+ * DESCRIPTION: update FPS information in preview buffer. Each pending preview
+ *              buffer of the request matching @frame_number gets the max FPS
+ *              from the metadata written into its gralloc private handle.
+ *
+ * PARAMETERS :
+ *   @metadata    : pointer to metadata buffer
+ *   @frame_number: frame_number to look for in pending buffer list
+ *
+ * RETURN     : None
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
+        uint32_t frame_number)
+{
+    // Mark all pending buffers for this particular request
+    // with corresponding framerate information
+    for (List<PendingBuffersInRequest>::iterator req =
+            mPendingBuffersMap.mPendingBuffersInRequest.begin();
+            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
+        // frame_number is a per-request property: test it once here instead
+        // of re-evaluating it (and the channel lookup) for every buffer.
+        if (req->frame_number != frame_number) {
+            continue;
+        }
+        for(List<PendingBufferInfo>::iterator j =
+                req->mPendingBufferList.begin();
+                j != req->mPendingBufferList.end(); j++) {
+            QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
+            if (channel->getStreamTypeMask() &
+                    (1U << CAM_STREAM_TYPE_PREVIEW)) {
+                IF_META_AVAILABLE(cam_fps_range_t, float_range,
+                    CAM_INTF_PARM_FPS_RANGE, metadata) {
+                    typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
+                    struct private_handle_t *priv_handle =
+                        (struct private_handle_t *)(*(j->buffer));
+                    setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
+                }
+            }
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : configureStreams
+ *
+ * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
+ *              and output streams.
+ *
+ * PARAMETERS :
+ *   @stream_list : streams to be configured
+ *
+ * RETURN     : status of configureStreamsPerfLocked()
+ *
+ *==========================================================================*/
+int QCamera3HardwareInterface::configureStreams(
+        camera3_stream_configuration_t *streamList)
+{
+    ATRACE_CALL();
+
+    // Hold the performance lock across the whole stream configuration;
+    // the real work happens in configureStreamsPerfLocked().
+    m_perfLock.lock_acq();
+    int rc = configureStreamsPerfLocked(streamList);
+    m_perfLock.lock_rel();
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : configureStreamsPerfLocked
+ *
+ * DESCRIPTION: configureStreams while perfLock is held.
+ *
+ * PARAMETERS :
+ *   @stream_list : streams to be configured
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera3HardwareInterface::configureStreamsPerfLocked(
+        camera3_stream_configuration_t *streamList)
+{
+    ATRACE_CALL();
+    int rc = 0;
+
+    // Sanity check stream_list
+    if (streamList == NULL) {
+        LOGE("NULL stream configuration");
+        return BAD_VALUE;
+    }
+    if (streamList->streams == NULL) {
+        LOGE("NULL stream list");
+        return BAD_VALUE;
+    }
+
+    if (streamList->num_streams < 1) {
+        LOGE("Bad number of streams requested: %d",
+                streamList->num_streams);
+        return BAD_VALUE;
+    }
+
+    if (streamList->num_streams >= MAX_NUM_STREAMS) {
+        LOGE("Maximum number of streams %d exceeded: %d",
+                MAX_NUM_STREAMS, streamList->num_streams);
+        return BAD_VALUE;
+    }
+
+    mOpMode = streamList->operation_mode;
+    LOGD("mOpMode: %d", mOpMode);
+
+    /* first invalidate all the steams in the mStreamList
+     * if they appear again, they will be validated */
+    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
+            it != mStreamInfo.end(); it++) {
+        QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
+        if (channel) {
+          channel->stop();
+        }
+        (*it)->status = INVALID;
+    }
+
+    if (mRawDumpChannel) {
+        mRawDumpChannel->stop();
+        delete mRawDumpChannel;
+        mRawDumpChannel = NULL;
+    }
+
+    if (mSupportChannel)
+        mSupportChannel->stop();
+
+    if (mAnalysisChannel) {
+        mAnalysisChannel->stop();
+    }
+    if (mMetadataChannel) {
+        /* If content of mStreamInfo is not 0, there is metadata stream */
+        mMetadataChannel->stop();
+    }
+    if (mChannelHandle) {
+        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
+                mChannelHandle);
+        LOGD("stopping channel %d", mChannelHandle);
+    }
+
+    pthread_mutex_lock(&mMutex);
+
+    // Check state
+    switch (mState) {
+        case INITIALIZED:
+        case CONFIGURED:
+        case STARTED:
+            /* valid state */
+            break;
+        default:
+            LOGE("Invalid state %d", mState);
+            pthread_mutex_unlock(&mMutex);
+            return -ENODEV;
+    }
+
+    /* Check whether we have video stream */
+    m_bIs4KVideo = false;
+    m_bIsVideo = false;
+    m_bEisSupportedSize = false;
+    m_bTnrEnabled = false;
+    bool isZsl = false;
+    uint32_t videoWidth = 0U;
+    uint32_t videoHeight = 0U;
+    size_t rawStreamCnt = 0;
+    size_t stallStreamCnt = 0;
+    size_t processedStreamCnt = 0;
+    // Number of streams on ISP encoder path
+    size_t numStreamsOnEncoder = 0;
+    size_t numYuv888OnEncoder = 0;
+    bool bYuv888OverrideJpeg = false;
+    cam_dimension_t largeYuv888Size = {0, 0};
+    cam_dimension_t maxViewfinderSize = {0, 0};
+    bool bJpegExceeds4K = false;
+    bool bJpegOnEncoder = false;
+    bool bUseCommonFeatureMask = false;
+    cam_feature_mask_t commonFeatureMask = 0;
+    bool bSmallJpegSize = false;
+    uint32_t width_ratio;
+    uint32_t height_ratio;
+    maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
+    camera3_stream_t *inputStream = NULL;
+    bool isJpeg = false;
+    cam_dimension_t jpegSize = {0, 0};
+
+    cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
+
+    /*EIS configuration*/
+    bool eisSupported = false;
+    bool oisSupported = false;
+    int32_t margin_index = -1;
+    uint8_t eis_prop_set;
+    uint32_t maxEisWidth = 0;
+    uint32_t maxEisHeight = 0;
+
+    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
+
+    size_t count = IS_TYPE_MAX;
+    count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
+    for (size_t i = 0; i < count; i++) {
+        if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
+            (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0))
+        {
+            eisSupported = true;
+            margin_index = (int32_t)i;
+            break;
+        }
+    }
+
+    count = CAM_OPT_STAB_MAX;
+    count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
+    for (size_t i = 0; i < count; i++) {
+        if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
+            oisSupported = true;
+            break;
+        }
+    }
+
+    if (eisSupported) {
+        maxEisWidth = MAX_EIS_WIDTH;
+        maxEisHeight = MAX_EIS_HEIGHT;
+    }
+
+    /* EIS setprop control */
+    char eis_prop[PROPERTY_VALUE_MAX];
+    memset(eis_prop, 0, sizeof(eis_prop));
+    property_get("persist.camera.eis.enable", eis_prop, "0");
+    eis_prop_set = (uint8_t)atoi(eis_prop);
+
+    m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
+            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
+
+    /* stream configurations */
+    for (size_t i = 0; i < streamList->num_streams; i++) {
+        camera3_stream_t *newStream = streamList->streams[i];
+        LOGI("stream[%d] type = %d, format = %d, width = %d, "
+                "height = %d, rotation = %d, usage = 0x%x",
+                 i, newStream->stream_type, newStream->format,
+                newStream->width, newStream->height, newStream->rotation,
+                newStream->usage);
+        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
+                newStream->stream_type == CAMERA3_STREAM_INPUT){
+            isZsl = true;
+        }
+        if (newStream->stream_type == CAMERA3_STREAM_INPUT){
+            inputStream = newStream;
+        }
+
+        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
+            isJpeg = true;
+            jpegSize.width = newStream->width;
+            jpegSize.height = newStream->height;
+            if (newStream->width > VIDEO_4K_WIDTH ||
+                    newStream->height > VIDEO_4K_HEIGHT)
+                bJpegExceeds4K = true;
+        }
+
+        if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
+                (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
+            m_bIsVideo = true;
+            videoWidth = newStream->width;
+            videoHeight = newStream->height;
+            if ((VIDEO_4K_WIDTH <= newStream->width) &&
+                    (VIDEO_4K_HEIGHT <= newStream->height)) {
+                m_bIs4KVideo = true;
+            }
+            m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
+                                  (newStream->height <= maxEisHeight);
+        }
+        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
+                newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
+            switch (newStream->format) {
+            case HAL_PIXEL_FORMAT_BLOB:
+                stallStreamCnt++;
+                if (isOnEncoder(maxViewfinderSize, newStream->width,
+                        newStream->height)) {
+                    numStreamsOnEncoder++;
+                    bJpegOnEncoder = true;
+                }
+                width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
+                        newStream->width);
+                height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
+                        newStream->height);;
+                FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
+                        "FATAL: max_downscale_factor cannot be zero and so assert");
+                if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
+                    (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
+                    LOGH("Setting small jpeg size flag to true");
+                    bSmallJpegSize = true;
+                }
+                break;
+            case HAL_PIXEL_FORMAT_RAW10:
+            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
+            case HAL_PIXEL_FORMAT_RAW16:
+                rawStreamCnt++;
+                break;
+            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+                processedStreamCnt++;
+                if (isOnEncoder(maxViewfinderSize, newStream->width,
+                        newStream->height)) {
+                    if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
+                            !IS_USAGE_ZSL(newStream->usage)) {
+                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
+                    }
+                    numStreamsOnEncoder++;
+                }
+                break;
+            case HAL_PIXEL_FORMAT_YCbCr_420_888:
+                processedStreamCnt++;
+                if (isOnEncoder(maxViewfinderSize, newStream->width,
+                        newStream->height)) {
+                    // If Yuv888 size is not greater than 4K, set feature mask
+                    // to SUPERSET so that it support concurrent request on
+                    // YUV and JPEG.
+                    if (newStream->width <= VIDEO_4K_WIDTH &&
+                            newStream->height <= VIDEO_4K_HEIGHT) {
+                        commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
+                    }
+                    numStreamsOnEncoder++;
+                    numYuv888OnEncoder++;
+                    largeYuv888Size.width = newStream->width;
+                    largeYuv888Size.height = newStream->height;
+                }
+                break;
+            default:
+                processedStreamCnt++;
+                if (isOnEncoder(maxViewfinderSize, newStream->width,
+                        newStream->height)) {
+                    commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
+                    numStreamsOnEncoder++;
+                }
+                break;
+            }
+
+        }
+    }
+
+    if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
+            gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
+            !m_bIsVideo) {
+        m_bEisEnable = false;
+    }
+
+    /* Logic to enable/disable TNR based on specific config size/etc.*/
+    if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
+            ((videoWidth == 1920 && videoHeight == 1080) ||
+            (videoWidth == 1280 && videoHeight == 720)) &&
+            (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
+        m_bTnrEnabled = true;
+
+    /* Check if num_streams is sane */
+    if (stallStreamCnt > MAX_STALLING_STREAMS ||
+            rawStreamCnt > MAX_RAW_STREAMS ||
+            processedStreamCnt > MAX_PROCESSED_STREAMS) {
+        LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
+                 stallStreamCnt, rawStreamCnt, processedStreamCnt);
+        pthread_mutex_unlock(&mMutex);
+        return -EINVAL;
+    }
+    /* Check whether we have zsl stream or 4k video case */
+    if (isZsl && m_bIsVideo) {
+        LOGE("Currently invalid configuration ZSL&Video!");
+        pthread_mutex_unlock(&mMutex);
+        return -EINVAL;
+    }
+    /* Check if stream sizes are sane */
+    if (numStreamsOnEncoder > 2) {
+        LOGE("Number of streams on ISP encoder path exceeds limits of 2");
+        pthread_mutex_unlock(&mMutex);
+        return -EINVAL;
+    } else if (1 < numStreamsOnEncoder){
+        bUseCommonFeatureMask = true;
+        LOGH("Multiple streams above max viewfinder size, common mask needed");
+    }
+
+    /* Check if BLOB size is greater than 4k in 4k recording case */
+    if (m_bIs4KVideo && bJpegExceeds4K) {
+        LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
+        pthread_mutex_unlock(&mMutex);
+        return -EINVAL;
+    }
+
+    // When JPEG and preview streams share VFE output, CPP will not apply CAC2
+    // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
+    // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
+    // is not true. Otherwise testMandatoryOutputCombinations will fail with following
+    // configurations:
+    //    {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
+    //    {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
+    //    (These two configurations will not have CAC2 enabled even in HQ modes.)
+    if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
+        ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
+                __func__);
+        pthread_mutex_unlock(&mMutex);
+        return -EINVAL;
+    }
+
+    // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
+    // the YUV stream's size is greater or equal to the JPEG size, set common
+    // postprocess mask to NONE, so that we can take advantage of postproc bypass.
+    if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
+            jpegSize.width, jpegSize.height) &&
+            largeYuv888Size.width > jpegSize.width &&
+            largeYuv888Size.height > jpegSize.height) {
+        bYuv888OverrideJpeg = true;
+    } else if (!isJpeg && numStreamsOnEncoder > 1) {
+        commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
+    }
+
+    LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
+            maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
+            commonFeatureMask);
+    LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
+            numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
+
+    rc = validateStreamDimensions(streamList);
+    if (rc == NO_ERROR) {
+        rc = validateStreamRotations(streamList);
+    }
+    if (rc != NO_ERROR) {
+        LOGE("Invalid stream configuration requested!");
+        pthread_mutex_unlock(&mMutex);
+        return rc;
+    }
+
+    camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
+    for (size_t i = 0; i < streamList->num_streams; i++) {
+        camera3_stream_t *newStream = streamList->streams[i];
+        LOGH("newStream type = %d, stream format = %d "
+                "stream size : %d x %d, stream rotation = %d",
+                 newStream->stream_type, newStream->format,
+                newStream->width, newStream->height, newStream->rotation);
+        //if the stream is in the mStreamList validate it
+        bool stream_exists = false;
+        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
+                it != mStreamInfo.end(); it++) {
+            if ((*it)->stream == newStream) {
+                QCamera3ProcessingChannel *channel =
+                    (QCamera3ProcessingChannel*)(*it)->stream->priv;
+                stream_exists = true;
+                if (channel)
+                    delete channel;
+                (*it)->status = VALID;
+                (*it)->stream->priv = NULL;
+                (*it)->channel = NULL;
+            }
+        }
+        if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
+            //new stream
+            stream_info_t* stream_info;
+            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
+            if (!stream_info) {
+               LOGE("Could not allocate stream info");
+               rc = -ENOMEM;
+               pthread_mutex_unlock(&mMutex);
+               return rc;
+            }
+            stream_info->stream = newStream;
+            stream_info->status = VALID;
+            stream_info->channel = NULL;
+            mStreamInfo.push_back(stream_info);
+        }
+        /* Covers Opaque ZSL and API1 F/W ZSL */
+        if (IS_USAGE_ZSL(newStream->usage)
+                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
+            if (zslStream != NULL) {
+                LOGE("Multiple input/reprocess streams requested!");
+                pthread_mutex_unlock(&mMutex);
+                return BAD_VALUE;
+            }
+            zslStream = newStream;
+        }
+        /* Covers YUV reprocess */
+        if (inputStream != NULL) {
+            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
+                    && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
+                    && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
+                    && inputStream->width == newStream->width
+                    && inputStream->height == newStream->height) {
+                if (zslStream != NULL) {
+                    /* This scenario indicates multiple YUV streams with same size
+                     * as input stream have been requested, since zsl stream handle
+                     * is solely used for the purpose of overriding the size of streams
+                     * which share h/w streams we will just make a guess here as to
+                     * which of the stream is a ZSL stream, this will be refactored
+                     * once we make generic logic for streams sharing encoder output
+                     */
+                    LOGH("Warning, Multiple ip/reprocess streams requested!");
+                }
+                zslStream = newStream;
+            }
+        }
+    }
+
+    /* If a zsl stream is set, we know that we have configured at least one input or
+       bidirectional stream */
+    if (NULL != zslStream) {
+        mInputStreamInfo.dim.width = (int32_t)zslStream->width;
+        mInputStreamInfo.dim.height = (int32_t)zslStream->height;
+        mInputStreamInfo.format = zslStream->format;
+        mInputStreamInfo.usage = zslStream->usage;
+        LOGD("Input stream configured! %d x %d, format %d, usage %d",
+                 mInputStreamInfo.dim.width,
+                mInputStreamInfo.dim.height,
+                mInputStreamInfo.format, mInputStreamInfo.usage);
+    }
+
+    cleanAndSortStreamInfo();
+    if (mMetadataChannel) {
+        delete mMetadataChannel;
+        mMetadataChannel = NULL;
+    }
+    if (mSupportChannel) {
+        delete mSupportChannel;
+        mSupportChannel = NULL;
+    }
+
+    if (mAnalysisChannel) {
+        delete mAnalysisChannel;
+        mAnalysisChannel = NULL;
+    }
+
+    if (mDummyBatchChannel) {
+        delete mDummyBatchChannel;
+        mDummyBatchChannel = NULL;
+    }
+
+    //Create metadata channel and initialize it
+    cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
+    setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
+            gCamCapability[mCameraId]->color_arrangement);
+    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
+                    mChannelHandle, mCameraHandle->ops, captureResultCb,
+                    &padding_info, metadataFeatureMask, this);
+    if (mMetadataChannel == NULL) {
+        LOGE("failed to allocate metadata channel");
+        rc = -ENOMEM;
+        pthread_mutex_unlock(&mMutex);
+        return rc;
+    }
+    rc = mMetadataChannel->initialize(IS_TYPE_NONE);
+    if (rc < 0) {
+        LOGE("metadata channel initialization failed");
+        delete mMetadataChannel;
+        mMetadataChannel = NULL;
+        pthread_mutex_unlock(&mMutex);
+        return rc;
+    }
+
+    // Create analysis stream all the time, even when h/w support is not available
+    {
+        cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
+        setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
+                gCamCapability[mCameraId]->color_arrangement);
+        cam_analysis_info_t analysisInfo;
+        rc = mCommon.getAnalysisInfo(
+                FALSE,
+                TRUE,
+                analysisFeatureMask,
+                &analysisInfo);
+        if (rc != NO_ERROR) {
+            LOGE("getAnalysisInfo failed, ret = %d", rc);
+        }
+        if (rc == NO_ERROR) {
+            mAnalysisChannel = new QCamera3SupportChannel(
+                mCameraHandle->camera_handle,
+                mChannelHandle,
+                mCameraHandle->ops,
+                &analysisInfo.analysis_padding_info,
+                analysisFeatureMask,
+                CAM_STREAM_TYPE_ANALYSIS,
+                &analysisInfo.analysis_max_res,
+                (analysisInfo.analysis_format
+                == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
+                : CAM_FORMAT_YUV_420_NV21),
+                analysisInfo.hw_analysis_supported,
+                gCamCapability[mCameraId]->color_arrangement,
+                this,
+                0); // force buffer count to 0
+            if (!mAnalysisChannel) {
+                LOGE("H/W Analysis channel cannot be created");
+                pthread_mutex_unlock(&mMutex);
+                return -ENOMEM;
+            }
+        }
+    }
+
+    bool isRawStreamRequested = false;
+    memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
+    /* Allocate channel objects for the requested streams */
+    for (size_t i = 0; i < streamList->num_streams; i++) {
+        camera3_stream_t *newStream = streamList->streams[i];
+        uint32_t stream_usage = newStream->usage;
+        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
+        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
+        struct camera_info *p_info = NULL;
+        pthread_mutex_lock(&gCamLock);
+        p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
+        pthread_mutex_unlock(&gCamLock);
+        if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
+                || IS_USAGE_ZSL(newStream->usage)) &&
+            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
+            mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
+            if (bUseCommonFeatureMask) {
+                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
+                        commonFeatureMask;
+            } else {
+                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
+                        CAM_QCOM_FEATURE_NONE;
+            }
+
+        } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
+                LOGH("Input stream configured, reprocess config");
+        } else {
+            //for non zsl streams find out the format
+            switch (newStream->format) {
+            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
+            {
+                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
+                        CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
+                /* add additional features to pp feature mask */
+                addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+                        mStreamConfigInfo.num_streams);
+
+                if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
+                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
+                                CAM_STREAM_TYPE_VIDEO;
+                    if (m_bTnrEnabled && m_bTnrVideo) {
+                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
+                            CAM_QCOM_FEATURE_CPP_TNR;
+                        //TNR and CDS are mutually exclusive. So reset CDS from feature mask
+                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
+                                ~CAM_QCOM_FEATURE_CDS;
+                    }
+                } else {
+                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
+                            CAM_STREAM_TYPE_PREVIEW;
+                    if (m_bTnrEnabled && m_bTnrPreview) {
+                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
+                                CAM_QCOM_FEATURE_CPP_TNR;
+                        //TNR and CDS are mutually exclusive. So reset CDS from feature mask
+                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
+                                ~CAM_QCOM_FEATURE_CDS;
+                    }
+                    padding_info.width_padding = mSurfaceStridePadding;
+                    padding_info.height_padding = CAM_PAD_TO_2;
+                }
+                if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
+                        (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
+                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
+                            newStream->height;
+                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
+                            newStream->width;
+                }
+            }
+            break;
+            case HAL_PIXEL_FORMAT_YCbCr_420_888:
+                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
+                if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
+                    if (bUseCommonFeatureMask)
+                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
+                                commonFeatureMask;
+                    else
+                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
+                                CAM_QCOM_FEATURE_NONE;
+                } else {
+                    mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
+                            CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
+                }
+            break;
+            case HAL_PIXEL_FORMAT_BLOB:
+                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
+                // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
+                if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
+                     mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
+                             CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
+                } else {
+                    if (bUseCommonFeatureMask &&
+                            isOnEncoder(maxViewfinderSize, newStream->width,
+                            newStream->height)) {
+                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
+                    } else {
+                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
+                    }
+                }
+                if (isZsl) {
+                    if (zslStream) {
+                        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
+                                (int32_t)zslStream->width;
+                        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
+                                (int32_t)zslStream->height;
+                    } else {
+                        LOGE("Error, No ZSL stream identified");
+                        pthread_mutex_unlock(&mMutex);
+                        return -EINVAL;
+                    }
+                } else if (m_bIs4KVideo) {
+                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
+                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
+                } else if (bYuv888OverrideJpeg) {
+                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
+                            (int32_t)largeYuv888Size.width;
+                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
+                            (int32_t)largeYuv888Size.height;
+                }
+                break;
+            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
+            case HAL_PIXEL_FORMAT_RAW16:
+            case HAL_PIXEL_FORMAT_RAW10:
+                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
+                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
+                isRawStreamRequested = true;
+                break;
+            default:
+                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
+                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
+                break;
+            }
+        }
+
+        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
+                (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
+                gCamCapability[mCameraId]->color_arrangement);
+
+        if (newStream->priv == NULL) {
+            //New stream, construct channel
+            switch (newStream->stream_type) {
+            case CAMERA3_STREAM_INPUT:
+                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
+                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
+                break;
+            case CAMERA3_STREAM_BIDIRECTIONAL:
+                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
+                    GRALLOC_USAGE_HW_CAMERA_WRITE;
+                break;
+            case CAMERA3_STREAM_OUTPUT:
+                /* For video encoding stream, set read/write rarely
+                 * flag so that they may be set to un-cached */
+                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
+                    newStream->usage |=
+                         (GRALLOC_USAGE_SW_READ_RARELY |
+                         GRALLOC_USAGE_SW_WRITE_RARELY |
+                         GRALLOC_USAGE_HW_CAMERA_WRITE);
+                else if (IS_USAGE_ZSL(newStream->usage))
+                {
+                    LOGD("ZSL usage flag skipping");
+                }
+                else if (newStream == zslStream
+                        || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
+                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
+                } else
+                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
+                break;
+            default:
+                LOGE("Invalid stream_type %d", newStream->stream_type);
+                break;
+            }
+
+            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
+                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
+                QCamera3ProcessingChannel *channel = NULL;
+                switch (newStream->format) {
+                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+                    if ((newStream->usage &
+                            private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
+                            (streamList->operation_mode ==
+                            CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
+                    ) {
+                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
+                                mChannelHandle, mCameraHandle->ops, captureResultCb,
+                                &gCamCapability[mCameraId]->padding_info,
+                                this,
+                                newStream,
+                                (cam_stream_type_t)
+                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
+                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
+                                mMetadataChannel,
+                                0); //heap buffers are not required for HFR video channel
+                        if (channel == NULL) {
+                            LOGE("allocation of channel failed");
+                            pthread_mutex_unlock(&mMutex);
+                            return -ENOMEM;
+                        }
+                        //channel->getNumBuffers() will return 0 here so use
+                        //MAX_INFLIGH_HFR_REQUESTS
+                        newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
+                        newStream->priv = channel;
+                        LOGI("num video buffers in HFR mode: %d",
+                                 MAX_INFLIGHT_HFR_REQUESTS);
+                    } else {
+                        /* Copy stream contents in HFR preview only case to create
+                         * dummy batch channel so that sensor streaming is in
+                         * HFR mode */
+                        if (!m_bIsVideo && (streamList->operation_mode ==
+                                CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
+                            mDummyBatchStream = *newStream;
+                        }
+                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
+                                mChannelHandle, mCameraHandle->ops, captureResultCb,
+                                &gCamCapability[mCameraId]->padding_info,
+                                this,
+                                newStream,
+                                (cam_stream_type_t)
+                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
+                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
+                                mMetadataChannel,
+                                MAX_INFLIGHT_REQUESTS);
+                        if (channel == NULL) {
+                            LOGE("allocation of channel failed");
+                            pthread_mutex_unlock(&mMutex);
+                            return -ENOMEM;
+                        }
+                        newStream->max_buffers = channel->getNumBuffers();
+                        newStream->priv = channel;
+                    }
+                    break;
+                case HAL_PIXEL_FORMAT_YCbCr_420_888: {
+                    channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
+                            mChannelHandle,
+                            mCameraHandle->ops, captureResultCb,
+                            &padding_info,
+                            this,
+                            newStream,
+                            (cam_stream_type_t)
+                                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
+                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
+                            mMetadataChannel);
+                    if (channel == NULL) {
+                        LOGE("allocation of YUV channel failed");
+                        pthread_mutex_unlock(&mMutex);
+                        return -ENOMEM;
+                    }
+                    newStream->max_buffers = channel->getNumBuffers();
+                    newStream->priv = channel;
+                    break;
+                }
+                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
+                case HAL_PIXEL_FORMAT_RAW16:
+                case HAL_PIXEL_FORMAT_RAW10:
+                    mRawChannel = new QCamera3RawChannel(
+                            mCameraHandle->camera_handle, mChannelHandle,
+                            mCameraHandle->ops, captureResultCb,
+                            &padding_info,
+                            this, newStream,
+                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
+                            mMetadataChannel,
+                            (newStream->format == HAL_PIXEL_FORMAT_RAW16));
+                    if (mRawChannel == NULL) {
+                        LOGE("allocation of raw channel failed");
+                        pthread_mutex_unlock(&mMutex);
+                        return -ENOMEM;
+                    }
+                    newStream->max_buffers = mRawChannel->getNumBuffers();
+                    newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
+                    break;
+                case HAL_PIXEL_FORMAT_BLOB:
+                    // Max live snapshot inflight buffer is 1. This is to mitigate
+                    // frame drop issues for video snapshot. The more buffers being
+                    // allocated, the more frame drops there are.
+                    mPictureChannel = new QCamera3PicChannel(
+                            mCameraHandle->camera_handle, mChannelHandle,
+                            mCameraHandle->ops, captureResultCb,
+                            &padding_info, this, newStream,
+                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
+                            m_bIs4KVideo, isZsl, mMetadataChannel,
+                            (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
+                    if (mPictureChannel == NULL) {
+                        LOGE("allocation of channel failed");
+                        pthread_mutex_unlock(&mMutex);
+                        return -ENOMEM;
+                    }
+                    newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
+                    newStream->max_buffers = mPictureChannel->getNumBuffers();
+                    mPictureChannel->overrideYuvSize(
+                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
+                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
+                    break;
+
+                default:
+                    LOGE("not a supported format 0x%x", newStream->format);
+                    break;
+                }
+            } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
+                newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
+            } else {
+                LOGE("Error, Unknown stream type");
+                pthread_mutex_unlock(&mMutex);
+                return -EINVAL;
+            }
+
+            QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
+            if (channel != NULL && channel->isUBWCEnabled()) {
+                cam_format_t fmt = channel->getStreamDefaultFormat(
+                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams]);
+                if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
+                    newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
+                }
+            }
+
+            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
+                    it != mStreamInfo.end(); it++) {
+                if ((*it)->stream == newStream) {
+                    (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
+                    break;
+                }
+            }
+        } else {
+            // Channel already exists for this stream
+            // Do nothing for now
+        }
+        padding_info = gCamCapability[mCameraId]->padding_info;
+
+        /* Do not add entries for input stream in metastream info
+         * since there is no real stream associated with it
+         */
+        if (newStream->stream_type != CAMERA3_STREAM_INPUT)
+            mStreamConfigInfo.num_streams++;
+    }
+
+    //RAW DUMP channel
+    if (mEnableRawDump && isRawStreamRequested == false){
+        cam_dimension_t rawDumpSize;
+        rawDumpSize = getMaxRawSize(mCameraId);
+        cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
+        setPAAFSupport(rawDumpFeatureMask,
+                CAM_STREAM_TYPE_RAW,
+                gCamCapability[mCameraId]->color_arrangement);
+        mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
+                                  mChannelHandle,
+                                  mCameraHandle->ops,
+                                  rawDumpSize,
+                                  &padding_info,
+                                  this, rawDumpFeatureMask);
+        if (!mRawDumpChannel) {
+            LOGE("Raw Dump channel cannot be created");
+            pthread_mutex_unlock(&mMutex);
+            return -ENOMEM;
+        }
+    }
+
+
+    if (mAnalysisChannel) {
+        cam_analysis_info_t analysisInfo;
+        memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
+        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
+                CAM_STREAM_TYPE_ANALYSIS;
+        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
+                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
+        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
+                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
+                gCamCapability[mCameraId]->color_arrangement);
+        rc = mCommon.getAnalysisInfo(FALSE, TRUE,
+                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
+                &analysisInfo);
+        if (rc != NO_ERROR) {
+            LOGE("getAnalysisInfo failed, ret = %d", rc);
+            pthread_mutex_unlock(&mMutex);
+            return rc;
+        }
+        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
+                analysisInfo.analysis_max_res;
+        mStreamConfigInfo.num_streams++;
+    }
+
+    if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
+        cam_analysis_info_t supportInfo;
+        memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
+        cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
+        setPAAFSupport(callbackFeatureMask,
+                CAM_STREAM_TYPE_CALLBACK,
+                gCamCapability[mCameraId]->color_arrangement);
+        rc = mCommon.getAnalysisInfo(FALSE, TRUE, callbackFeatureMask, &supportInfo);
+        if (rc != NO_ERROR) {
+            LOGE("getAnalysisInfo failed, ret = %d", rc);
+            pthread_mutex_unlock(&mMutex);
+            return rc;
+        }
+        mSupportChannel = new QCamera3SupportChannel(
+                mCameraHandle->camera_handle,
+                mChannelHandle,
+                mCameraHandle->ops,
+                &gCamCapability[mCameraId]->padding_info,
+                callbackFeatureMask,
+                CAM_STREAM_TYPE_CALLBACK,
+                &QCamera3SupportChannel::kDim,
+                CAM_FORMAT_YUV_420_NV21,
+                supportInfo.hw_analysis_supported,
+                gCamCapability[mCameraId]->color_arrangement,
+                this);
+        if (!mSupportChannel) {
+            LOGE("dummy channel cannot be created");
+            pthread_mutex_unlock(&mMutex);
+            return -ENOMEM;
+        }
+    }
+
+    if (mSupportChannel) {
+        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
+                QCamera3SupportChannel::kDim;
+        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
+                CAM_STREAM_TYPE_CALLBACK;
+        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
+                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
+        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
+                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
+                gCamCapability[mCameraId]->color_arrangement);
+        mStreamConfigInfo.num_streams++;
+    }
+
+    if (mRawDumpChannel) {
+        cam_dimension_t rawSize;
+        rawSize = getMaxRawSize(mCameraId);
+        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
+                rawSize;
+        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
+                CAM_STREAM_TYPE_RAW;
+        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
+                CAM_QCOM_FEATURE_NONE;
+        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
+                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
+                gCamCapability[mCameraId]->color_arrangement);
+        mStreamConfigInfo.num_streams++;
+    }
+    /* In HFR mode, if video stream is not added, create a dummy channel so that
+     * ISP can create a batch mode even for preview only case. This channel is
+     * never 'start'ed (no stream-on), it is only 'initialized'  */
+    if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
+            !m_bIsVideo) {
+        cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
+        setPAAFSupport(dummyFeatureMask,
+                CAM_STREAM_TYPE_VIDEO,
+                gCamCapability[mCameraId]->color_arrangement);
+        mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
+                mChannelHandle,
+                mCameraHandle->ops, captureResultCb,
+                &gCamCapability[mCameraId]->padding_info,
+                this,
+                &mDummyBatchStream,
+                CAM_STREAM_TYPE_VIDEO,
+                dummyFeatureMask,
+                mMetadataChannel);
+        if (NULL == mDummyBatchChannel) {
+            LOGE("creation of mDummyBatchChannel failed."
+                    "Preview will use non-hfr sensor mode ");
+        }
+    }
+    if (mDummyBatchChannel) {
+        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
+                mDummyBatchStream.width;
+        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
+                mDummyBatchStream.height;
+        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
+                CAM_STREAM_TYPE_VIDEO;
+        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
+                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
+        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
+                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
+                gCamCapability[mCameraId]->color_arrangement);
+        mStreamConfigInfo.num_streams++;
+    }
+
+    mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
+    mStreamConfigInfo.buffer_info.max_buffers =
+            m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
+
+    /* Initialize mPendingRequestInfo and mPendingBuffersMap */
+    for (pendingRequestIterator i = mPendingRequestsList.begin();
+            i != mPendingRequestsList.end();) {
+        i = erasePendingRequest(i);
+    }
+    mPendingFrameDropList.clear();
+    // Initialize/Reset the pending buffers list
+    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
+        req.mPendingBufferList.clear();
+    }
+    mPendingBuffersMap.mPendingBuffersInRequest.clear();
+
+    mPendingReprocessResultList.clear();
+
+    mCurJpegMeta.clear();
+    //Get min frame duration for this streams configuration
+    deriveMinFrameDuration();
+
+    // Update state
+    mState = CONFIGURED;
+
+    pthread_mutex_unlock(&mMutex);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : validateCaptureRequest
+ *
+ * DESCRIPTION: validate a capture request from camera service
+ *
+ * PARAMETERS :
+ *   @request : request from framework to process
+ *
+ * RETURN     :
+ *
+ *==========================================================================*/
+int QCamera3HardwareInterface::validateCaptureRequest(
+                    camera3_capture_request_t *request)
+{
+    ssize_t idx = 0;
+    const camera3_stream_buffer_t *b;
+
+    /* Sanity check the request */
+    if (request == NULL) {
+        LOGE("NULL capture request");
+        return BAD_VALUE;
+    }
+
+    if ((request->settings == NULL) && (mState == CONFIGURED)) {
+        /*settings cannot be null for the first request*/
+        return BAD_VALUE;
+    }
+
+    uint32_t frameNumber = request->frame_number;
+    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
+        // BUGFIX: format string had a single %d but was passed both
+        // __FUNCTION__ (a string) and frameNumber; drop the stray argument.
+        LOGE("Request %d: No output buffers provided!", frameNumber);
+        return BAD_VALUE;
+    }
+    if (request->num_output_buffers >= MAX_NUM_STREAMS) {
+        // BUGFIX: format string had one %d for two integer arguments;
+        // print both the buffer count and the stream limit.
+        LOGE("Number of buffers %d equals or is greater than maximum number of streams %d!",
+                 request->num_output_buffers, MAX_NUM_STREAMS);
+        return BAD_VALUE;
+    }
+    // Validate the (optional) reprocess input buffer: it must be present,
+    // already signalled (no release fence) and carry a valid handle.
+    if (request->input_buffer != NULL) {
+        b = request->input_buffer;
+        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
+            LOGE("Request %d: Buffer %ld: Status not OK!",
+                     frameNumber, (long)idx);
+            return BAD_VALUE;
+        }
+        if (b->release_fence != -1) {
+            LOGE("Request %d: Buffer %ld: Has a release fence!",
+                     frameNumber, (long)idx);
+            return BAD_VALUE;
+        }
+        if (b->buffer == NULL) {
+            LOGE("Request %d: Buffer %ld: NULL buffer handle!",
+                     frameNumber, (long)idx);
+            return BAD_VALUE;
+        }
+    }
+
+    // Validate all buffers
+    b = request->output_buffers;
+    do {
+        QCamera3ProcessingChannel *channel =
+                static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
+        if (channel == NULL) {
+            LOGE("Request %d: Buffer %ld: Unconfigured stream!",
+                     frameNumber, (long)idx);
+            return BAD_VALUE;
+        }
+        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
+            LOGE("Request %d: Buffer %ld: Status not OK!",
+                     frameNumber, (long)idx);
+            return BAD_VALUE;
+        }
+        if (b->release_fence != -1) {
+            LOGE("Request %d: Buffer %ld: Has a release fence!",
+                     frameNumber, (long)idx);
+            return BAD_VALUE;
+        }
+        if (b->buffer == NULL) {
+            LOGE("Request %d: Buffer %ld: NULL buffer handle!",
+                     frameNumber, (long)idx);
+            return BAD_VALUE;
+        }
+        if (*(b->buffer) == NULL) {
+            LOGE("Request %d: Buffer %ld: NULL private handle!",
+                     frameNumber, (long)idx);
+            return BAD_VALUE;
+        }
+        idx++;
+        b = request->output_buffers + idx;
+    } while (idx < (ssize_t)request->num_output_buffers);
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : deriveMinFrameDuration
+ *
+ * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
+ *              on currently configured streams.
+ *
+ * PARAMETERS : NONE
+ *
+ * RETURN     : NONE
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::deriveMinFrameDuration()
+{
+    /* Walk the configured output streams and record the largest pixel area
+     * (width * height) seen in each category: JPEG (BLOB), RAW, and
+     * processed (everything else). */
+    int32_t jpegMax = 0;
+    int32_t processedMax = 0;
+    int32_t rawMax = 0;
+
+    for (List<stream_info_t*>::iterator s = mStreamInfo.begin();
+            s != mStreamInfo.end(); s++) {
+        // Input streams don't carry a meaningful stream_type; skip them.
+        if ((*s)->stream->stream_type == CAMERA3_STREAM_INPUT)
+            continue;
+
+        int32_t area = (int32_t)((*s)->stream->width * (*s)->stream->height);
+        int format = (*s)->stream->format;
+        if (format == HAL_PIXEL_FORMAT_BLOB) {
+            jpegMax = MAX(jpegMax, area);
+        } else if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
+                format == HAL_PIXEL_FORMAT_RAW10 ||
+                format == HAL_PIXEL_FORMAT_RAW16) {
+            rawMax = MAX(rawMax, area);
+        } else {
+            processedMax = MAX(processedMax, area);
+        }
+    }
+
+    size_t rawCount = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
+            MAX_SIZES_CNT);
+
+    // JPEG outputs are produced from processed streams, so they count
+    // towards the maximum processed dimension.
+    processedMax = MAX(processedMax, jpegMax);
+
+    /* If no configured RAW stream covers the largest processed stream, pick
+     * the smallest supported raw dimension that is at least that large. */
+    if (processedMax > rawMax) {
+        rawMax = INT32_MAX;
+        for (size_t i = 0; i < rawCount; i++) {
+            int32_t area = gCamCapability[mCameraId]->raw_dim[i].width *
+                    gCamCapability[mCameraId]->raw_dim[i].height;
+            if (area >= processedMax && area < rawMax)
+                rawMax = area;
+        }
+    }
+
+    // Minimum RAW duration: taken from the raw dimension matching rawMax.
+    for (size_t i = 0; i < rawCount; i++) {
+        if (rawMax == gCamCapability[mCameraId]->raw_dim[i].width *
+                gCamCapability[mCameraId]->raw_dim[i].height) {
+            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
+            break;
+        }
+    }
+
+    // Minimum processed/JPEG durations: taken from the picture size table
+    // entry matching processedMax. JPEG shares the processed duration.
+    size_t picCount = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
+            MAX_SIZES_CNT);
+    for (size_t i = 0; i < picCount; i++) {
+        if (processedMax ==
+                gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
+                gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
+            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
+            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
+            break;
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getMinFrameDuration
+ *
+ * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
+ *              and current request configuration.
+ *
+ * PARAMETERS : @request: requset sent by the frameworks
+ *
+ * RETURN     : min farme duration for a particular request
+ *
+ *==========================================================================*/
+int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
+{
+    // Minimum frame duration for this request: the max of the raw and
+    // processed minimum durations, additionally bounded by the JPEG minimum
+    // duration when the request contains a BLOB (JPEG) stream.
+    // BUGFIX: removed dead local 'hasRawStream' which was computed from the
+    // RAW formats but never read.
+    bool hasJpegStream = false;
+    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
+        const camera3_stream_t *stream = request->output_buffers[i].stream;
+        if (stream->format == HAL_PIXEL_FORMAT_BLOB) {
+            hasJpegStream = true;
+            break;
+        }
+    }
+
+    if (!hasJpegStream)
+        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
+    else
+        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
+}
+
+/*===========================================================================
+ * FUNCTION   : handleBuffersDuringFlushLock
+ *
+ * DESCRIPTION: Account for buffers returned from back-end during flush
+ *              This function is executed while mMutex is held by the caller.
+ *
+ * PARAMETERS :
+ *   @buffer: image buffer for the callback
+ *
+ * RETURN     :
+ *==========================================================================*/
+void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
+{
+    // Search every pending request for the buffer handle being returned by
+    // the back-end; when found, account for it against the flush counter.
+    bool found = false;
+    for (List<PendingBuffersInRequest>::iterator req =
+            mPendingBuffersMap.mPendingBuffersInRequest.begin();
+            !found && req != mPendingBuffersMap.mPendingBuffersInRequest.end();
+            req++) {
+        for (List<PendingBufferInfo>::iterator info =
+                req->mPendingBufferList.begin();
+                info != req->mPendingBufferList.end(); info++) {
+            if (info->buffer == buffer->buffer) {
+                mPendingBuffersMap.numPendingBufsAtFlush--;
+                LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
+                    buffer->buffer, req->frame_number,
+                    mPendingBuffersMap.numPendingBufsAtFlush);
+                found = true;
+                break;
+            }
+        }
+    }
+    // Once every outstanding buffer has been returned, wake the thread
+    // blocked in flush().
+    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
+        LOGD("All buffers returned to HAL. Continue flush");
+        pthread_cond_signal(&mBuffersCond);
+    }
+}
+
+
+/*===========================================================================
+ * FUNCTION   : handlePendingReprocResults
+ *
+ * DESCRIPTION: check and notify on any pending reprocess results
+ *
+ * PARAMETERS :
+ *   @frame_number   : Pending request frame number
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
+{
+    // Look for a deferred reprocess result matching this frame number.
+    // At most one entry is handled per call; the list entry is erased once
+    // delivered, so iterator 'j' must not be advanced past the erase.
+    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
+            j != mPendingReprocessResultList.end(); j++) {
+        if (j->frame_number == frame_number) {
+            // Deliver the notify (shutter/error) message that was held back
+            // while the reprocess result was pending.
+            mCallbackOps->notify(mCallbackOps, &j->notify_msg);
+
+            LOGD("Delayed reprocess notify %d",
+                    frame_number);
+
+            // Find the matching pending request so its settings and input
+            // buffer can be attached to the capture result.
+            for (pendingRequestIterator k = mPendingRequestsList.begin();
+                    k != mPendingRequestsList.end(); k++) {
+
+                if (k->frame_number == j->frame_number) {
+                    LOGD("Found reprocess frame number %d in pending reprocess List "
+                            "Take it out!!",
+                            k->frame_number);
+
+                    // Build the final capture result: one output buffer
+                    // (from the reprocess entry) plus the request's metadata.
+                    camera3_capture_result result;
+                    memset(&result, 0, sizeof(camera3_capture_result));
+                    result.frame_number = frame_number;
+                    result.num_output_buffers = 1;
+                    result.output_buffers =  &j->buffer;
+                    result.input_buffer = k->input_buffer;
+                    result.result = k->settings;
+                    // Reprocess results are delivered complete in one shot.
+                    result.partial_result = PARTIAL_RESULT_COUNT;
+                    mCallbackOps->process_capture_result(mCallbackOps, &result);
+
+                    // Request fully answered; drop it from the pending list.
+                    erasePendingRequest(k);
+                    break;
+                }
+            }
+            // Remove the consumed reprocess entry and stop searching.
+            mPendingReprocessResultList.erase(j);
+            break;
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : handleBatchMetadata
+ *
+ * DESCRIPTION: Handles metadata buffer callback in batch mode
+ *
+ * PARAMETERS : @metadata_buf: metadata buffer
+ *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
+ *                 the meta buf in this method
+ *
+ * RETURN     :
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::handleBatchMetadata(
+        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
+{
+    ATRACE_CALL();
+
+    if (NULL == metadata_buf) {
+        LOGE("metadata_buf is NULL");
+        return;
+    }
+    /* In batch mode, the metdata will contain the frame number and timestamp of
+     * the last frame in the batch. Eg: a batch containing buffers from request
+     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
+     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
+     * multiple process_capture_results */
+    metadata_buffer_t *metadata =
+            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
+    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
+    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
+    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
+    uint32_t frame_number = 0, urgent_frame_number = 0;
+    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
+    bool invalid_metadata = false;
+    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
+    size_t loopCount = 1;
+
+    // Pull the batch-level fields out of the metadata buffer; any missing
+    // pointer marks the whole buffer invalid (still looped below for
+    // pipeline-depth bookkeeping in handleMetadataWithLock).
+    int32_t *p_frame_number_valid =
+            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
+    uint32_t *p_frame_number =
+            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
+    int64_t *p_capture_time =
+            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
+    int32_t *p_urgent_frame_number_valid =
+            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
+    uint32_t *p_urgent_frame_number =
+            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
+
+    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
+            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
+            (NULL == p_urgent_frame_number)) {
+        LOGE("Invalid metadata");
+        invalid_metadata = true;
+    } else {
+        frame_number_valid = *p_frame_number_valid;
+        last_frame_number = *p_frame_number;
+        last_frame_capture_time = *p_capture_time;
+        urgent_frame_number_valid = *p_urgent_frame_number_valid;
+        last_urgent_frame_number = *p_urgent_frame_number;
+    }
+
+    /* In batchmode, when no video buffers are requested, set_parms are sent
+     * for every capture_request. The difference between consecutive urgent
+     * frame numbers and frame numbers should be used to interpolate the
+     * corresponding frame numbers and time stamps */
+    pthread_mutex_lock(&mMutex);
+    if (urgent_frame_number_valid) {
+        first_urgent_frame_number =
+                mPendingBatchMap.valueFor(last_urgent_frame_number);
+        urgentFrameNumDiff = last_urgent_frame_number + 1 -
+                first_urgent_frame_number;
+
+        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
+                 urgent_frame_number_valid,
+                first_urgent_frame_number, last_urgent_frame_number);
+    }
+
+    if (frame_number_valid) {
+        first_frame_number = mPendingBatchMap.valueFor(last_frame_number);
+        frameNumDiff = last_frame_number + 1 -
+                first_frame_number;
+        // This batch is now being consumed; drop its map entry.
+        mPendingBatchMap.removeItem(last_frame_number);
+
+        LOGD("frm: valid: %d frm_num: %d - %d",
+                 frame_number_valid,
+                first_frame_number, last_frame_number);
+
+    }
+    pthread_mutex_unlock(&mMutex);
+
+    if (urgent_frame_number_valid || frame_number_valid) {
+        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
+        // BUGFIX: urgentFrameNumDiff/frameNumDiff are size_t; use %zu
+        // instead of %d to avoid printing garbage on LP64.
+        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
+            LOGE("urgentFrameNumDiff: %zu urgentFrameNum: %d",
+                     urgentFrameNumDiff, last_urgent_frame_number);
+        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
+            LOGE("frameNumDiff: %zu frameNum: %d",
+                     frameNumDiff, last_frame_number);
+    }
+
+    for (size_t i = 0; i < loopCount; i++) {
+        /* handleMetadataWithLock is called even for invalid_metadata for
+         * pipeline depth calculation */
+        if (!invalid_metadata) {
+            /* Infer frame number. Batch metadata contains frame number of the
+             * last frame */
+            if (urgent_frame_number_valid) {
+                if (i < urgentFrameNumDiff) {
+                    urgent_frame_number =
+                            first_urgent_frame_number + i;
+                    LOGD("inferred urgent frame_number: %d",
+                             urgent_frame_number);
+                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
+                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
+                } else {
+                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
+                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
+                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
+                }
+            }
+
+            /* Infer frame number. Batch metadata contains frame number of the
+             * last frame */
+            if (frame_number_valid) {
+                if (i < frameNumDiff) {
+                    frame_number = first_frame_number + i;
+                    LOGD("inferred frame_number: %d", frame_number);
+                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
+                            CAM_INTF_META_FRAME_NUMBER, frame_number);
+                } else {
+                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
+                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
+                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
+                }
+            }
+
+            if (last_frame_capture_time) {
+                //Infer timestamp: assume frames in the batch are evenly
+                //spaced at the HFR video frame interval, ending at the
+                //reported capture time.
+                first_frame_capture_time = last_frame_capture_time -
+                        (((loopCount - 1) * NSEC_PER_SEC) / mHFRVideoFps);
+                capture_time =
+                        first_frame_capture_time + (i * NSEC_PER_SEC / mHFRVideoFps);
+                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
+                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
+                LOGD("batch capture_time: %lld, capture_time: %lld",
+                         last_frame_capture_time, capture_time);
+            }
+        }
+        pthread_mutex_lock(&mMutex);
+        // Pass false: this function owns the buf-done/free of metadata_buf,
+        // and the same buffer is re-used for every iteration.
+        handleMetadataWithLock(metadata_buf,
+                false /* free_and_bufdone_meta_buf */);
+        pthread_mutex_unlock(&mMutex);
+    }
+
+    /* BufDone metadata buffer */
+    if (free_and_bufdone_meta_buf) {
+        mMetadataChannel->bufDone(metadata_buf);
+        free(metadata_buf);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : handleMetadataWithLock
+ *
+ * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
+ *
+ * PARAMETERS : @metadata_buf: metadata buffer
+ *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
+ *                 the meta buf in this method
+ *
+ * RETURN     :
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::handleMetadataWithLock(
+    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
+{
+    ATRACE_CALL();
+    if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
+        //during flush do not send metadata from this thread
+        LOGD("not sending metadata during flush or when mState is error");
+        if (free_and_bufdone_meta_buf) {
+            mMetadataChannel->bufDone(metadata_buf);
+            free(metadata_buf);
+        }
+        return;
+    }
+
+    //not in flush
+    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
+    int32_t frame_number_valid, urgent_frame_number_valid;
+    uint32_t frame_number, urgent_frame_number;
+    int64_t capture_time;
+    nsecs_t currentSysTime;
+
+    int32_t *p_frame_number_valid =
+            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
+    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
+    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
+    int32_t *p_urgent_frame_number_valid =
+            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
+    uint32_t *p_urgent_frame_number =
+            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
+    IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
+            metadata) {
+        LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
+                 *p_frame_number_valid, *p_frame_number);
+    }
+
+    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
+            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
+        LOGE("Invalid metadata");
+        if (free_and_bufdone_meta_buf) {
+            mMetadataChannel->bufDone(metadata_buf);
+            free(metadata_buf);
+        }
+        goto done_metadata;
+    }
+    frame_number_valid =        *p_frame_number_valid;
+    frame_number =              *p_frame_number;
+    capture_time =              *p_capture_time;
+    urgent_frame_number_valid = *p_urgent_frame_number_valid;
+    urgent_frame_number =       *p_urgent_frame_number;
+    currentSysTime =            systemTime(CLOCK_MONOTONIC);
+
+    // Detect if buffers from any requests are overdue
+    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
+        if ( (currentSysTime - req.timestamp) >
+            s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
+            for (auto &missed : req.mPendingBufferList) {
+                LOGE("Current frame: %d. Missing: frame = %d, buffer = %p,"
+                    "stream type = %d, stream format = %d",
+                    frame_number, req.frame_number, missed.buffer,
+                    missed.stream->stream_type, missed.stream->format);
+            }
+        }
+    }
+    //Partial result on process_capture_result for timestamp
+    if (urgent_frame_number_valid) {
+        LOGD("valid urgent frame_number = %u, capture_time = %lld",
+           urgent_frame_number, capture_time);
+
+        //Recieved an urgent Frame Number, handle it
+        //using partial results
+        for (pendingRequestIterator i =
+                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
+            LOGD("Iterator Frame = %d urgent frame = %d",
+                 i->frame_number, urgent_frame_number);
+
+            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
+                (i->partial_result_cnt == 0)) {
+                LOGE("Error: HAL missed urgent metadata for frame number %d",
+                         i->frame_number);
+            }
+
+            if (i->frame_number == urgent_frame_number &&
+                     i->bUrgentReceived == 0) {
+
+                camera3_capture_result_t result;
+                memset(&result, 0, sizeof(camera3_capture_result_t));
+
+                i->partial_result_cnt++;
+                i->bUrgentReceived = 1;
+                // Extract 3A metadata
+                result.result =
+                    translateCbUrgentMetadataToResultMetadata(metadata);
+                // Populate metadata result
+                result.frame_number = urgent_frame_number;
+                result.num_output_buffers = 0;
+                result.output_buffers = NULL;
+                result.partial_result = i->partial_result_cnt;
+
+                mCallbackOps->process_capture_result(mCallbackOps, &result);
+                LOGD("urgent frame_number = %u, capture_time = %lld",
+                      result.frame_number, capture_time);
+                free_camera_metadata((camera_metadata_t *)result.result);
+                break;
+            }
+        }
+    }
+
+    if (!frame_number_valid) {
+        LOGD("Not a valid normal frame number, used as SOF only");
+        if (free_and_bufdone_meta_buf) {
+            mMetadataChannel->bufDone(metadata_buf);
+            free(metadata_buf);
+        }
+        goto done_metadata;
+    }
+    LOGH("valid frame_number = %u, capture_time = %lld",
+            frame_number, capture_time);
+
+    for (pendingRequestIterator i = mPendingRequestsList.begin();
+            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
+        // Flush out all entries with less or equal frame numbers.
+
+        camera3_capture_result_t result;
+        memset(&result, 0, sizeof(camera3_capture_result_t));
+
+        LOGD("frame_number in the list is %u", i->frame_number);
+        i->partial_result_cnt++;
+        result.partial_result = i->partial_result_cnt;
+
+        // Check whether any stream buffer corresponding to this is dropped or not
+        // If dropped, then send the ERROR_BUFFER for the corresponding stream
+        // The API does not expect a blob buffer to be dropped
+        if (p_cam_frame_drop) {
+            /* Clear notify_msg structure */
+            camera3_notify_msg_t notify_msg;
+            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
+            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
+                    j != i->buffers.end(); j++) {
+               if (j->stream->format != HAL_PIXEL_FORMAT_BLOB) {
+                   QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
+                   uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
+                   for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
+                       if (streamID == p_cam_frame_drop->streamID[k]) {
+                           // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
+                           LOGE("Start of reporting error frame#=%u, streamID=%u",
+                                    i->frame_number, streamID);
+                           notify_msg.type = CAMERA3_MSG_ERROR;
+                           notify_msg.message.error.frame_number = i->frame_number;
+                           notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
+                           notify_msg.message.error.error_stream = j->stream;
+                           mCallbackOps->notify(mCallbackOps, &notify_msg);
+                           LOGE("End of reporting error frame#=%u, streamID=%u",
+                                   i->frame_number, streamID);
+                           PendingFrameDropInfo PendingFrameDrop;
+                           PendingFrameDrop.frame_number=i->frame_number;
+                           PendingFrameDrop.stream_ID = streamID;
+                           // Add the Frame drop info to mPendingFrameDropList
+                           mPendingFrameDropList.push_back(PendingFrameDrop);
+                      }
+                   }
+               } else {
+                   LOGE("JPEG buffer dropped for frame number %d",
+                            i->frame_number);
+               }
+            }
+        }
+
+        // Send empty metadata with already filled buffers for dropped metadata
+        // and send valid metadata with already filled buffers for current metadata
+        /* we could hit this case when we either
+         * 1. have a pending reprocess request or
+         * 2. miss a metadata buffer callback */
+        if (i->frame_number < frame_number) {
+            if (i->input_buffer) {
+                /* this will be handled in handleInputBufferWithLock */
+                i++;
+                continue;
+            } else {
+                LOGE("Fatal: Missing metadata buffer for frame number %d", i->frame_number);
+                if (free_and_bufdone_meta_buf) {
+                    mMetadataChannel->bufDone(metadata_buf);
+                    free(metadata_buf);
+                }
+                mState = ERROR;
+                goto done_metadata;
+            }
+        } else {
+            mPendingLiveRequest--;
+            /* Clear notify_msg structure */
+            camera3_notify_msg_t notify_msg;
+            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
+
+            // Send shutter notify to frameworks
+            notify_msg.type = CAMERA3_MSG_SHUTTER;
+            notify_msg.message.shutter.frame_number = i->frame_number;
+            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
+            mCallbackOps->notify(mCallbackOps, &notify_msg);
+
+            i->timestamp = capture_time;
+
+            // Find channel requiring metadata, meaning internal offline postprocess
+            // is needed.
+            //TODO: for now, we don't support two streams requiring metadata at the same time.
+            // (because we are not making copies, and metadata buffer is not reference counted.
+            bool internalPproc = false;
+            for (pendingBufferIterator iter = i->buffers.begin();
+                    iter != i->buffers.end(); iter++) {
+                if (iter->need_metadata) {
+                    internalPproc = true;
+                    QCamera3ProcessingChannel *channel =
+                            (QCamera3ProcessingChannel *)iter->stream->priv;
+                    channel->queueReprocMetadata(metadata_buf);
+                    break;
+                }
+            }
+
+            result.result = translateFromHalMetadata(metadata,
+                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
+                    i->capture_intent, internalPproc, i->fwkCacMode);
+
+            saveExifParams(metadata);
+
+            if (i->blob_request) {
+                {
+                    //Dump tuning metadata if enabled and available
+                    char prop[PROPERTY_VALUE_MAX];
+                    memset(prop, 0, sizeof(prop));
+                    property_get("persist.camera.dumpmetadata", prop, "0");
+                    int32_t enabled = atoi(prop);
+                    if (enabled && metadata->is_tuning_params_valid) {
+                        dumpMetadataToFile(metadata->tuning_params,
+                               mMetaFrameCount,
+                               enabled,
+                               "Snapshot",
+                               frame_number);
+                    }
+                }
+            }
+
+            if (!internalPproc) {
+                LOGD("couldn't find need_metadata for this metadata");
+                // Return metadata buffer
+                if (free_and_bufdone_meta_buf) {
+                    mMetadataChannel->bufDone(metadata_buf);
+                    free(metadata_buf);
+                }
+            }
+        }
+        if (!result.result) {
+            LOGE("metadata is NULL");
+        }
+        result.frame_number = i->frame_number;
+        result.input_buffer = i->input_buffer;
+        result.num_output_buffers = 0;
+        result.output_buffers = NULL;
+        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
+                    j != i->buffers.end(); j++) {
+            if (j->buffer) {
+                result.num_output_buffers++;
+            }
+        }
+
+        updateFpsInPreviewBuffer(metadata, i->frame_number);
+
+        if (result.num_output_buffers > 0) {
+            camera3_stream_buffer_t *result_buffers =
+                new camera3_stream_buffer_t[result.num_output_buffers];
+            if (result_buffers != NULL) {
+                size_t result_buffers_idx = 0;
+                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
+                        j != i->buffers.end(); j++) {
+                    if (j->buffer) {
+                        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
+                                m != mPendingFrameDropList.end(); m++) {
+                            QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
+                            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
+                            if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
+                                j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
+                                LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
+                                        frame_number, streamID);
+                                m = mPendingFrameDropList.erase(m);
+                                break;
+                            }
+                        }
+                        mPendingBuffersMap.removeBuf(j->buffer->buffer);
+                        result_buffers[result_buffers_idx++] = *(j->buffer);
+                        free(j->buffer);
+                        j->buffer = NULL;
+                    }
+                }
+                result.output_buffers = result_buffers;
+                mCallbackOps->process_capture_result(mCallbackOps, &result);
+                LOGD("meta frame_number = %u, capture_time = %lld",
+                        result.frame_number, i->timestamp);
+                free_camera_metadata((camera_metadata_t *)result.result);
+                delete[] result_buffers;
+            }else {
+                LOGE("Fatal error: out of memory");
+            }
+        } else {
+            mCallbackOps->process_capture_result(mCallbackOps, &result);
+            LOGD("meta frame_number = %u, capture_time = %lld",
+                    result.frame_number, i->timestamp);
+            free_camera_metadata((camera_metadata_t *)result.result);
+        }
+
+        i = erasePendingRequest(i);
+
+        if (!mPendingReprocessResultList.empty()) {
+            handlePendingReprocResults(frame_number + 1);
+        }
+    }
+
+done_metadata:
+    for (pendingRequestIterator i = mPendingRequestsList.begin();
+            i != mPendingRequestsList.end() ;i++) {
+        i->pipeline_depth++;
+    }
+    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
+    unblockRequestIfNecessary();
+}
+
+/*===========================================================================
+ * FUNCTION   : hdrPlusPerfLock
+ *
+ * DESCRIPTION: perf lock for HDR+ using custom intent. Acquires a timed perf
+ *              lock when the metadata frame number matches the last custom-
+ *              intent frame, and releases it once the lock timer expires.
+ *
+ * PARAMETERS : @metadata_buf: Metadata super_buf pointer
+ *
+ * RETURN     : None
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::hdrPlusPerfLock(
+        mm_camera_super_buf_t *metadata_buf)
+{
+    if (NULL == metadata_buf) {
+        LOGE("metadata_buf is NULL");
+        return;
+    }
+    metadata_buffer_t *metadata =
+            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
+    int32_t *p_frame_number_valid =
+            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
+    uint32_t *p_frame_number =
+            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
+
+    if (p_frame_number_valid == NULL || p_frame_number == NULL) {
+        LOGE("Invalid metadata");
+        return;
+    }
+
+    // Both pointers are guaranteed non-NULL past the check above, so no
+    // need to re-test them here.
+    //acquire perf lock for 5 sec after the last HDR frame is captured
+    if (*p_frame_number_valid &&
+            (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
+        m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
+    }
+
+    //release lock after perf lock timer is expired. If lock is already released,
+    //isTimerReset returns false
+    if (m_perfLock.isTimerReset()) {
+        mLastCustIntentFrmNum = -1;
+        m_perfLock.lock_rel_timed();
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : handleInputBufferWithLock
+ *
+ * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
+ *              Finds the pending request matching @frame_number, sends the
+ *              shutter notify if not yet sent, waits on the input buffer's
+ *              release fence, then delivers the capture result and erases the
+ *              request from the pending list.
+ *
+ * PARAMETERS : @frame_number: frame number of the input buffer
+ *
+ * RETURN     :
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
+{
+    ATRACE_CALL();
+    // Locate the pending request carrying this frame number.
+    pendingRequestIterator i = mPendingRequestsList.begin();
+    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
+        i++;
+    }
+    if (i != mPendingRequestsList.end() && i->input_buffer) {
+        //found the right request
+        if (!i->shutter_notified) {
+            CameraMetadata settings;
+            camera3_notify_msg_t notify_msg;
+            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
+            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
+            // Prefer the sensor timestamp carried in the request settings;
+            // fall back to the current monotonic time when it is absent.
+            if(i->settings) {
+                settings = i->settings;
+                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
+                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
+                } else {
+                    LOGE("No timestamp in input settings! Using current one.");
+                }
+            } else {
+                LOGE("Input settings missing!");
+            }
+
+            // Shutter must be notified before the capture result is returned.
+            notify_msg.type = CAMERA3_MSG_SHUTTER;
+            notify_msg.message.shutter.frame_number = frame_number;
+            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
+            mCallbackOps->notify(mCallbackOps, &notify_msg);
+            i->shutter_notified = true;
+            LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
+                        i->frame_number, notify_msg.message.shutter.timestamp);
+        }
+
+        // Wait for the producer to release the input buffer; the fence fd is
+        // consumed (closed) here regardless of the wait result.
+        if (i->input_buffer->release_fence != -1) {
+           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
+           close(i->input_buffer->release_fence);
+           if (rc != OK) {
+               LOGE("input buffer sync wait failed %d", rc);
+           }
+        }
+
+        // Return the request settings as the final metadata together with the
+        // input buffer; no output buffers are attached on this path.
+        camera3_capture_result result;
+        memset(&result, 0, sizeof(camera3_capture_result));
+        result.frame_number = frame_number;
+        result.result = i->settings;
+        result.input_buffer = i->input_buffer;
+        result.partial_result = PARTIAL_RESULT_COUNT;
+
+        mCallbackOps->process_capture_result(mCallbackOps, &result);
+        LOGD("Input request metadata and input buffer frame_number = %u",
+                        i->frame_number);
+        // Erase only after the framework callback has consumed the request data.
+        i = erasePendingRequest(i);
+    } else {
+        LOGE("Could not find input request for frame number %d", frame_number);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : handleBufferWithLock
+ *
+ * DESCRIPTION: Handles image buffer callback with mMutex lock held.
+ *              Three paths: (1) during flush, hand the buffer to the flush
+ *              handler; (2) no matching pending request -> send the buffer
+ *              to the framework immediately; (3) matching request -> either
+ *              complete a reprocess request (input buffer present) or cache
+ *              the buffer until its metadata arrives.
+ *
+ * PARAMETERS : @buffer: image buffer for the callback
+ *              @frame_number: frame number of the image buffer
+ *
+ * RETURN     :
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::handleBufferWithLock(
+    camera3_stream_buffer_t *buffer, uint32_t frame_number)
+{
+    ATRACE_CALL();
+    /* Nothing to be done during error state */
+    if ((ERROR == mState) || (DEINIT == mState)) {
+        return;
+    }
+    if (mFlushPerf) {
+        handleBuffersDuringFlushLock(buffer);
+        return;
+    }
+    //not in flush
+    // If the frame number doesn't exist in the pending request list,
+    // directly send the buffer to the frameworks, and update pending buffers map
+    // Otherwise, book-keep the buffer.
+    pendingRequestIterator i = mPendingRequestsList.begin();
+    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
+        i++;
+    }
+    if (i == mPendingRequestsList.end()) {
+        // Verify all pending requests frame_numbers are greater
+        for (pendingRequestIterator j = mPendingRequestsList.begin();
+                j != mPendingRequestsList.end(); j++) {
+            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
+                LOGW("Error: pending live frame number %d is smaller than %d",
+                         j->frame_number, frame_number);
+            }
+        }
+        // Buffer-only result: no metadata, not a partial result.
+        camera3_capture_result_t result;
+        memset(&result, 0, sizeof(camera3_capture_result_t));
+        result.result = NULL;
+        result.frame_number = frame_number;
+        result.num_output_buffers = 1;
+        result.partial_result = 0;
+        // Mark the buffer as erroneous if this (stream, frame) pair was
+        // flagged for a frame drop; the matched entry is consumed here.
+        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
+                m != mPendingFrameDropList.end(); m++) {
+            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
+            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
+            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
+                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
+                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
+                         frame_number, streamID);
+                m = mPendingFrameDropList.erase(m);
+                break;
+            }
+        }
+        result.output_buffers = buffer;
+        LOGH("result frame_number = %d, buffer = %p",
+                 frame_number, buffer->buffer);
+
+        mPendingBuffersMap.removeBuf(buffer->buffer);
+
+        mCallbackOps->process_capture_result(mCallbackOps, &result);
+    } else {
+        if (i->input_buffer) {
+            // Reprocess request: build the shutter notify from the request's
+            // sensor timestamp (falling back to the current time).
+            CameraMetadata settings;
+            camera3_notify_msg_t notify_msg;
+            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
+            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
+            if(i->settings) {
+                settings = i->settings;
+                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
+                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
+                } else {
+                    LOGW("No timestamp in input settings! Using current one.");
+                }
+            } else {
+                LOGE("Input settings missing!");
+            }
+
+            notify_msg.type = CAMERA3_MSG_SHUTTER;
+            notify_msg.message.shutter.frame_number = frame_number;
+            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
+
+            // Consume the input buffer's release fence before handing the
+            // result back to the framework.
+            if (i->input_buffer->release_fence != -1) {
+               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
+               close(i->input_buffer->release_fence);
+               if (rc != OK) {
+                   LOGE("input buffer sync wait failed %d", rc);
+               }
+            }
+            mPendingBuffersMap.removeBuf(buffer->buffer);
+
+            // Results must go back in frame-number order: only notify now if
+            // no older pending request is still outstanding.
+            bool notifyNow = true;
+            for (pendingRequestIterator j = mPendingRequestsList.begin();
+                    j != mPendingRequestsList.end(); j++) {
+                if (j->frame_number < frame_number) {
+                    notifyNow = false;
+                    break;
+                }
+            }
+
+            if (notifyNow) {
+                camera3_capture_result result;
+                memset(&result, 0, sizeof(camera3_capture_result));
+                result.frame_number = frame_number;
+                result.result = i->settings;
+                result.input_buffer = i->input_buffer;
+                result.num_output_buffers = 1;
+                result.output_buffers = buffer;
+                result.partial_result = PARTIAL_RESULT_COUNT;
+
+                mCallbackOps->notify(mCallbackOps, &notify_msg);
+                mCallbackOps->process_capture_result(mCallbackOps, &result);
+                LOGD("Notify reprocess now %d!", frame_number);
+                i = erasePendingRequest(i);
+            } else {
+                // Cache reprocess result for later
+                PendingReprocessResult pendingResult;
+                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
+                pendingResult.notify_msg = notify_msg;
+                pendingResult.buffer = *buffer;
+                pendingResult.frame_number = frame_number;
+                mPendingReprocessResultList.push_back(pendingResult);
+                LOGD("Cache reprocess result %d!", frame_number);
+            }
+        } else {
+            // Normal request: stash a heap copy of the buffer in the request's
+            // per-stream slot; it is returned once the metadata arrives.
+            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
+                j != i->buffers.end(); j++) {
+                if (j->stream == buffer->stream) {
+                    if (j->buffer != NULL) {
+                        LOGE("Error: buffer is already set");
+                    } else {
+                        j->buffer = (camera3_stream_buffer_t *)malloc(
+                            sizeof(camera3_stream_buffer_t));
+                        *(j->buffer) = *buffer;
+                        LOGH("cache buffer %p at result frame_number %u",
+                             buffer->buffer, frame_number);
+                    }
+                }
+            }
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : unblockRequestIfNecessary
+ *
+ * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
+ *              that mMutex is held when this function is called.
+ *
+ * PARAMETERS :
+ *
+ * RETURN     :
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::unblockRequestIfNecessary()
+{
+    // Wake one thread blocked in processCaptureRequest on mRequestCond; the
+    // waiter re-checks its predicate under mMutex, so a spurious wakeup is
+    // harmless.
+    pthread_cond_signal(&mRequestCond);
+}
+
+
+/*===========================================================================
+ * FUNCTION   : processCaptureRequest
+ *
+ * DESCRIPTION: process a capture request from camera service
+ *
+ * PARAMETERS :
+ *   @request : request from framework to process
+ *
+ * RETURN     :
+ *
+ *==========================================================================*/
+int QCamera3HardwareInterface::processCaptureRequest(
+                    camera3_capture_request_t *request)
+{
+    ATRACE_CALL();
+    int rc = NO_ERROR;
+    int32_t request_id;
+    CameraMetadata meta;
+    uint32_t minInFlightRequests = MIN_INFLIGHT_REQUESTS;
+    uint32_t maxInFlightRequests = MAX_INFLIGHT_REQUESTS;
+    bool isVidBufRequested = false;
+    camera3_stream_buffer_t *pInputBuffer = NULL;
+
+    pthread_mutex_lock(&mMutex);
+
+    // Validate current state
+    switch (mState) {
+        case CONFIGURED:
+        case STARTED:
+            /* valid state */
+            break;
+
+        case ERROR:
+            pthread_mutex_unlock(&mMutex);
+            handleCameraDeviceError();
+            return -ENODEV;
+
+        default:
+            LOGE("Invalid state %d", mState);
+            pthread_mutex_unlock(&mMutex);
+            return -ENODEV;
+    }
+
+    rc = validateCaptureRequest(request);
+    if (rc != NO_ERROR) {
+        LOGE("incoming request is not valid");
+        pthread_mutex_unlock(&mMutex);
+        return rc;
+    }
+
+    meta = request->settings;
+
+    // For first capture request, send capture intent, and
+    // stream on all streams
+    if (mState == CONFIGURED) {
+        // send an unconfigure to the backend so that the isp
+        // resources are deallocated
+        if (!mFirstConfiguration) {
+            cam_stream_size_info_t stream_config_info;
+            int32_t hal_version = CAM_HAL_V3;
+            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
+            stream_config_info.buffer_info.min_buffers =
+                    MIN_INFLIGHT_REQUESTS;
+            stream_config_info.buffer_info.max_buffers =
+                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
+            clear_metadata_buffer(mParameters);
+            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
+                    CAM_INTF_PARM_HAL_VERSION, hal_version);
+            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
+                    CAM_INTF_META_STREAM_INFO, stream_config_info);
+            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
+                    mParameters);
+            if (rc < 0) {
+                LOGE("set_parms for unconfigure failed");
+                pthread_mutex_unlock(&mMutex);
+                return rc;
+            }
+        }
+        m_perfLock.lock_acq();
+        /* get eis information for stream configuration */
+        cam_is_type_t is_type;
+        char is_type_value[PROPERTY_VALUE_MAX];
+        property_get("persist.camera.is_type", is_type_value, "0");
+        is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
+
+        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
+            int32_t hal_version = CAM_HAL_V3;
+            uint8_t captureIntent =
+                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
+            mCaptureIntent = captureIntent;
+            clear_metadata_buffer(mParameters);
+            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
+            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
+        }
+
+        //If EIS is enabled, turn it on for video
+        bool setEis = m_bEisEnable && m_bEisSupportedSize;
+        int32_t vsMode;
+        vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
+            rc = BAD_VALUE;
+        }
+
+        //IS type will be 0 unless EIS is supported. If EIS is supported
+        //it could either be 1 or 4 depending on the stream and video size
+        for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
+            if (setEis) {
+                if (!m_bEisSupportedSize) {
+                    is_type = IS_TYPE_DIS;
+                    } else {
+                    if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
+                        is_type = IS_TYPE_EIS_2_0;
+                    }else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO) {
+                        is_type = IS_TYPE_EIS_3_0;
+                    }else {
+                        is_type = IS_TYPE_NONE;
+                    }
+                 }
+                 mStreamConfigInfo.is_type[i] = is_type;
+            }
+            else {
+                 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
+            }
+        }
+
+        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
+                CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
+
+        int32_t tintless_value = 1;
+        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
+                CAM_INTF_PARM_TINTLESS, tintless_value);
+        //Disable CDS for HFR mode or if DIS/EIS is on.
+        //CDS is a session parameter in the backend/ISP, so need to be set/reset
+        //after every configure_stream
+        if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
+                (m_bIsVideo)) {
+            int32_t cds = CAM_CDS_MODE_OFF;
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
+                    CAM_INTF_PARM_CDS_MODE, cds))
+                LOGE("Failed to disable CDS for HFR mode");
+
+        }
+        setMobicat();
+
+        /* Set fps and hfr mode while sending meta stream info so that sensor
+         * can configure appropriate streaming mode */
+        mHFRVideoFps = DEFAULT_VIDEO_FPS;
+        if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
+            rc = setHalFpsRange(meta, mParameters);
+            if (rc != NO_ERROR) {
+                LOGE("setHalFpsRange failed");
+            }
+        }
+        if (meta.exists(ANDROID_CONTROL_MODE)) {
+            uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
+            rc = extractSceneMode(meta, metaMode, mParameters);
+            if (rc != NO_ERROR) {
+                LOGE("extractSceneMode failed");
+            }
+        }
+
+        //TODO: validate the arguments, HSV scenemode should have only the
+        //advertised fps ranges
+
+        /*set the capture intent, hal version, tintless, stream info,
+         *and disenable parameters to the backend*/
+        LOGD("set_parms META_STREAM_INFO " );
+        for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
+            LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
+                    "Format:%d",
+                    mStreamConfigInfo.type[i],
+                    mStreamConfigInfo.stream_sizes[i].width,
+                    mStreamConfigInfo.stream_sizes[i].height,
+                    mStreamConfigInfo.postprocess_mask[i],
+                    mStreamConfigInfo.format[i]);
+        }
+        rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
+                    mParameters);
+        if (rc < 0) {
+            LOGE("set_parms failed for hal version, stream info");
+        }
+
+        cam_dimension_t sensor_dim;
+        memset(&sensor_dim, 0, sizeof(sensor_dim));
+        rc = getSensorOutputSize(sensor_dim);
+        if (rc != NO_ERROR) {
+            LOGE("Failed to get sensor output size");
+            pthread_mutex_unlock(&mMutex);
+            goto error_exit;
+        }
+
+        mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
+                gCamCapability[mCameraId]->active_array_size.height,
+                sensor_dim.width, sensor_dim.height);
+
+        /* Set batchmode before initializing channel. Since registerBuffer
+         * internally initializes some of the channels, better set batchmode
+         * even before first register buffer */
+        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
+            it != mStreamInfo.end(); it++) {
+            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
+            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
+                    && mBatchSize) {
+                rc = channel->setBatchSize(mBatchSize);
+                //Disable per frame map unmap for HFR/batchmode case
+                rc |= channel->setPerFrameMapUnmap(false);
+                if (NO_ERROR != rc) {
+                    LOGE("Channel init failed %d", rc);
+                    pthread_mutex_unlock(&mMutex);
+                    goto error_exit;
+                }
+            }
+        }
+
+        //First initialize all streams
+        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
+            it != mStreamInfo.end(); it++) {
+            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
+            if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
+               ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
+               setEis)
+                rc = channel->initialize(is_type);
+            else {
+                rc = channel->initialize(IS_TYPE_NONE);
+            }
+            if (NO_ERROR != rc) {
+                LOGE("Channel initialization failed %d", rc);
+                pthread_mutex_unlock(&mMutex);
+                goto error_exit;
+            }
+        }
+
+        if (mRawDumpChannel) {
+            rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
+            if (rc != NO_ERROR) {
+                LOGE("Error: Raw Dump Channel init failed");
+                pthread_mutex_unlock(&mMutex);
+                goto error_exit;
+            }
+        }
+        if (mSupportChannel) {
+            rc = mSupportChannel->initialize(IS_TYPE_NONE);
+            if (rc < 0) {
+                LOGE("Support channel initialization failed");
+                pthread_mutex_unlock(&mMutex);
+                goto error_exit;
+            }
+        }
+        if (mAnalysisChannel) {
+            rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
+            if (rc < 0) {
+                LOGE("Analysis channel initialization failed");
+                pthread_mutex_unlock(&mMutex);
+                goto error_exit;
+            }
+        }
+        if (mDummyBatchChannel) {
+            rc = mDummyBatchChannel->setBatchSize(mBatchSize);
+            if (rc < 0) {
+                LOGE("mDummyBatchChannel setBatchSize failed");
+                pthread_mutex_unlock(&mMutex);
+                goto error_exit;
+            }
+            rc = mDummyBatchChannel->initialize(is_type);
+            if (rc < 0) {
+                LOGE("mDummyBatchChannel initialization failed");
+                pthread_mutex_unlock(&mMutex);
+                goto error_exit;
+            }
+        }
+
+        // Set bundle info
+        rc = setBundleInfo();
+        if (rc < 0) {
+            LOGE("setBundleInfo failed %d", rc);
+            pthread_mutex_unlock(&mMutex);
+            goto error_exit;
+        }
+
+        //update settings from app here
+        if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
+            mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
+            LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
+        }
+        if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
+            mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
+            LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
+        }
+        if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
+            mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
+            LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
+
+            if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
+                (mLinkedCameraId != mCameraId) ) {
+                LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
+                    mLinkedCameraId, mCameraId);
+                goto error_exit;
+            }
+        }
+
+        // add bundle related cameras
+        LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
+        if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
+            if (mIsDeviceLinked)
+                m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
+            else
+                m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
+
+            pthread_mutex_lock(&gCamLock);
+
+            if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
+                LOGE("Dualcam: Invalid Session Id ");
+                pthread_mutex_unlock(&gCamLock);
+                goto error_exit;
+            }
+
+            if (mIsMainCamera == 1) {
+                m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
+                m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
+                // related session id should be session id of linked session
+                m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
+            } else {
+                m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
+                m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
+                m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
+            }
+            pthread_mutex_unlock(&gCamLock);
+
+            rc = mCameraHandle->ops->sync_related_sensors(
+                    mCameraHandle->camera_handle, m_pRelCamSyncBuf);
+            if (rc < 0) {
+                LOGE("Dualcam: link failed");
+                goto error_exit;
+            }
+        }
+
+        //Then start them.
+        LOGH("Start META Channel");
+        rc = mMetadataChannel->start();
+        if (rc < 0) {
+            LOGE("META channel start failed");
+            pthread_mutex_unlock(&mMutex);
+            goto error_exit;
+        }
+
+        if (mAnalysisChannel) {
+            rc = mAnalysisChannel->start();
+            if (rc < 0) {
+                LOGE("Analysis channel start failed");
+                mMetadataChannel->stop();
+                pthread_mutex_unlock(&mMutex);
+                goto error_exit;
+            }
+        }
+
+        if (mSupportChannel) {
+            rc = mSupportChannel->start();
+            if (rc < 0) {
+                LOGE("Support channel start failed");
+                mMetadataChannel->stop();
+                /* Although support and analysis are mutually exclusive today
+                   adding it in anycase for future proofing */
+                if (mAnalysisChannel) {
+                    mAnalysisChannel->stop();
+                }
+                pthread_mutex_unlock(&mMutex);
+                goto error_exit;
+            }
+        }
+        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
+            it != mStreamInfo.end(); it++) {
+            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
+            LOGH("Start Processing Channel mask=%d",
+                     channel->getStreamTypeMask());
+            rc = channel->start();
+            if (rc < 0) {
+                LOGE("channel start failed");
+                pthread_mutex_unlock(&mMutex);
+                goto error_exit;
+            }
+        }
+
+        if (mRawDumpChannel) {
+            LOGD("Starting raw dump stream");
+            rc = mRawDumpChannel->start();
+            if (rc != NO_ERROR) {
+                LOGE("Error Starting Raw Dump Channel");
+                for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
+                      it != mStreamInfo.end(); it++) {
+                    QCamera3Channel *channel =
+                        (QCamera3Channel *)(*it)->stream->priv;
+                    LOGH("Stopping Processing Channel mask=%d",
+                        channel->getStreamTypeMask());
+                    channel->stop();
+                }
+                if (mSupportChannel)
+                    mSupportChannel->stop();
+                if (mAnalysisChannel) {
+                    mAnalysisChannel->stop();
+                }
+                mMetadataChannel->stop();
+                pthread_mutex_unlock(&mMutex);
+                goto error_exit;
+            }
+        }
+
+        if (mChannelHandle) {
+
+            rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
+                    mChannelHandle);
+            if (rc != NO_ERROR) {
+                LOGE("start_channel failed %d", rc);
+                pthread_mutex_unlock(&mMutex);
+                goto error_exit;
+            }
+        }
+
+        goto no_error;
+error_exit:
+        m_perfLock.lock_rel();
+        return rc;
+no_error:
+        m_perfLock.lock_rel();
+
+        mWokenUpByDaemon = false;
+        mPendingLiveRequest = 0;
+        mFirstConfiguration = false;
+        enablePowerHint();
+    }
+
+    uint32_t frameNumber = request->frame_number;
+    cam_stream_ID_t streamID;
+
+    if (mFlushPerf) {
+        //we cannot accept any requests during flush
+        LOGE("process_capture_request cannot proceed during flush");
+        pthread_mutex_unlock(&mMutex);
+        return NO_ERROR; //should return an error
+    }
+
+    if (meta.exists(ANDROID_REQUEST_ID)) {
+        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
+        mCurrentRequestId = request_id;
+        LOGD("Received request with id: %d", request_id);
+    } else if (mState == CONFIGURED || mCurrentRequestId == -1){
+        LOGE("Unable to find request id field, \
+                & no previous id available");
+        pthread_mutex_unlock(&mMutex);
+        return NAME_NOT_FOUND;
+    } else {
+        LOGD("Re-using old request id");
+        request_id = mCurrentRequestId;
+    }
+
+    LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
+                                    request->num_output_buffers,
+                                    request->input_buffer,
+                                    frameNumber);
+    // Acquire all request buffers first
+    streamID.num_streams = 0;
+    int blob_request = 0;
+    uint32_t snapshotStreamId = 0;
+    for (size_t i = 0; i < request->num_output_buffers; i++) {
+        const camera3_stream_buffer_t& output = request->output_buffers[i];
+        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
+
+        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
+            //Call function to store local copy of jpeg data for encode params.
+            blob_request = 1;
+            snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
+        }
+
+        if (output.acquire_fence != -1) {
+           rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
+           close(output.acquire_fence);
+           if (rc != OK) {
+              LOGE("sync wait failed %d", rc);
+              pthread_mutex_unlock(&mMutex);
+              return rc;
+           }
+        }
+
+        streamID.streamID[streamID.num_streams] =
+            channel->getStreamID(channel->getStreamTypeMask());
+        streamID.num_streams++;
+
+        if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
+            isVidBufRequested = true;
+        }
+    }
+
+    if (blob_request) {
+        KPI_ATRACE_INT("SNAPSHOT", 1);
+    }
+    if (blob_request && mRawDumpChannel) {
+        LOGD("Trigger Raw based on blob request if Raw dump is enabled");
+        streamID.streamID[streamID.num_streams] =
+            mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
+        streamID.num_streams++;
+    }
+
+    if(request->input_buffer == NULL) {
+        /* Parse the settings:
+         * - For every request in NORMAL MODE
+         * - For every request in HFR mode during preview only case
+         * - For first request of every batch in HFR mode during video
+         * recording. In batchmode the same settings except frame number is
+         * repeated in each request of the batch.
+         */
+        if (!mBatchSize ||
+           (mBatchSize && !isVidBufRequested) ||
+           (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
+            rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
+            if (rc < 0) {
+                LOGE("fail to set frame parameters");
+                pthread_mutex_unlock(&mMutex);
+                return rc;
+            }
+        }
+        /* For batchMode HFR, setFrameParameters is not called for every
+         * request. But only frame number of the latest request is parsed.
+         * Keep track of first and last frame numbers in a batch so that
+         * metadata for the frame numbers of batch can be duplicated in
+         * handleBatchMetadta */
+        if (mBatchSize) {
+            if (!mToBeQueuedVidBufs) {
+                //start of the batch
+                mFirstFrameNumberInBatch = request->frame_number;
+            }
+            if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
+                CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
+                LOGE("Failed to set the frame number in the parameters");
+                return BAD_VALUE;
+            }
+        }
+        if (mNeedSensorRestart) {
+            /* Unlock the mutex as restartSensor waits on the channels to be
+             * stopped, which in turn calls stream callback functions -
+             * handleBufferWithLock and handleMetadataWithLock */
+            pthread_mutex_unlock(&mMutex);
+            rc = dynamicUpdateMetaStreamInfo();
+            if (rc != NO_ERROR) {
+                LOGE("Restarting the sensor failed");
+                return BAD_VALUE;
+            }
+            mNeedSensorRestart = false;
+            pthread_mutex_lock(&mMutex);
+        }
+    } else {
+
+        if (request->input_buffer->acquire_fence != -1) {
+           rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
+           close(request->input_buffer->acquire_fence);
+           if (rc != OK) {
+              LOGE("input buffer sync wait failed %d", rc);
+              pthread_mutex_unlock(&mMutex);
+              return rc;
+           }
+        }
+    }
+
+    if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
+        mLastCustIntentFrmNum = frameNumber;
+    }
+    /* Update pending request list and pending buffers map */
+    PendingRequestInfo pendingRequest;
+    pendingRequestIterator latestRequest;
+    pendingRequest.frame_number = frameNumber;
+    pendingRequest.num_buffers = request->num_output_buffers;
+    pendingRequest.request_id = request_id;
+    pendingRequest.blob_request = blob_request;
+    pendingRequest.timestamp = 0;
+    pendingRequest.bUrgentReceived = 0;
+    if (request->input_buffer) {
+        pendingRequest.input_buffer =
+                (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
+        *(pendingRequest.input_buffer) = *(request->input_buffer);
+        pInputBuffer = pendingRequest.input_buffer;
+    } else {
+       pendingRequest.input_buffer = NULL;
+       pInputBuffer = NULL;
+    }
+
+    pendingRequest.pipeline_depth = 0;
+    pendingRequest.partial_result_cnt = 0;
+    extractJpegMetadata(mCurJpegMeta, request);
+    pendingRequest.jpegMetadata = mCurJpegMeta;
+    pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
+    pendingRequest.shutter_notified = false;
+
+    //extract capture intent
+    if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
+        mCaptureIntent =
+                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
+    }
+    pendingRequest.capture_intent = mCaptureIntent;
+
+    //extract CAC info
+    if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
+        mCacMode =
+                meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
+    }
+    pendingRequest.fwkCacMode = mCacMode;
+
+    PendingBuffersInRequest bufsForCurRequest;
+    bufsForCurRequest.frame_number = frameNumber;
+    // Mark current timestamp for the new request
+    bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
+
+    for (size_t i = 0; i < request->num_output_buffers; i++) {
+        RequestedBufferInfo requestedBuf;
+        memset(&requestedBuf, 0, sizeof(requestedBuf));
+        requestedBuf.stream = request->output_buffers[i].stream;
+        requestedBuf.buffer = NULL;
+        pendingRequest.buffers.push_back(requestedBuf);
+
+        // Add to buffer handle the pending buffers list
+        PendingBufferInfo bufferInfo;
+        bufferInfo.buffer = request->output_buffers[i].buffer;
+        bufferInfo.stream = request->output_buffers[i].stream;
+        bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
+        QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
+        LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
+            frameNumber, bufferInfo.buffer,
+            channel->getStreamTypeMask(), bufferInfo.stream->format);
+    }
+    // Add this request packet into mPendingBuffersMap
+    mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
+    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
+        mPendingBuffersMap.get_num_overall_buffers());
+
+    latestRequest = mPendingRequestsList.insert(
+            mPendingRequestsList.end(), pendingRequest);
+    if(mFlush) {
+        LOGI("mFlush is true");
+        pthread_mutex_unlock(&mMutex);
+        return NO_ERROR;
+    }
+
+    // Notify metadata channel we receive a request
+    mMetadataChannel->request(NULL, frameNumber);
+
+    if(request->input_buffer != NULL){
+        LOGD("Input request, frame_number %d", frameNumber);
+        rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
+        if (NO_ERROR != rc) {
+            LOGE("fail to set reproc parameters");
+            pthread_mutex_unlock(&mMutex);
+            return rc;
+        }
+    }
+
+    // Call request on other streams
+    uint32_t streams_need_metadata = 0;
+    pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
+    for (size_t i = 0; i < request->num_output_buffers; i++) {
+        const camera3_stream_buffer_t& output = request->output_buffers[i];
+        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
+
+        if (channel == NULL) {
+            LOGW("invalid channel pointer for stream");
+            continue;
+        }
+
+        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
+            LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
+                      output.buffer, request->input_buffer, frameNumber);
+            if(request->input_buffer != NULL){
+                rc = channel->request(output.buffer, frameNumber,
+                        pInputBuffer, &mReprocMeta);
+                if (rc < 0) {
+                    LOGE("Fail to request on picture channel");
+                    pthread_mutex_unlock(&mMutex);
+                    return rc;
+                }
+            } else {
+                LOGD("snapshot request with buffer %p, frame_number %d",
+                         output.buffer, frameNumber);
+                if (!request->settings) {
+                    rc = channel->request(output.buffer, frameNumber,
+                            NULL, mPrevParameters);
+                } else {
+                    rc = channel->request(output.buffer, frameNumber,
+                            NULL, mParameters);
+                }
+                if (rc < 0) {
+                    LOGE("Fail to request on picture channel");
+                    pthread_mutex_unlock(&mMutex);
+                    return rc;
+                }
+                pendingBufferIter->need_metadata = true;
+                streams_need_metadata++;
+            }
+        } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
+            bool needMetadata = false;
+
+            if (m_perfLock.isPerfLockTimedAcquired()) {
+                if (m_perfLock.isTimerReset())
+                {
+                    m_perfLock.lock_rel_timed();
+                    m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
+                }
+            } else {
+                m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
+            }
+
+            QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
+            rc = yuvChannel->request(output.buffer, frameNumber,
+                    pInputBuffer,
+                    (pInputBuffer ? &mReprocMeta : mParameters), needMetadata);
+            if (rc < 0) {
+                LOGE("Fail to request on YUV channel");
+                pthread_mutex_unlock(&mMutex);
+                return rc;
+            }
+            pendingBufferIter->need_metadata = needMetadata;
+            if (needMetadata)
+                streams_need_metadata += 1;
+            LOGD("calling YUV channel request, need_metadata is %d",
+                     needMetadata);
+        } else {
+            LOGD("request with buffer %p, frame_number %d",
+                  output.buffer, frameNumber);
+            /* Set perf lock for API-2 zsl */
+            if (IS_USAGE_ZSL(output.stream->usage)) {
+                if (m_perfLock.isPerfLockTimedAcquired()) {
+                    if (m_perfLock.isTimerReset())
+                    {
+                        m_perfLock.lock_rel_timed();
+                        m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
+                    }
+                } else {
+                    m_perfLock.lock_acq_timed(BURST_REPROCESS_PERF_TIME_OUT);
+                }
+            }
+
+            rc = channel->request(output.buffer, frameNumber);
+            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
+                    && mBatchSize) {
+                mToBeQueuedVidBufs++;
+                if (mToBeQueuedVidBufs == mBatchSize) {
+                    channel->queueBatchBuf();
+                }
+            }
+            if (rc < 0) {
+                LOGE("request failed");
+                pthread_mutex_unlock(&mMutex);
+                return rc;
+            }
+        }
+        pendingBufferIter++;
+    }
+
+    //If 2 streams have need_metadata set to true, fail the request, unless
+    //we copy/reference count the metadata buffer
+    if (streams_need_metadata > 1) {
+        LOGE("not supporting request in which two streams requires"
+                " 2 HAL metadata for reprocessing");
+        pthread_mutex_unlock(&mMutex);
+        return -EINVAL;
+    }
+
+    if(request->input_buffer == NULL) {
+        /* Set the parameters to backend:
+         * - For every request in NORMAL MODE
+         * - For every request in HFR mode during preview only case
+         * - Once every batch in HFR mode during video recording
+         */
+        if (!mBatchSize ||
+           (mBatchSize && !isVidBufRequested) ||
+           (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
+            LOGD("set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
+                     mBatchSize, isVidBufRequested,
+                    mToBeQueuedVidBufs);
+            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
+                    mParameters);
+            if (rc < 0) {
+                LOGE("set_parms failed");
+            }
+            /* reset to zero coz, the batch is queued */
+            mToBeQueuedVidBufs = 0;
+            mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
+        }
+        mPendingLiveRequest++;
+    }
+
+    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
+
+    mState = STARTED;
+    // Added a timed condition wait
+    struct timespec ts;
+    uint8_t isValidTimeout = 1;
+    rc = clock_gettime(CLOCK_REALTIME, &ts);
+    if (rc < 0) {
+      isValidTimeout = 0;
+      LOGE("Error reading the real time clock!!");
+    }
+    else {
+      // Make timeout as 5 sec for request to be honored
+      ts.tv_sec += 5;
+    }
+    //Block on conditional variable
+    if (mBatchSize) {
+        /* For HFR, more buffers are dequeued upfront to improve the performance */
+        minInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
+        maxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
+    }
+    if (m_perfLock.isPerfLockTimedAcquired() && m_perfLock.isTimerReset())
+        m_perfLock.lock_rel_timed();
+
+    while ((mPendingLiveRequest >= minInFlightRequests) && !pInputBuffer &&
+            (mState != ERROR) && (mState != DEINIT)) {
+        if (!isValidTimeout) {
+            LOGD("Blocking on conditional wait");
+            pthread_cond_wait(&mRequestCond, &mMutex);
+        }
+        else {
+            LOGD("Blocking on timed conditional wait");
+            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
+            if (rc == ETIMEDOUT) {
+                rc = -ENODEV;
+                LOGE("Unblocked on timeout!!!!");
+                break;
+            }
+        }
+        LOGD("Unblocked");
+        if (mWokenUpByDaemon) {
+            mWokenUpByDaemon = false;
+            if (mPendingLiveRequest < maxInFlightRequests)
+                break;
+        }
+    }
+    pthread_mutex_unlock(&mMutex);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : dump
+ *
+ * DESCRIPTION: Writes HAL3 debug state to the supplied file descriptor as
+ *              formatted tables: the pending request list, the pending
+ *              buffer map, and the pending frame drop list. Also arms
+ *              mUpdateDebugLevel so a later callback refreshes debug log
+ *              levels (triggered via "dumpsys media.camera").
+ *
+ * PARAMETERS :
+ *   @fd : file descriptor to write the report to
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera3HardwareInterface::dump(int fd)
+{
+    // Hold mMutex so the lists are not mutated while we walk them.
+    pthread_mutex_lock(&mMutex);
+    dprintf(fd, "\n Camera HAL3 information Begin \n");
+
+    dprintf(fd, "\nNumber of pending requests: %zu \n",
+        mPendingRequestsList.size());
+    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
+    dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
+    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
+    for(pendingRequestIterator i = mPendingRequestsList.begin();
+            i != mPendingRequestsList.end(); i++) {
+        dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
+        i->frame_number, i->num_buffers, i->request_id, i->blob_request,
+        i->input_buffer);
+    }
+    dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
+                mPendingBuffersMap.get_num_overall_buffers());
+    dprintf(fd, "-------+------------------\n");
+    dprintf(fd, " Frame | Stream type mask \n");
+    dprintf(fd, "-------+------------------\n");
+    // One row per pending buffer, grouped by the request that queued it.
+    for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
+        for(auto &j : req.mPendingBufferList) {
+            QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
+            dprintf(fd, " %5d | %11d \n",
+                    req.frame_number, channel->getStreamTypeMask());
+        }
+    }
+    dprintf(fd, "-------+------------------\n");
+
+    dprintf(fd, "\nPending frame drop list: %zu\n",
+        mPendingFrameDropList.size());
+    dprintf(fd, "-------+-----------\n");
+    dprintf(fd, " Frame | Stream ID \n");
+    dprintf(fd, "-------+-----------\n");
+    for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
+        i != mPendingFrameDropList.end(); i++) {
+        dprintf(fd, " %5d | %9d \n",
+            i->frame_number, i->stream_ID);
+    }
+    dprintf(fd, "-------+-----------\n");
+
+    dprintf(fd, "\n Camera HAL3 information End \n");
+
+    /* use dumpsys media.camera as trigger to send update debug level event */
+    mUpdateDebugLevel = true;
+    pthread_mutex_unlock(&mMutex);
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : flush
+ *
+ * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
+ *              conditionally restarts channels
+ *
+ * PARAMETERS :
+ *  @ restartChannels: re-start all channels
+ *
+ *
+ * RETURN     :
+ *          0 on success
+ *          Error code on failure
+ *==========================================================================*/
+int QCamera3HardwareInterface::flush(bool restartChannels)
+{
+    KPI_ATRACE_CALL();
+    int32_t rc = NO_ERROR;
+
+    LOGD("Unblocking Process Capture Request");
+    pthread_mutex_lock(&mMutex);
+    mFlush = true;
+    pthread_mutex_unlock(&mMutex);
+
+    rc = stopAllChannels();
+    // unlink of dualcam
+    if (mIsDeviceLinked) {
+        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
+        pthread_mutex_lock(&gCamLock);
+
+        if (mIsMainCamera == 1) {
+            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
+            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
+            // related session id should be session id of linked session
+            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
+        } else {
+            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
+            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
+            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
+        }
+        pthread_mutex_unlock(&gCamLock);
+
+        // Use a separate status for the unlink: previously its result
+        // clobbered rc from stopAllChannels, so an unlink failure caused an
+        // early return below even though the intent (per the log message)
+        // is to proceed with the flush regardless.
+        int32_t unlinkRc = mCameraHandle->ops->sync_related_sensors(
+                mCameraHandle->camera_handle, m_pRelCamSyncBuf);
+        if (unlinkRc < 0) {
+            LOGE("Dualcam: Unlink failed, but still proceed to close");
+        }
+    }
+
+    if (rc < 0) {
+        LOGE("stopAllChannels failed");
+        return rc;
+    }
+    if (mChannelHandle) {
+        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
+                mChannelHandle);
+    }
+
+    // Reset bundle info
+    rc = setBundleInfo();
+    if (rc < 0) {
+        LOGE("setBundleInfo failed %d", rc);
+        return rc;
+    }
+
+    // Mutex Lock
+    pthread_mutex_lock(&mMutex);
+
+    // Unblock process_capture_request
+    mPendingLiveRequest = 0;
+    pthread_cond_signal(&mRequestCond);
+
+    rc = notifyErrorForPendingRequests();
+    if (rc < 0) {
+        LOGE("notifyErrorForPendingRequests failed");
+        pthread_mutex_unlock(&mMutex);
+        return rc;
+    }
+
+    mFlush = false;
+
+    // Start the Streams/Channels
+    if (restartChannels) {
+        rc = startAllChannels();
+        if (rc < 0) {
+            LOGE("startAllChannels failed");
+            pthread_mutex_unlock(&mMutex);
+            return rc;
+        }
+        // Restart the channel bundle only when the caller asked for a
+        // restart; previously this ran unconditionally, restarting the
+        // channel even on an internal error flush(false). Also capture the
+        // return code -- the old code tested a stale rc after the call.
+        if (mChannelHandle) {
+            rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
+                        mChannelHandle);
+            if (rc < 0) {
+                LOGE("start_channel failed");
+                pthread_mutex_unlock(&mMutex);
+                return rc;
+            }
+        }
+    }
+
+    pthread_mutex_unlock(&mMutex);
+
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : flushPerf
+ *
+ * DESCRIPTION: This is the performance optimization version of flush that does
+ *              not use stream off, rather flushes the system
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : 0 : success
+ *              -EINVAL: input is malformed (device is not valid)
+ *              -ENODEV: if the device has encountered a serious error
+ *==========================================================================*/
+int QCamera3HardwareInterface::flushPerf()
+{
+    ATRACE_CALL();
+    int32_t rc = 0;
+    struct timespec timeout;
+    bool timed_wait = false;
+
+    pthread_mutex_lock(&mMutex);
+    mFlushPerf = true;
+    // Snapshot how many buffers the HAL still owns; buffer callbacks
+    // decrement this count and signal mBuffersCond when it reaches zero.
+    mPendingBuffersMap.numPendingBufsAtFlush =
+        mPendingBuffersMap.get_num_overall_buffers();
+    LOGD("Calling flush. Wait for %d buffers to return",
+        mPendingBuffersMap.numPendingBufsAtFlush);
+
+    /* send the flush event to the backend */
+    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
+    if (rc < 0) {
+        LOGE("Error in flush: IOCTL failure");
+        mFlushPerf = false;
+        pthread_mutex_unlock(&mMutex);
+        return -ENODEV;
+    }
+
+    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
+        // Nothing outstanding -- flush is trivially complete.
+        LOGD("No pending buffers in HAL, return flush");
+        mFlushPerf = false;
+        pthread_mutex_unlock(&mMutex);
+        return rc;
+    }
+
+    /* wait on a signal that buffers were received */
+    rc = clock_gettime(CLOCK_REALTIME, &timeout);
+    if (rc < 0) {
+        // Fall back to an untimed wait if the clock could not be read.
+        LOGE("Error reading the real time clock, cannot use timed wait");
+    } else {
+        // Absolute deadline FLUSH_TIMEOUT seconds from now.
+        timeout.tv_sec += FLUSH_TIMEOUT;
+        timed_wait = true;
+    }
+
+    //Block on conditional variable
+    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
+        LOGD("Waiting on mBuffersCond");
+        if (!timed_wait) {
+            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
+            if (rc != 0) {
+                 LOGE("pthread_cond_wait failed due to rc = %s",
+                        strerror(rc));
+                 break;
+            }
+        } else {
+            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
+            if (rc != 0) {
+                // ETIMEDOUT lands here too; any non-zero rc is treated as a
+                // device error below.
+                LOGE("pthread_cond_timedwait failed due to rc = %s",
+                            strerror(rc));
+                break;
+            }
+        }
+    }
+    if (rc != 0) {
+        mFlushPerf = false;
+        pthread_mutex_unlock(&mMutex);
+        return -ENODEV;
+    }
+
+    LOGD("Received buffers, now safe to return them");
+
+    //make sure the channels handle flush
+    //currently only required for the picture channel to release snapshot resources
+    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
+            it != mStreamInfo.end(); it++) {
+        QCamera3Channel *channel = (*it)->channel;
+        if (channel) {
+            rc = channel->flush();
+            if (rc) {
+               LOGE("Flushing the channels failed with error %d", rc);
+               // even though the channel flush failed we need to continue and
+               // return the buffers we have to the framework, however the return
+               // value will be an error
+               rc = -ENODEV;
+            }
+        }
+    }
+
+    /* notify the frameworks and send errored results */
+    rc = notifyErrorForPendingRequests();
+    if (rc < 0) {
+        LOGE("notifyErrorForPendingRequests failed");
+        pthread_mutex_unlock(&mMutex);
+        return rc;
+    }
+
+    //unblock process_capture_request
+    mPendingLiveRequest = 0;
+    unblockRequestIfNecessary();
+
+    mFlushPerf = false;
+    pthread_mutex_unlock(&mMutex);
+    LOGD ("Flush Operation complete. rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : handleCameraDeviceError
+ *
+ * DESCRIPTION: Recovery path for a fatal device error: performs an internal
+ *              flush (without restarting channels), moves the HAL state to
+ *              DEINIT, and posts CAMERA3_MSG_ERROR_DEVICE to the framework.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : NO_ERROR on Success
+ *              Error code on failure
+ *==========================================================================*/
+int32_t QCamera3HardwareInterface::handleCameraDeviceError()
+{
+    // Snapshot the state under the lock; recovery is only needed in ERROR.
+    pthread_mutex_lock(&mMutex);
+    bool deviceInError = (mState == ERROR);
+    pthread_mutex_unlock(&mMutex);
+    if (!deviceInError) {
+        return NO_ERROR;
+    }
+
+    // Flush without restarting channels; the device is being torn down.
+    int32_t rc = flush(false /* restart channels */);
+    if (NO_ERROR != rc) {
+        LOGE("internal flush to handle mState = ERROR failed");
+    }
+
+    pthread_mutex_lock(&mMutex);
+    mState = DEINIT;
+    pthread_mutex_unlock(&mMutex);
+
+    // Inform the framework of the unrecoverable device error.
+    camera3_notify_msg_t notify_msg;
+    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
+    notify_msg.type = CAMERA3_MSG_ERROR;
+    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
+    notify_msg.message.error.error_stream = NULL;
+    notify_msg.message.error.frame_number = 0;
+    mCallbackOps->notify(mCallbackOps, &notify_msg);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : captureResultCb
+ *
+ * DESCRIPTION: Callback handler for all capture result
+ *              (streams, as well as metadata)
+ *
+ * PARAMETERS :
+ *   @metadata_buf : metadata information
+ *   @buffer       : actual gralloc buffer to be returned to frameworks.
+ *                   NULL if metadata.
+ *   @frame_number : frame number of the result
+ *   @isInputBuffer: true when the result is for an input (reprocess) buffer
+ *
+ * RETURN     : NONE
+ *==========================================================================*/
+void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
+                camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
+{
+    // Metadata results: batched HFR metadata goes through the batch handler
+    // (which manages mMutex itself); otherwise handle under the lock.
+    if (metadata_buf) {
+        if (mBatchSize) {
+            handleBatchMetadata(metadata_buf,
+                    true /* free_and_bufdone_meta_buf */);
+        } else { /* mBatchSize = 0 */
+            hdrPlusPerfLock(metadata_buf);
+            pthread_mutex_lock(&mMutex);
+            handleMetadataWithLock(metadata_buf,
+                    true /* free_and_bufdone_meta_buf */);
+            pthread_mutex_unlock(&mMutex);
+        }
+        return;
+    }
+
+    // Buffer results: dispatch to the input- or output-buffer handler.
+    pthread_mutex_lock(&mMutex);
+    if (isInputBuffer) {
+        handleInputBufferWithLock(frame_number);
+    } else {
+        handleBufferWithLock(buffer, frame_number);
+    }
+    pthread_mutex_unlock(&mMutex);
+}
+
+/*===========================================================================
+ * FUNCTION   : getReprocessibleOutputStreamId
+ *
+ * DESCRIPTION: Get source output stream id for the input reprocess stream
+ *              based on size and format, which would be the largest
+ *              output stream if an input stream exists.
+ *
+ * PARAMETERS :
+ *   @id      : return the stream id if found
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
+{
+    // Without a configured input stream there is nothing to reprocess from.
+    if ((mInputStreamInfo.dim.width <= 0) ||
+            (mInputStreamInfo.dim.height <= 0)) {
+        LOGD("No input stream, so no reprocessible output stream");
+        return NAME_NOT_FOUND;
+    }
+
+    /* check if any output or bidirectional stream with the same size and format
+       and return that stream */
+    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
+            it != mStreamInfo.end(); it++) {
+        camera3_stream_t *stream = (*it)->stream;
+        bool dimsMatch = (stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
+                (stream->height == (uint32_t)mInputStreamInfo.dim.height);
+        if (dimsMatch && (stream->format == mInputStreamInfo.format)) {
+            // Usage flag for an input stream and the source output stream
+            // may be different.
+            LOGD("Found reprocessible output stream! %p", *it);
+            LOGD("input stream usage 0x%x, current stream usage 0x%x",
+                     stream->usage, mInputStreamInfo.usage);
+
+            QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
+            if (channel != NULL && channel->mStreams[0]) {
+                id = channel->mStreams[0]->getMyServerID();
+                return NO_ERROR;
+            }
+        }
+    }
+    return NAME_NOT_FOUND;
+}
+
+/*===========================================================================
+ * FUNCTION   : lookupFwkName
+ *
+ * DESCRIPTION: In case the enum is not same in fwk and backend
+ *              make sure the parameter is correctly propogated
+ *
+ * PARAMETERS  :
+ *   @arr      : map between the two enums
+ *   @len      : len of the map
+ *   @hal_name : name of the hal_parm to map
+ *
+ * RETURN     : int type of status
+ *              fwk_name  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
+        size_t len, halType hal_name)
+{
+    // Linear scan of the HAL -> framework translation table.
+    const mapType *tableEnd = arr + len;
+    for (const mapType *entry = arr; entry != tableEnd; entry++) {
+        if (entry->hal_name == hal_name) {
+            return entry->fwk_name;
+        }
+    }
+
+    /* Not able to find matching framework type is not necessarily
+     * an error case. This happens when mm-camera supports more attributes
+     * than the frameworks do */
+    LOGH("Cannot find matching framework type");
+    return NAME_NOT_FOUND;
+}
+
+/*===========================================================================
+ * FUNCTION   : lookupHalName
+ *
+ * DESCRIPTION: In case the enum is not same in fwk and backend
+ *              make sure the parameter is correctly propogated
+ *
+ * PARAMETERS  :
+ *   @arr      : map between the two enums
+ *   @len      : len of the map
+ *   @fwk_name : name of the hal_parm to map
+ *
+ * RETURN     : int32_t type of status
+ *              hal_name  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
+        size_t len, fwkType fwk_name)
+{
+    // Linear scan of the framework -> HAL translation table.
+    const mapType *tableEnd = arr + len;
+    for (const mapType *entry = arr; entry != tableEnd; entry++) {
+        if (entry->fwk_name == fwk_name) {
+            return entry->hal_name;
+        }
+    }
+
+    // Unlike lookupFwkName, a miss here is unexpected and logged as error.
+    LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
+    return NAME_NOT_FOUND;
+}
+
+/*===========================================================================
+ * FUNCTION   : lookupProp
+ *
+ * DESCRIPTION: Look up a CDS mode value by its string name
+ *
+ * PARAMETERS :
+ *   @arr     : map between the two enums
+ *   @len     : number of entries in the map
+ *   @name    : name to be looked up (may be NULL)
+ *
+ * RETURN     : mapped value if found
+ *              CAM_CDS_MODE_MAX if name is NULL or not found
+ *==========================================================================*/
+template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
+        size_t len, const char *name)
+{
+    // A NULL name can never match any descriptor.
+    if (name == NULL) {
+        return CAM_CDS_MODE_MAX;
+    }
+
+    for (size_t idx = 0; idx < len; idx++) {
+        if (strcmp(arr[idx].desc, name) == 0) {
+            return arr[idx].val;
+        }
+    }
+
+    return CAM_CDS_MODE_MAX;
+}
+
+/*===========================================================================
+ * FUNCTION   : translateFromHalMetadata
+ *
+ * DESCRIPTION: Translate the metadata reported by the HAL/backend into the
+ *              framework's camera_metadata_t representation
+ *
+ * PARAMETERS :
+ *   @metadata : metadata information from callback
+ *   @timestamp: metadata buffer timestamp
+ *   @request_id: request id
+ *   @jpegMetadata: additional jpeg metadata
+ *   @pipeline_depth: frame's pipeline depth
+ *   @capture_intent: capture intent for this frame
+ *   @pprocDone: whether internal offline postprocsesing is done
+ *   @fwk_cacMode: framework-requested chromatic aberration correction mode
+ *
+ * RETURN     : camera_metadata_t*
+ *              metadata in a format specified by fwk
+ *==========================================================================*/
+camera_metadata_t*
+QCamera3HardwareInterface::translateFromHalMetadata(
+                                 metadata_buffer_t *metadata,
+                                 nsecs_t timestamp,
+                                 int32_t request_id,
+                                 const CameraMetadata& jpegMetadata,
+                                 uint8_t pipeline_depth,
+                                 uint8_t capture_intent,
+                                 bool pprocDone,
+                                 uint8_t fwk_cacMode)
+{
+    CameraMetadata camMetadata;
+    camera_metadata_t *resultMetadata;
+
+    if (jpegMetadata.entryCount())
+        camMetadata.append(jpegMetadata);
+
+    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
+    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
+    camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
+    camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
+
+    IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
+        int64_t fwk_frame_number = *frame_number;
+        camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
+    }
+
+    IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
+        int32_t fps_range[2];
+        fps_range[0] = (int32_t)float_range->min_fps;
+        fps_range[1] = (int32_t)float_range->max_fps;
+        camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+                                      fps_range, 2);
+        LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
+             fps_range[0], fps_range[1]);
+    }
+
+    IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
+        camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
+    }
+
+    IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
+        int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
+                METADATA_MAP_SIZE(SCENE_MODES_MAP),
+                *sceneMode);
+        if (NAME_NOT_FOUND != val) {
+            uint8_t fwkSceneMode = (uint8_t)val;
+            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
+            LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
+                     fwkSceneMode);
+        }
+    }
+
+    IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
+        uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
+        camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
+    }
+
+    IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
+        uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
+        camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
+    }
+
+    IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
+        uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
+        camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
+    }
+
+    IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
+            CAM_INTF_META_EDGE_MODE, metadata) {
+        camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
+    }
+
+    IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
+        uint8_t fwk_flashPower = (uint8_t) *flashPower;
+        camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
+    }
+
+    IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
+        camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
+    }
+
+    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
+        if (0 <= *flashState) {
+            uint8_t fwk_flashState = (uint8_t) *flashState;
+            if (!gCamCapability[mCameraId]->flash_available) {
+                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
+            }
+            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
+        }
+    }
+
+    IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
+        int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
+        if (NAME_NOT_FOUND != val) {
+            uint8_t fwk_flashMode = (uint8_t)val;
+            camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
+        }
+    }
+
+    IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
+        uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
+        camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
+    }
+
+    IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
+        camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
+    }
+
+    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
+        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
+    }
+
+    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
+        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
+    }
+
+    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
+        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
+        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
+    }
+
+    IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
+        uint8_t fwk_videoStab = (uint8_t) *videoStab;
+        LOGD("fwk_videoStab = %d", fwk_videoStab);
+        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
+    } else {
+        // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
+        // and so hardcoding the Video Stab result to OFF mode.
+        uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
+        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
+        LOGD("%s: EIS result default to OFF mode", __func__);
+    }
+
+    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
+        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
+        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
+    }
+
+    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
+        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
+    }
+
+    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
+        CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
+
+        LOGD("dynamicblackLevel = %f %f %f %f",
+          blackLevelSourcePattern->cam_black_level[0],
+          blackLevelSourcePattern->cam_black_level[1],
+          blackLevelSourcePattern->cam_black_level[2],
+          blackLevelSourcePattern->cam_black_level[3]);
+    }
+
+    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
+        CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
+        float fwk_blackLevelInd[4];
+
+        fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
+        fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
+        fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
+        fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
+
+        LOGD("applied dynamicblackLevel = %f %f %f %f",
+          blackLevelAppliedPattern->cam_black_level[0],
+          blackLevelAppliedPattern->cam_black_level[1],
+          blackLevelAppliedPattern->cam_black_level[2],
+          blackLevelAppliedPattern->cam_black_level[3]);
+        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
+        camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
+    }
+
+
+    if (gCamCapability[mCameraId]->optical_black_region_count != 0 &&
+        gCamCapability[mCameraId]->optical_black_region_count <= MAX_OPTICAL_BLACK_REGIONS) {
+        int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
+        for (size_t i = 0; i < gCamCapability[mCameraId]->optical_black_region_count * 4; i++) {
+            opticalBlackRegions[i] = gCamCapability[mCameraId]->optical_black_regions[i];
+        }
+        camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_INFO_OPTICALLY_SHIELDED_REGIONS,
+                opticalBlackRegions, gCamCapability[mCameraId]->optical_black_region_count * 4);
+    }
+
+    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
+            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
+        int32_t scalerCropRegion[4];
+        scalerCropRegion[0] = hScalerCropRegion->left;
+        scalerCropRegion[1] = hScalerCropRegion->top;
+        scalerCropRegion[2] = hScalerCropRegion->width;
+        scalerCropRegion[3] = hScalerCropRegion->height;
+
+        // Adjust crop region from sensor output coordinate system to active
+        // array coordinate system.
+        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
+                scalerCropRegion[2], scalerCropRegion[3]);
+
+        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
+    }
+
+    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
+        LOGD("sensorExpTime = %lld", *sensorExpTime);
+        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
+    }
+
+    IF_META_AVAILABLE(int64_t, sensorFameDuration,
+            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
+        LOGD("sensorFameDuration = %lld", *sensorFameDuration);
+        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
+    }
+
+    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
+            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
+        LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
+        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
+                sensorRollingShutterSkew, 1);
+    }
+
+    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
+        LOGD("sensorSensitivity = %d", *sensorSensitivity);
+        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
+
+        //calculate the noise profile based on sensitivity
+        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
+        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
+        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
+        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
+            noise_profile[i]   = noise_profile_S;
+            noise_profile[i+1] = noise_profile_O;
+        }
+        LOGD("noise model entry (S, O) is (%f, %f)",
+                noise_profile_S, noise_profile_O);
+        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
+                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
+    }
+
+    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
+        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
+        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
+    }
+
+    IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
+        int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
+                *faceDetectMode);
+        if (NAME_NOT_FOUND != val) {
+            uint8_t fwk_faceDetectMode = (uint8_t)val;
+            camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
+
+            if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
+                IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
+                        CAM_INTF_META_FACE_DETECTION, metadata) {
+                    uint8_t numFaces = MIN(
+                            faceDetectionInfo->num_faces_detected, MAX_ROI);
+                    int32_t faceIds[MAX_ROI];
+                    uint8_t faceScores[MAX_ROI];
+                    int32_t faceRectangles[MAX_ROI * 4];
+                    int32_t faceLandmarks[MAX_ROI * 6];
+                    size_t j = 0, k = 0;
+
+                    for (size_t i = 0; i < numFaces; i++) {
+                        faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
+                        // Adjust crop region from sensor output coordinate system to active
+                        // array coordinate system.
+                        cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
+                        mCropRegionMapper.toActiveArray(rect.left, rect.top,
+                                rect.width, rect.height);
+
+                        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
+                                faceRectangles+j, -1);
+
+                        j+= 4;
+                    }
+                    if (numFaces <= 0) {
+                        memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
+                        memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
+                        memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
+                        memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
+                    }
+
+                    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
+                            numFaces);
+                    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
+                            faceRectangles, numFaces * 4U);
+                    if (fwk_faceDetectMode ==
+                            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
+                        IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
+                                CAM_INTF_META_FACE_LANDMARK, metadata) {
+
+                            for (size_t i = 0; i < numFaces; i++) {
+                                // Map the co-ordinate sensor output coordinate system to active
+                                // array coordinate system.
+                                mCropRegionMapper.toActiveArray(
+                                        landmarks->face_landmarks[i].left_eye_center.x,
+                                        landmarks->face_landmarks[i].left_eye_center.y);
+                                mCropRegionMapper.toActiveArray(
+                                        landmarks->face_landmarks[i].right_eye_center.x,
+                                        landmarks->face_landmarks[i].right_eye_center.y);
+                                mCropRegionMapper.toActiveArray(
+                                        landmarks->face_landmarks[i].mouth_center.x,
+                                        landmarks->face_landmarks[i].mouth_center.y);
+
+                                convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
+                                k+= 6;
+                            }
+                        }
+
+                        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
+                        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
+                                faceLandmarks, numFaces * 6U);
+                   }
+                }
+            }
+        }
+    }
+
+    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
+        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
+        camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
+    }
+
+    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
+            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
+        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
+        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
+    }
+
+    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
+            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
+        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
+                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
+    }
+
+    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
+            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
+        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
+                CAM_MAX_SHADING_MAP_HEIGHT);
+        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
+                CAM_MAX_SHADING_MAP_WIDTH);
+        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
+                lensShadingMap->lens_shading, 4U * map_width * map_height);
+    }
+
+    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
+        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
+        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
+    }
+
+    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
+        //Populate CAM_INTF_META_TONEMAP_CURVES
+        /* ch0 = G, ch 1 = B, ch 2 = R*/
+        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
+            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
+                     tonemap->tonemap_points_cnt,
+                    CAM_MAX_TONEMAP_CURVE_SIZE);
+            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
+        }
+
+        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
+                        &tonemap->curves[0].tonemap_points[0][0],
+                        tonemap->tonemap_points_cnt * 2);
+
+        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
+                        &tonemap->curves[1].tonemap_points[0][0],
+                        tonemap->tonemap_points_cnt * 2);
+
+        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
+                        &tonemap->curves[2].tonemap_points[0][0],
+                        tonemap->tonemap_points_cnt * 2);
+    }
+
+    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
+            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
+        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
+                CC_GAIN_MAX);
+    }
+
+    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
+            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
+        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
+                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
+                CC_MATRIX_COLS * CC_MATRIX_ROWS);
+    }
+
+    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
+            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
+        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
+            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
+                     toneCurve->tonemap_points_cnt,
+                    CAM_MAX_TONEMAP_CURVE_SIZE);
+            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
+        }
+        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
+                (float*)toneCurve->curve.tonemap_points,
+                toneCurve->tonemap_points_cnt * 2);
+    }
+
+    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
+            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
+        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
+                predColorCorrectionGains->gains, 4);
+    }
+
+    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
+            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
+        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
+                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
+                CC_MATRIX_ROWS * CC_MATRIX_COLS);
+    }
+
+    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
+        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
+    }
+
+    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
+        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
+        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
+    }
+
+    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
+        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
+        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
+    }
+
+    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
+        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
+                *effectMode);
+        if (NAME_NOT_FOUND != val) {
+            uint8_t fwk_effectMode = (uint8_t)val;
+            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
+        }
+    }
+
+    IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
+            CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
+        int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
+                METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
+        if (NAME_NOT_FOUND != fwk_testPatternMode) {
+            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
+        }
+        int32_t fwk_testPatternData[4];
+        fwk_testPatternData[0] = testPatternData->r;
+        fwk_testPatternData[3] = testPatternData->b;
+        switch (gCamCapability[mCameraId]->color_arrangement) {
+        case CAM_FILTER_ARRANGEMENT_RGGB:
+        case CAM_FILTER_ARRANGEMENT_GRBG:
+            fwk_testPatternData[1] = testPatternData->gr;
+            fwk_testPatternData[2] = testPatternData->gb;
+            break;
+        case CAM_FILTER_ARRANGEMENT_GBRG:
+        case CAM_FILTER_ARRANGEMENT_BGGR:
+            fwk_testPatternData[2] = testPatternData->gr;
+            fwk_testPatternData[1] = testPatternData->gb;
+            break;
+        default:
+            LOGE("color arrangement %d is not supported",
+                gCamCapability[mCameraId]->color_arrangement);
+            break;
+        }
+        camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
+    }
+
+    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
+        camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
+    }
+
+    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
+        String8 str((const char *)gps_methods);
+        camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
+    }
+
+    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
+        camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
+    }
+
+    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
+        camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
+    }
+
+    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
+        uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
+        camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
+    }
+
+    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
+        uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
+        camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
+    }
+
+    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
+        int32_t fwk_thumb_size[2];
+        fwk_thumb_size[0] = thumb_size->width;
+        fwk_thumb_size[1] = thumb_size->height;
+        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
+    }
+
+    IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
+        camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
+                privateData,
+                MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
+    }
+
+    if (metadata->is_tuning_params_valid) {
+        uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
+        uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
+        metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
+
+
+        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
+                sizeof(uint32_t));
+        data += sizeof(uint32_t);
+
+        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
+                sizeof(uint32_t));
+        LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
+        data += sizeof(uint32_t);
+
+        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
+                sizeof(uint32_t));
+        LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
+        data += sizeof(uint32_t);
+
+        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
+                sizeof(uint32_t));
+        LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
+        data += sizeof(uint32_t);
+
+        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
+                sizeof(uint32_t));
+        LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
+        data += sizeof(uint32_t);
+
+        metadata->tuning_params.tuning_mod3_data_size = 0;
+        memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
+                sizeof(uint32_t));
+        LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
+        data += sizeof(uint32_t);
+
+        size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
+                TUNING_SENSOR_DATA_MAX);
+        memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
+                count);
+        data += count;
+
+        count = MIN(metadata->tuning_params.tuning_vfe_data_size,
+                TUNING_VFE_DATA_MAX);
+        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
+                count);
+        data += count;
+
+        count = MIN(metadata->tuning_params.tuning_cpp_data_size,
+                TUNING_CPP_DATA_MAX);
+        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
+                count);
+        data += count;
+
+        count = MIN(metadata->tuning_params.tuning_cac_data_size,
+                TUNING_CAC_DATA_MAX);
+        memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
+                count);
+        data += count;
+
+        camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
+                (int32_t *)(void *)tuning_meta_data_blob,
+                (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
+    }
+
+    IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
+            CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
+        camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
+                (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
+                NEUTRAL_COL_POINTS);
+    }
+
+    IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
+        uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
+        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
+    }
+
+    IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
+        int32_t aeRegions[REGIONS_TUPLE_COUNT];
+        // Adjust crop region from sensor output coordinate system to active
+        // array coordinate system.
+        mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
+                hAeRegions->rect.width, hAeRegions->rect.height);
+
+        convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
+        camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
+                REGIONS_TUPLE_COUNT);
+        LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
+                 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
+                hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
+                hAeRegions->rect.height);
+    }
+
+    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
+        uint8_t fwk_afState = (uint8_t) *afState;
+        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
+        LOGD("urgent Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
+    }
+
+    IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
+        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
+    }
+
+    IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
+        camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
+    }
+
+    IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
+        uint8_t fwk_lensState = *lensState;
+        camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
+    }
+
+    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
+        /*af regions*/
+        int32_t afRegions[REGIONS_TUPLE_COUNT];
+        // Adjust crop region from sensor output coordinate system to active
+        // array coordinate system.
+        mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
+                hAfRegions->rect.width, hAfRegions->rect.height);
+
+        convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
+        camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
+                REGIONS_TUPLE_COUNT);
+        LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
+                 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
+                hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
+                hAfRegions->rect.height);
+    }
+
+    IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
+        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
+                *hal_ab_mode);
+        if (NAME_NOT_FOUND != val) {
+            uint8_t fwk_ab_mode = (uint8_t)val;
+            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
+        }
+    }
+
+    IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
+        int val = lookupFwkName(SCENE_MODES_MAP,
+                METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
+        if (NAME_NOT_FOUND != val) {
+            uint8_t fwkBestshotMode = (uint8_t)val;
+            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
+            LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
+        } else {
+            LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
+        }
+    }
+
+    IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
+         uint8_t fwk_mode = (uint8_t) *mode;
+         camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
+    }
+
+    /* Constant metadata values to be update*/
+    uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
+    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
+
+    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
+    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
+
+    int32_t hotPixelMap[2];
+    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
+
+    // CDS
+    IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
+        camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
+    }
+
+    // TNR
+    IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
+        uint8_t tnr_enable       = tnr->denoise_enable;
+        int32_t tnr_process_type = (int32_t)tnr->process_plates;
+
+        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
+        camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
+    }
+
+    // Reprocess crop data
+    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
+        uint8_t cnt = crop_data->num_of_streams;
+        if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
+            // mm-qcamera-daemon only posts crop_data for streams
+            // not linked to pproc. So no valid crop metadata is not
+            // necessarily an error case.
+            LOGD("No valid crop metadata entries");
+        } else {
+            uint32_t reproc_stream_id;
+            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
+                LOGD("No reprocessible stream found, ignore crop data");
+            } else {
+                int rc = NO_ERROR;
+                Vector<int32_t> roi_map;
+                int32_t *crop = new int32_t[cnt*4];
+                if (NULL == crop) {
+                   rc = NO_MEMORY;
+                }
+                if (NO_ERROR == rc) {
+                    int32_t streams_found = 0;
+                    for (size_t i = 0; i < cnt; i++) {
+                        if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
+                            if (pprocDone) {
+                                // HAL already does internal reprocessing,
+                                // either via reprocessing before JPEG encoding,
+                                // or offline postprocessing for pproc bypass case.
+                                crop[0] = 0;
+                                crop[1] = 0;
+                                crop[2] = mInputStreamInfo.dim.width;
+                                crop[3] = mInputStreamInfo.dim.height;
+                            } else {
+                                crop[0] = crop_data->crop_info[i].crop.left;
+                                crop[1] = crop_data->crop_info[i].crop.top;
+                                crop[2] = crop_data->crop_info[i].crop.width;
+                                crop[3] = crop_data->crop_info[i].crop.height;
+                            }
+                            roi_map.add(crop_data->crop_info[i].roi_map.left);
+                            roi_map.add(crop_data->crop_info[i].roi_map.top);
+                            roi_map.add(crop_data->crop_info[i].roi_map.width);
+                            roi_map.add(crop_data->crop_info[i].roi_map.height);
+                            streams_found++;
+                            LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
+                                    crop[0], crop[1], crop[2], crop[3]);
+                            LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
+                                    crop_data->crop_info[i].roi_map.left,
+                                    crop_data->crop_info[i].roi_map.top,
+                                    crop_data->crop_info[i].roi_map.width,
+                                    crop_data->crop_info[i].roi_map.height);
+                            break;
+
+                       }
+                    }
+                    camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
+                            &streams_found, 1);
+                    camMetadata.update(QCAMERA3_CROP_REPROCESS,
+                            crop, (size_t)(streams_found * 4));
+                    if (roi_map.array()) {
+                        camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
+                                roi_map.array(), roi_map.size());
+                    }
+               }
+               if (crop) {
+                   delete [] crop;
+               }
+            }
+        }
+    }
+
+    if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
+        // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
+        // so hardcoding the CAC result to OFF mode.
+        uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
+        camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
+    } else {
+        IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
+            int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
+                    *cacMode);
+            if (NAME_NOT_FOUND != val) {
+                uint8_t resultCacMode = (uint8_t)val;
+                // check whether CAC result from CB is equal to Framework set CAC mode
+                // If not equal then set the CAC mode came in corresponding request
+                if (fwk_cacMode != resultCacMode) {
+                    resultCacMode = fwk_cacMode;
+                }
+                LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
+                camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
+            } else {
+                LOGE("Invalid CAC camera parameter: %d", *cacMode);
+            }
+        }
+    }
+
+    // Post blob of cam_cds_data through vendor tag.
+    IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
+        uint8_t cnt = cdsInfo->num_of_streams;
+        cam_cds_data_t cdsDataOverride;
+        memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
+        cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
+        cdsDataOverride.num_of_streams = 1;
+        if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
+            uint32_t reproc_stream_id;
+            if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
+                LOGD("No reprocessible stream found, ignore cds data");
+            } else {
+                for (size_t i = 0; i < cnt; i++) {
+                    if (cdsInfo->cds_info[i].stream_id ==
+                            reproc_stream_id) {
+                        cdsDataOverride.cds_info[0].cds_enable =
+                                cdsInfo->cds_info[i].cds_enable;
+                        break;
+                    }
+                }
+            }
+        } else {
+            LOGD("Invalid stream count %d in CDS_DATA", cnt);
+        }
+        camMetadata.update(QCAMERA3_CDS_INFO,
+                (uint8_t *)&cdsDataOverride,
+                sizeof(cam_cds_data_t));
+    }
+
+    // Ldaf calibration data
+    if (!mLdafCalibExist) {
+        IF_META_AVAILABLE(uint32_t, ldafCalib,
+                CAM_INTF_META_LDAF_EXIF, metadata) {
+            mLdafCalibExist = true;
+            mLdafCalib[0] = ldafCalib[0];
+            mLdafCalib[1] = ldafCalib[1];
+            LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
+                    ldafCalib[0], ldafCalib[1]);
+        }
+    }
+
+    // DDM debug data through vendor tag
+    cam_ddm_info_t ddm_info;
+    memset(&ddm_info, 0, sizeof(cam_ddm_info_t));
+    IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
+            CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
+        memcpy(&(ddm_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
+    }
+    IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
+            CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
+        memcpy(&(ddm_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
+    }
+    IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
+            CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
+        memcpy(&(ddm_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
+    }
+    IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
+            CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
+        memcpy(&(ddm_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
+    }
+    IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
+            CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
+        memcpy(&(ddm_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
+    }
+    IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
+        memcpy(&(ddm_info.pipeline_flip), flip, sizeof(int32_t));
+    }
+    IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
+            CAM_INTF_PARM_ROTATION, metadata) {
+        memcpy(&(ddm_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
+    }
+    camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_DDM_DATA_BLOB,
+            (uint8_t *)&ddm_info, sizeof(cam_ddm_info_t));
+
+    resultMetadata = camMetadata.release();
+    return resultMetadata;
+}
+
+/*===========================================================================
+ * FUNCTION   : saveExifParams
+ *
+ * DESCRIPTION: caches the 3A/stats EXIF debug blobs delivered in a metadata
+ *              callback into mExifParams.debug_params, so they can later be
+ *              attached to JPEG EXIF debug data
+ *
+ * PARAMETERS :
+ *   @metadata : metadata information from callback
+ *
+ * RETURN     : none
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
+{
+    // Each block copies one debug section (AE/AWB/AF/ASD/stats/bE-stats/
+    // bhist/3A-tuning) only when its tag is present in this buffer AND the
+    // debug_params storage has been allocated; the matching *_valid flag is
+    // set so downstream consumers know the snapshot is fresh.
+    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
+            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
+        if (mExifParams.debug_params) {
+            mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
+            mExifParams.debug_params->ae_debug_params_valid = TRUE;
+        }
+    }
+    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
+            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
+        if (mExifParams.debug_params) {
+            mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
+            mExifParams.debug_params->awb_debug_params_valid = TRUE;
+        }
+    }
+    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
+            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
+        if (mExifParams.debug_params) {
+            mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
+            mExifParams.debug_params->af_debug_params_valid = TRUE;
+        }
+    }
+    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
+            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
+        if (mExifParams.debug_params) {
+            mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
+            mExifParams.debug_params->asd_debug_params_valid = TRUE;
+        }
+    }
+    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
+            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
+        if (mExifParams.debug_params) {
+            mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
+            mExifParams.debug_params->stats_debug_params_valid = TRUE;
+        }
+    }
+    IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
+            CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
+        if (mExifParams.debug_params) {
+            mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
+            mExifParams.debug_params->bestats_debug_params_valid = TRUE;
+        }
+    }
+    IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
+            CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
+        if (mExifParams.debug_params) {
+            mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
+            mExifParams.debug_params->bhist_debug_params_valid = TRUE;
+        }
+    }
+    IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
+            CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
+        if (mExifParams.debug_params) {
+            mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
+            mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : get3AExifParams
+ *
+ * DESCRIPTION: accessor returning a copy of the cached 3A EXIF parameters
+ *              most recently stored by saveExifParams()
+ *
+ * PARAMETERS : none
+ *
+ *
+ * RETURN     : mm_jpeg_exif_params_t
+ *
+ *==========================================================================*/
+mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
+{
+    return mExifParams;
+}
+
+/*===========================================================================
+ * FUNCTION   : translateCbUrgentMetadataToResultMetadata
+ *
+ * DESCRIPTION: translates the urgent (early partial-result) subset of HAL
+ *              metadata from a callback into framework result metadata:
+ *              3A states, triggers and control modes
+ *
+ * PARAMETERS :
+ *   @metadata : metadata information from callback
+ *
+ * RETURN     : camera_metadata_t*
+ *              metadata in a format specified by fwk; ownership of the
+ *              buffer released from camMetadata passes to the caller
+ *==========================================================================*/
+camera_metadata_t*
+QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
+                                (metadata_buffer_t *metadata)
+{
+    CameraMetadata camMetadata;
+    camera_metadata_t *resultMetadata;
+
+
+    // AWB state: HAL's uint32_t enum is narrowed to the framework's uint8_t.
+    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
+        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
+        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
+        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
+    }
+
+    // AE precapture trigger is echoed back together with its trigger id.
+    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
+        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
+                &aecTrigger->trigger, 1);
+        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
+                &aecTrigger->trigger_id, 1);
+        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
+                 aecTrigger->trigger);
+        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
+                aecTrigger->trigger_id);
+    }
+
+    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
+        uint8_t fwk_ae_state = (uint8_t) *ae_state;
+        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
+        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
+    }
+
+    // AF mode: translated through the HAL<->framework lookup table.
+    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
+        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
+        if (NAME_NOT_FOUND != val) {
+            uint8_t fwkAfMode = (uint8_t)val;
+            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
+            LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
+        } else {
+            LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
+                    val);
+        }
+    }
+
+    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
+        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
+                &af_trigger->trigger, 1);
+        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
+                 af_trigger->trigger);
+        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
+        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
+                af_trigger->trigger_id);
+    }
+
+    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
+        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
+                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
+        if (NAME_NOT_FOUND != val) {
+            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
+            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
+            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
+        } else {
+            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
+        }
+    }
+
+    // ANDROID_CONTROL_AE_MODE has no single HAL tag; it is deduced below
+    // from red-eye reduction, LED/flash mode and AEC mode, in that priority.
+    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
+    uint32_t aeMode = CAM_AE_MODE_MAX;
+    int32_t flashMode = CAM_FLASH_MODE_MAX;
+    int32_t redeye = -1;
+    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
+        aeMode = *pAeMode;
+    }
+    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
+        flashMode = *pFlashMode;
+    }
+    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
+        redeye = *pRedeye;
+    }
+
+    if (1 == redeye) {
+        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
+        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
+    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
+        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
+                flashMode);
+        if (NAME_NOT_FOUND != val) {
+            fwk_aeMode = (uint8_t)val;
+            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
+        } else {
+            LOGE("Unsupported flash mode %d", flashMode);
+        }
+    } else if (aeMode == CAM_AE_MODE_ON) {
+        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
+        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
+    } else if (aeMode == CAM_AE_MODE_OFF) {
+        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
+        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
+    } else {
+        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
+              "flashMode:%d, aeMode:%u!!!",
+                 redeye, flashMode, aeMode);
+    }
+
+    resultMetadata = camMetadata.release();
+    return resultMetadata;
+}
+
+/*===========================================================================
+ * FUNCTION   : dumpMetadataToFile
+ *
+ * DESCRIPTION: Dumps tuning metadata to file system
+ *
+ * PARAMETERS :
+ *   @meta           : tuning metadata
+ *   @dumpFrameCount : current dump frame count
+ *   @enabled        : Enable mask
+ *   @type           : stream/dump type string used in the dump file name
+ *   @frameNumber    : frame number used in the dump file name
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
+                                                   uint32_t &dumpFrameCount,
+                                                   bool enabled,
+                                                   const char *type,
+                                                   uint32_t frameNumber)
+{
+    // Reject any section whose advertised size exceeds its fixed slot in
+    // tuning_params_t; writing it out would read past the payload buffer.
+    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
+        LOGE("Tuning sensor data size bigger than expected %d: %d",
+              meta.tuning_sensor_data_size,
+              TUNING_SENSOR_DATA_MAX);
+        return;
+    }
+
+    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
+        LOGE("Tuning VFE data size bigger than expected %d: %d",
+              meta.tuning_vfe_data_size,
+              TUNING_VFE_DATA_MAX);
+        return;
+    }
+
+    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
+        LOGE("Tuning CPP data size bigger than expected %d: %d",
+              meta.tuning_cpp_data_size,
+              TUNING_CPP_DATA_MAX);
+        return;
+    }
+
+    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
+        LOGE("Tuning CAC data size bigger than expected %d: %d",
+              meta.tuning_cac_data_size,
+              TUNING_CAC_DATA_MAX);
+        return;
+    }
+
+    if(enabled){
+        char timeBuf[FILENAME_MAX];
+        char buf[FILENAME_MAX];
+        memset(buf, 0, sizeof(buf));
+        memset(timeBuf, 0, sizeof(timeBuf));
+        time_t current_time;
+        struct tm * timeinfo;
+        time (&current_time);
+        // NOTE(review): localtime() uses static storage and is not
+        // thread-safe — assumed single dump caller; confirm or use localtime_r.
+        timeinfo = localtime (&current_time);
+        if (timeinfo != NULL) {
+            strftime (timeBuf, sizeof(timeBuf),
+                    QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
+        }
+        String8 filePath(timeBuf);
+        // %u, not %d: dumpFrameCount and frameNumber are uint32_t.
+        snprintf(buf,
+                sizeof(buf),
+                "%um_%s_%u.bin",
+                dumpFrameCount,
+                type,
+                frameNumber);
+        filePath.append(buf);
+        int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
+        if (file_fd >= 0) {
+            ssize_t written_len = 0;
+            meta.tuning_data_version = TUNING_DATA_VERSION;
+            // Header: version followed by the five section-size words.
+            void *data = (void *)((uint8_t *)&meta.tuning_data_version);
+            written_len += write(file_fd, data, sizeof(uint32_t));
+            data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
+            LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
+            written_len += write(file_fd, data, sizeof(uint32_t));
+            data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
+            LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
+            written_len += write(file_fd, data, sizeof(uint32_t));
+            data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
+            LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
+            written_len += write(file_fd, data, sizeof(uint32_t));
+            data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
+            LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
+            written_len += write(file_fd, data, sizeof(uint32_t));
+            meta.tuning_mod3_data_size = 0;
+            data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
+            LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
+            written_len += write(file_fd, data, sizeof(uint32_t));
+            // Payload: each section is written from its fixed offset in
+            // meta.data, using the (already validated) advertised size.
+            size_t total_size = meta.tuning_sensor_data_size;
+            data = (void *)((uint8_t *)&meta.data);
+            written_len += write(file_fd, data, total_size);
+            total_size = meta.tuning_vfe_data_size;
+            data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
+            written_len += write(file_fd, data, total_size);
+            total_size = meta.tuning_cpp_data_size;
+            data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
+            written_len += write(file_fd, data, total_size);
+            total_size = meta.tuning_cac_data_size;
+            data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
+            written_len += write(file_fd, data, total_size);
+            close(file_fd);
+            // write() returns -1 or a short count on error; previously the
+            // accumulated total was never checked, silently producing
+            // truncated dump files. Validate the full record landed on disk.
+            ssize_t expected_len = (ssize_t)(6 * sizeof(uint32_t) +
+                    meta.tuning_sensor_data_size +
+                    meta.tuning_vfe_data_size +
+                    meta.tuning_cpp_data_size +
+                    meta.tuning_cac_data_size);
+            if (written_len != expected_len) {
+                LOGE("Incomplete tuning dump %s: wrote %zd of %zd bytes",
+                        filePath.string(), written_len, expected_len);
+            }
+        }else {
+            LOGE("fail to open file for metadata dumping");
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : cleanAndSortStreamInfo
+ *
+ * DESCRIPTION: drops streams marked INVALID from mStreamInfo, then reorders
+ *              the surviving entries so that all RAW streams come last.
+ *              This is a workaround for camera daemon constraint.
+ *
+ * PARAMETERS : None
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::cleanAndSortStreamInfo()
+{
+    List<stream_info_t *> sortedStreams;
+
+    // Pass 1: release and remove every stream flagged INVALID.
+    List<stream_info_t *>::iterator iter = mStreamInfo.begin();
+    while (iter != mStreamInfo.end()) {
+        stream_info_t *info = *iter;
+        if (INVALID == info->status) {
+            QCamera3Channel *channel = (QCamera3Channel *)info->stream->priv;
+            delete channel;
+            free(info);
+            iter = mStreamInfo.erase(iter);
+        } else {
+            iter++;
+        }
+    }
+
+    // Pass 2: move every non-RAW stream (preview/video/callback/snapshot)
+    // into the sorted list first.
+    iter = mStreamInfo.begin();
+    while (iter != mStreamInfo.end()) {
+        int format = (*iter)->stream->format;
+        bool isRaw = (HAL_PIXEL_FORMAT_RAW_OPAQUE == format) ||
+                (HAL_PIXEL_FORMAT_RAW10 == format) ||
+                (HAL_PIXEL_FORMAT_RAW16 == format);
+        if (!isRaw) {
+            sortedStreams.push_back(*iter);
+            iter = mStreamInfo.erase(iter);
+        } else {
+            iter++;
+        }
+    }
+
+    // Pass 3: whatever remains is RAW; append it at the tail.
+    while (!mStreamInfo.empty()) {
+        sortedStreams.push_back(*(mStreamInfo.begin()));
+        mStreamInfo.erase(mStreamInfo.begin());
+    }
+
+    mStreamInfo = sortedStreams;
+}
+
+/*===========================================================================
+ * FUNCTION   : extractJpegMetadata
+ *
+ * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
+ *              JPEG metadata is cached in HAL, and return as part of capture
+ *              result when metadata is returned from camera daemon.
+ *
+ * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
+ *              @request:      capture request
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::extractJpegMetadata(
+        CameraMetadata& jpegMetadata,
+        const camera3_capture_request_t *request)
+{
+    CameraMetadata frame_settings;
+    frame_settings = request->settings;
+
+    // Copy each JPEG-related tag verbatim when the request carries it,
+    // looking the entry up once per tag.
+    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
+        camera_metadata_entry entry =
+                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
+        jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
+                entry.data.d, entry.count);
+    }
+
+    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
+        camera_metadata_entry entry =
+                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD);
+        jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
+                entry.data.u8, entry.count);
+    }
+
+    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
+        camera_metadata_entry entry =
+                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP);
+        jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
+                entry.data.i64, entry.count);
+    }
+
+    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
+        camera_metadata_entry entry =
+                frame_settings.find(ANDROID_JPEG_ORIENTATION);
+        jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
+                entry.data.i32, entry.count);
+    }
+
+    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
+        camera_metadata_entry entry =
+                frame_settings.find(ANDROID_JPEG_QUALITY);
+        jpegMetadata.update(ANDROID_JPEG_QUALITY,
+                entry.data.u8, entry.count);
+    }
+
+    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
+        camera_metadata_entry entry =
+                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY);
+        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
+                entry.data.u8, entry.count);
+    }
+
+    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
+        camera_metadata_entry sizeEntry =
+                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE);
+        int32_t thumbDims[2] = { sizeEntry.data.i32[0], sizeEntry.data.i32[1] };
+        if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
+            int32_t rotation =
+                    frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
+            if ((90 == rotation) || (270 == rotation)) {
+                // Swap thumbnail dimensions for rotations 90 and 270 in
+                // jpeg metadata.
+                int32_t swap = thumbDims[0];
+                thumbDims[0] = thumbDims[1];
+                thumbDims[1] = swap;
+            }
+        }
+        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
+                thumbDims, sizeEntry.count);
+    }
+
+}
+
+/*===========================================================================
+ * FUNCTION   : convertToRegions
+ *
+ * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
+ *              laid out as (xmin, ymin, xmax, ymax[, weight])
+ *
+ * PARAMETERS :
+ *   @rect   : cam_rect_t struct to convert
+ *   @region : int32_t destination array
+ *   @weight : if we are converting from cam_area_t, weight is valid
+ *             else weight = -1
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
+        int32_t *region, int weight)
+{
+    int32_t xMin = rect.left;
+    int32_t yMin = rect.top;
+    region[0] = xMin;
+    region[1] = yMin;
+    region[2] = xMin + rect.width;   // right edge
+    region[3] = yMin + rect.height;  // bottom edge
+    if (weight > -1) {
+        // Fifth tuple slot is only written when converting a weighted area.
+        region[4] = weight;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : convertFromRegions
+ *
+ * DESCRIPTION: helper method to convert a framework region tuple
+ *              (xmin, ymin, xmax, ymax, weight) from capture-request
+ *              settings into a cam_area_t (left/top/width/height + weight)
+ *
+ * PARAMETERS :
+ *   @roi      : cam_area_t destination, filled with rect and weight
+ *   @settings : capture request settings holding the region tuple
+ *   @tag      : metadata tag under which the 5-entry tuple is stored
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
+        const camera_metadata_t *settings, uint32_t tag)
+{
+    CameraMetadata frame_settings;
+    frame_settings = settings;
+    // Look the tag up once instead of five separate find() calls.
+    camera_metadata_entry entry = frame_settings.find(tag);
+    int32_t x_min = entry.data.i32[0];
+    int32_t y_min = entry.data.i32[1];
+    int32_t x_max = entry.data.i32[2];
+    int32_t y_max = entry.data.i32[3];
+    roi.weight = entry.data.i32[4];
+    roi.rect.left = x_min;
+    roi.rect.top = y_min;
+    // Framework passes exclusive max coordinates; HAL stores width/height.
+    roi.rect.width = x_max - x_min;
+    roi.rect.height = y_max - y_min;
+}
+
+/*===========================================================================
+ * FUNCTION   : resetIfNeededROI
+ *
+ * DESCRIPTION: helper method to reset the roi if it is greater than scaler
+ *              crop region
+ *
+ * PARAMETERS :
+ *   @roi       : cam_area_t struct to resize
+ *   @scalerCropRegion : cam_crop_region_t region to compare against
+ *
+ * RETURN     : true if the roi is usable (clipped into the crop region if
+ *              needed, or intentionally disabled via weight 0); false if
+ *              the roi lies entirely outside the scaler crop region
+ *==========================================================================*/
+bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
+                                                 const cam_crop_region_t* scalerCropRegion)
+{
+    int32_t roi_x_max = roi->rect.width + roi->rect.left;
+    int32_t roi_y_max = roi->rect.height + roi->rect.top;
+    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
+    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
+
+    /* According to spec weight = 0 is used to indicate roi needs to be disabled
+     * without having this check the calculations below to validate if the roi
+     * is inside scalar crop region will fail resulting in the roi not being
+     * reset causing algorithm to continue to use stale roi window
+     */
+    if (roi->weight == 0) {
+        return true;
+    }
+
+    // Reject the roi outright when it has no overlap with the crop region:
+    if ((roi_x_max < scalerCropRegion->left) ||
+        // right edge of roi window is left of scalar crop's left edge
+        (roi_y_max < scalerCropRegion->top)  ||
+        // bottom edge of roi window is above scalar crop's top edge
+        (roi->rect.left > crop_x_max) ||
+        // left edge of roi window is beyond(right) of scalar crop's right edge
+        (roi->rect.top > crop_y_max)){
+        // top edge of roi window is beyond(below) scalar crop's bottom edge
+        return false;
+    }
+    // Partially overlapping roi: clip each edge into the crop region.
+    if (roi->rect.left < scalerCropRegion->left) {
+        roi->rect.left = scalerCropRegion->left;
+    }
+    if (roi->rect.top < scalerCropRegion->top) {
+        roi->rect.top = scalerCropRegion->top;
+    }
+    if (roi_x_max > crop_x_max) {
+        roi_x_max = crop_x_max;
+    }
+    if (roi_y_max > crop_y_max) {
+        roi_y_max = crop_y_max;
+    }
+    roi->rect.width = roi_x_max - roi->rect.left;
+    roi->rect.height = roi_y_max - roi->rect.top;
+    return true;
+}
+
+/*===========================================================================
+ * FUNCTION   : convertLandmarks
+ *
+ * DESCRIPTION: helper method to extract the landmarks from face detection info
+ *
+ * PARAMETERS :
+ *   @landmark_data : input landmark data to be converted
+ *   @landmarks : int32_t destination array
+ *
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::convertLandmarks(
+        cam_face_landmarks_info_t landmark_data,
+        int32_t *landmarks)
+{
+    landmarks[0] = (int32_t)landmark_data.left_eye_center.x;
+    landmarks[1] = (int32_t)landmark_data.left_eye_center.y;
+    landmarks[2] = (int32_t)landmark_data.right_eye_center.x;
+    landmarks[3] = (int32_t)landmark_data.right_eye_center.y;
+    landmarks[4] = (int32_t)landmark_data.mouth_center.x;
+    landmarks[5] = (int32_t)landmark_data.mouth_center.y;
+}
+
/* Convenience accessor: pointer to the INDEX-th buffer of a heap-memory object */
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
+/*===========================================================================
+ * FUNCTION   : initCapabilities
+ *
+ * DESCRIPTION: initialize camera capabilities in static data struct
+ *
+ * PARAMETERS :
+ *   @cameraId  : camera Id
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
+{
+    int rc = 0;
+    mm_camera_vtbl_t *cameraHandle = NULL;
+    QCamera3HeapMemory *capabilityHeap = NULL;
+
+    rc = camera_open((uint8_t)cameraId, &cameraHandle);
+    if (rc) {
+        LOGE("camera_open failed. rc = %d", rc);
+        goto open_failed;
+    }
+    if (!cameraHandle) {
+        LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
+        goto open_failed;
+    }
+
+    capabilityHeap = new QCamera3HeapMemory(1);
+    if (capabilityHeap == NULL) {
+        LOGE("creation of capabilityHeap failed");
+        goto heap_creation_failed;
+    }
+    /* Allocate memory for capability buffer */
+    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
+    if(rc != OK) {
+        LOGE("No memory for cappability");
+        goto allocate_failed;
+    }
+
+    /* Map memory for capability buffer */
+    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
+    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
+                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
+                                capabilityHeap->getFd(0),
+                                sizeof(cam_capability_t),
+                                capabilityHeap->getPtr(0));
+    if(rc < 0) {
+        LOGE("failed to map capability buffer");
+        goto map_failed;
+    }
+
+    /* Query Capability */
+    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
+    if(rc < 0) {
+        LOGE("failed to query capability");
+        goto query_failed;
+    }
+    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
+    if (!gCamCapability[cameraId]) {
+        LOGE("out of memory");
+        goto query_failed;
+    }
+    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
+                                        sizeof(cam_capability_t));
+
+    int index;
+    for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
+        cam_analysis_info_t *p_analysis_info =
+                &gCamCapability[cameraId]->analysis_info[index];
+        p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
+        p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
+    }
+    rc = 0;
+
+query_failed:
+    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
+                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
+map_failed:
+    capabilityHeap->deallocate();
+allocate_failed:
+    delete capabilityHeap;
+heap_creation_failed:
+    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
+    cameraHandle = NULL;
+open_failed:
+    return rc;
+}
+
+/*==========================================================================
+ * FUNCTION   : get3Aversion
+ *
+ * DESCRIPTION: get the Q3A S/W version
+ *
+ * PARAMETERS :
+ *  @sw_version: Reference of Q3A structure which will hold version info upon
+ *               return
+ *
+ * RETURN     : None
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
+{
+    if(gCamCapability[mCameraId])
+        sw_version = gCamCapability[mCameraId]->q3a_version;
+    else
+        LOGE("Capability structure NULL!");
+}
+
+
+/*===========================================================================
+ * FUNCTION   : initParameters
+ *
+ * DESCRIPTION: initialize camera parameters
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera3HardwareInterface::initParameters()
+{
+    int rc = 0;
+
+    //Allocate Set Param Buffer
+    mParamHeap = new QCamera3HeapMemory(1);
+    rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
+    if(rc != OK) {
+        rc = NO_MEMORY;
+        LOGE("Failed to allocate SETPARM Heap memory");
+        delete mParamHeap;
+        mParamHeap = NULL;
+        return rc;
+    }
+
+    //Map memory for parameters buffer
+    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
+            CAM_MAPPING_BUF_TYPE_PARM_BUF,
+            mParamHeap->getFd(0),
+            sizeof(metadata_buffer_t),
+            (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
+    if(rc < 0) {
+        LOGE("failed to map SETPARM buffer");
+        rc = FAILED_TRANSACTION;
+        mParamHeap->deallocate();
+        delete mParamHeap;
+        mParamHeap = NULL;
+        return rc;
+    }
+
+    mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
+
+    mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : deinitParameters
+ *
+ * DESCRIPTION: de-initialize camera parameters
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : NONE
+ *==========================================================================*/
+void QCamera3HardwareInterface::deinitParameters()
+{
+    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
+            CAM_MAPPING_BUF_TYPE_PARM_BUF);
+
+    mParamHeap->deallocate();
+    delete mParamHeap;
+    mParamHeap = NULL;
+
+    mParameters = NULL;
+
+    free(mPrevParameters);
+    mPrevParameters = NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : calcMaxJpegSize
+ *
+ * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : max_jpeg_size
+ *==========================================================================*/
+size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
+{
+    size_t max_jpeg_size = 0;
+    size_t temp_width, temp_height;
+    size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
+            MAX_SIZES_CNT);
+    for (size_t i = 0; i < count; i++) {
+        temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
+        temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
+        if (temp_width * temp_height > max_jpeg_size ) {
+            max_jpeg_size = temp_width * temp_height;
+        }
+    }
+    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
+    return max_jpeg_size;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMaxRawSize
+ *
+ * DESCRIPTION: Fetches maximum raw size supported by the cameraId
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : Largest supported Raw Dimension
+ *==========================================================================*/
+cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
+{
+    int max_width = 0;
+    cam_dimension_t maxRawSize;
+
+    memset(&maxRawSize, 0, sizeof(cam_dimension_t));
+    for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
+        if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
+            max_width = gCamCapability[camera_id]->raw_dim[i].width;
+            maxRawSize = gCamCapability[camera_id]->raw_dim[i];
+        }
+    }
+    return maxRawSize;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : calcMaxJpegDim
+ *
+ * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : max_jpeg_dim
+ *==========================================================================*/
+cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
+{
+    cam_dimension_t max_jpeg_dim;
+    cam_dimension_t curr_jpeg_dim;
+    max_jpeg_dim.width = 0;
+    max_jpeg_dim.height = 0;
+    curr_jpeg_dim.width = 0;
+    curr_jpeg_dim.height = 0;
+    for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
+        curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
+        curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
+        if (curr_jpeg_dim.width * curr_jpeg_dim.height >
+            max_jpeg_dim.width * max_jpeg_dim.height ) {
+            max_jpeg_dim.width = curr_jpeg_dim.width;
+            max_jpeg_dim.height = curr_jpeg_dim.height;
+        }
+    }
+    return max_jpeg_dim;
+}
+
+/*===========================================================================
+ * FUNCTION   : addStreamConfig
+ *
+ * DESCRIPTION: adds the stream configuration to the array
+ *
+ * PARAMETERS :
+ * @available_stream_configs : pointer to stream configuration array
+ * @scalar_format            : scalar format
+ * @dim                      : configuration dimension
+ * @config_type              : input or output configuration type
+ *
+ * RETURN     : NONE
+ *==========================================================================*/
+void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
+        int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
+{
+    available_stream_configs.add(scalar_format);
+    available_stream_configs.add(dim.width);
+    available_stream_configs.add(dim.height);
+    available_stream_configs.add(config_type);
+}
+
+/*===========================================================================
+ * FUNCTION   : suppportBurstCapture
+ *
+ * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
+ *
+ * PARAMETERS :
+ *   @cameraId  : camera Id
+ *
+ * RETURN     : true if camera supports BURST_CAPTURE
+ *              false otherwise
+ *==========================================================================*/
+bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
+{
+    const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
+    const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
+    const int32_t highResWidth = 3264;
+    const int32_t highResHeight = 2448;
+
+    if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
+        // Maximum resolution images cannot be captured at >= 10fps
+        // -> not supporting BURST_CAPTURE
+        return false;
+    }
+
+    if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
+        // Maximum resolution images can be captured at >= 20fps
+        // --> supporting BURST_CAPTURE
+        return true;
+    }
+
+    // Find the smallest highRes resolution, or largest resolution if there is none
+    size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
+            MAX_SIZES_CNT);
+    size_t highRes = 0;
+    while ((highRes + 1 < totalCnt) &&
+            (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
+            gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
+            highResWidth * highResHeight)) {
+        highRes++;
+    }
+    if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
+        return true;
+    } else {
+        return false;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : initStaticMetadata
+ *
+ * DESCRIPTION: initialize the static metadata
+ *
+ * PARAMETERS :
+ *   @cameraId  : camera Id
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
+{
+    int rc = 0;
+    CameraMetadata staticInfo;
+    size_t count = 0;
+    bool limitedDevice = false;
+    char prop[PROPERTY_VALUE_MAX];
+    bool supportBurst = false;
+
+    supportBurst = supportBurstCapture(cameraId);
+
+    /* If sensor is YUV sensor (no raw support) or if per-frame control is not
+     * guaranteed or if min fps of max resolution is less than 20 fps, its
+     * advertised as limited device*/
+    limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
+            (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
+            (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
+            !supportBurst;
+
+    uint8_t supportedHwLvl = limitedDevice ?
+            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
+            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
+
+    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
+            &supportedHwLvl, 1);
+
+    bool facingBack = false;
+    if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
+            (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
+        facingBack = true;
+    }
+    /*HAL 3 only*/
+    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
+                    &gCamCapability[cameraId]->min_focus_distance, 1);
+
+    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
+                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
+
+    /*should be using focal lengths but sensor doesn't provide that info now*/
+    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
+                      &gCamCapability[cameraId]->focal_length,
+                      1);
+
+    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
+            gCamCapability[cameraId]->apertures,
+            MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
+
+    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
+            gCamCapability[cameraId]->filter_densities,
+            MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
+
+
+    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
+            (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
+            MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
+
+    int32_t lens_shading_map_size[] = {
+            MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
+            MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
+    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
+                      lens_shading_map_size,
+                      sizeof(lens_shading_map_size)/sizeof(int32_t));
+
+    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
+            gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
+
+    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
+            gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
+
+    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
+            &gCamCapability[cameraId]->max_frame_duration, 1);
+
+    camera_metadata_rational baseGainFactor = {
+            gCamCapability[cameraId]->base_gain_factor.numerator,
+            gCamCapability[cameraId]->base_gain_factor.denominator};
+    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
+                      &baseGainFactor, 1);
+
+    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
+                     (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
+
+    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
+            gCamCapability[cameraId]->pixel_array_size.height};
+    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
+                      pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
+
+    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
+            gCamCapability[cameraId]->active_array_size.top,
+            gCamCapability[cameraId]->active_array_size.width,
+            gCamCapability[cameraId]->active_array_size.height};
+    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
+            active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
+
+    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
+            &gCamCapability[cameraId]->white_level, 1);
+
+    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
+            gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
+
+    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
+            &gCamCapability[cameraId]->flash_charge_duration, 1);
+
+    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
+            &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
+
+    uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
+    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
+            &timestampSource, 1);
+
+    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
+            &gCamCapability[cameraId]->histogram_size, 1);
+
+    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
+            &gCamCapability[cameraId]->max_histogram_count, 1);
+
+    int32_t sharpness_map_size[] = {
+            gCamCapability[cameraId]->sharpness_map_size.width,
+            gCamCapability[cameraId]->sharpness_map_size.height};
+
+    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
+            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
+
+    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
+            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
+
+    int32_t scalar_formats[] = {
+            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
+            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
+            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
+            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
+            HAL_PIXEL_FORMAT_RAW10,
+            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
+    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
+    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
+                      scalar_formats,
+                      scalar_formats_count);
+
+    int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
+    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
+    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
+            count, MAX_SIZES_CNT, available_processed_sizes);
+    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
+            available_processed_sizes, count * 2);
+
+    int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
+    count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
+    makeTable(gCamCapability[cameraId]->raw_dim,
+            count, MAX_SIZES_CNT, available_raw_sizes);
+    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
+            available_raw_sizes, count * 2);
+
+    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
+    count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
+    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
+            count, MAX_SIZES_CNT, available_fps_ranges);
+    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
+            available_fps_ranges, count * 2);
+
+    camera_metadata_rational exposureCompensationStep = {
+            gCamCapability[cameraId]->exp_compensation_step.numerator,
+            gCamCapability[cameraId]->exp_compensation_step.denominator};
+    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
+                      &exposureCompensationStep, 1);
+
+    Vector<uint8_t> availableVstabModes;
+    availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
+    char eis_prop[PROPERTY_VALUE_MAX];
+    memset(eis_prop, 0, sizeof(eis_prop));
+    property_get("persist.camera.eis.enable", eis_prop, "0");
+    uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
+    if (facingBack && eis_prop_set) {
+        availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
+    }
+    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
+                      availableVstabModes.array(), availableVstabModes.size());
+
+    /*HAL 1 and HAL 3 common*/
+    uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
+    uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
+    uint32_t minZoomStep = 100; //as per HAL1/API1 spec
+    float maxZoom = maxZoomStep/minZoomStep;
+    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
+            &maxZoom, 1);
+
+    uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
+    staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
+
+    int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
+    if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
+        max3aRegions[2] = 0; /* AF not supported */
+    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
+            max3aRegions, 3);
+
+    /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.facedetect", prop, "1");
+    uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
+    LOGD("Support face detection mode: %d",
+             supportedFaceDetectMode);
+
+    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
+    Vector<uint8_t> availableFaceDetectModes;
+    availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
+    if (supportedFaceDetectMode == 1) {
+        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
+    } else if (supportedFaceDetectMode == 2) {
+        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
+    } else if (supportedFaceDetectMode == 3) {
+        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
+        availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
+    } else {
+        maxFaces = 0;
+    }
+    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
+            availableFaceDetectModes.array(),
+            availableFaceDetectModes.size());
+    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
+            (int32_t *)&maxFaces, 1);
+
+    int32_t exposureCompensationRange[] = {
+            gCamCapability[cameraId]->exposure_compensation_min,
+            gCamCapability[cameraId]->exposure_compensation_max};
+    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
+            exposureCompensationRange,
+            sizeof(exposureCompensationRange)/sizeof(int32_t));
+
+    uint8_t lensFacing = (facingBack) ?
+            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
+    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
+
+    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+                      available_thumbnail_sizes,
+                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
+
+    /*all sizes will be clubbed into this tag*/
+    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
+    /*android.scaler.availableStreamConfigurations*/
+    Vector<int32_t> available_stream_configs;
+    cam_dimension_t active_array_dim;
+    active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
+    active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
+    /* Add input/output stream configurations for each scalar formats*/
+    for (size_t j = 0; j < scalar_formats_count; j++) {
+        switch (scalar_formats[j]) {
+        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
+        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
+        case HAL_PIXEL_FORMAT_RAW10:
+            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
+                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
+                addStreamConfig(available_stream_configs, scalar_formats[j],
+                        gCamCapability[cameraId]->raw_dim[i],
+                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
+            }
+            break;
+        case HAL_PIXEL_FORMAT_BLOB:
+            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
+                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
+                addStreamConfig(available_stream_configs, scalar_formats[j],
+                        gCamCapability[cameraId]->picture_sizes_tbl[i],
+                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
+            }
+            break;
+        case HAL_PIXEL_FORMAT_YCbCr_420_888:
+        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+        default:
+            cam_dimension_t largest_picture_size;
+            memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
+            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
+                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
+                addStreamConfig(available_stream_configs, scalar_formats[j],
+                        gCamCapability[cameraId]->picture_sizes_tbl[i],
+                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
+                /* Book keep largest */
+                if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
+                        >= largest_picture_size.width &&
+                        gCamCapability[cameraId]->picture_sizes_tbl[i].height
+                        >= largest_picture_size.height)
+                    largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
+            }
+            /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
+            if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
+                    scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
+                 addStreamConfig(available_stream_configs, scalar_formats[j],
+                         largest_picture_size,
+                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
+            }
+            break;
+        }
+    }
+
+    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+                      available_stream_configs.array(), available_stream_configs.size());
+    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
+    staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
+
+    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
+    staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
+
+    /* android.scaler.availableMinFrameDurations */
+    Vector<int64_t> available_min_durations;
+    for (size_t j = 0; j < scalar_formats_count; j++) {
+        switch (scalar_formats[j]) {
+        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
+        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
+        case HAL_PIXEL_FORMAT_RAW10:
+            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
+                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
+                available_min_durations.add(scalar_formats[j]);
+                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
+                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
+                available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
+            }
+            break;
+        default:
+            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
+                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
+                available_min_durations.add(scalar_formats[j]);
+                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
+                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
+                available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
+            }
+            break;
+        }
+    }
+    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
+                      available_min_durations.array(), available_min_durations.size());
+
+    Vector<int32_t> available_hfr_configs;
+    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
+        int32_t fps = 0;
+        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
+        case CAM_HFR_MODE_60FPS:
+            fps = 60;
+            break;
+        case CAM_HFR_MODE_90FPS:
+            fps = 90;
+            break;
+        case CAM_HFR_MODE_120FPS:
+            fps = 120;
+            break;
+        case CAM_HFR_MODE_150FPS:
+            fps = 150;
+            break;
+        case CAM_HFR_MODE_180FPS:
+            fps = 180;
+            break;
+        case CAM_HFR_MODE_210FPS:
+            fps = 210;
+            break;
+        case CAM_HFR_MODE_240FPS:
+            fps = 240;
+            break;
+        case CAM_HFR_MODE_480FPS:
+            fps = 480;
+            break;
+        case CAM_HFR_MODE_OFF:
+        case CAM_HFR_MODE_MAX:
+        default:
+            break;
+        }
+
+        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
+        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
+            /* For each HFR frame rate, need to advertise one variable fps range
+             * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
+             * and [120, 120]. While camcorder preview alone is running [30, 120] is
+             * set by the app. When video recording is started, [120, 120] is
+             * set. This way sensor configuration does not change when recording
+             * is started */
+
+            /* (width, height, fps_min, fps_max, batch_size_max) */
+            for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
+                j < MAX_SIZES_CNT; j++) {
+                available_hfr_configs.add(
+                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
+                available_hfr_configs.add(
+                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
+                available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
+                available_hfr_configs.add(fps);
+                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
+
+                /* (width, height, fps_min, fps_max, batch_size_max) */
+                available_hfr_configs.add(
+                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
+                available_hfr_configs.add(
+                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
+                available_hfr_configs.add(fps);
+                available_hfr_configs.add(fps);
+                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
+            }
+       }
+    }
+    //Advertise HFR capability only if the property is set
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.hal3hfr.enable", prop, "1");
+    uint8_t hfrEnable = (uint8_t)atoi(prop);
+
+    if(hfrEnable && available_hfr_configs.array()) {
+        staticInfo.update(
+                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
+                available_hfr_configs.array(), available_hfr_configs.size());
+    }
+
+    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
+    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
+                      &max_jpeg_size, 1);
+
+    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
+    size_t size = 0;
+    count = CAM_EFFECT_MODE_MAX;
+    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
+    for (size_t i = 0; i < count; i++) {
+        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
+                gCamCapability[cameraId]->supported_effects[i]);
+        if (NAME_NOT_FOUND != val) {
+            avail_effects[size] = (uint8_t)val;
+            size++;
+        }
+    }
+    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
+                      avail_effects,
+                      size);
+
+    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
+    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
+    size_t supported_scene_modes_cnt = 0;
+    count = CAM_SCENE_MODE_MAX;
+    count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
+    for (size_t i = 0; i < count; i++) {
+        if (gCamCapability[cameraId]->supported_scene_modes[i] !=
+                CAM_SCENE_MODE_OFF) {
+            int val = lookupFwkName(SCENE_MODES_MAP,
+                    METADATA_MAP_SIZE(SCENE_MODES_MAP),
+                    gCamCapability[cameraId]->supported_scene_modes[i]);
+            if (NAME_NOT_FOUND != val) {
+                avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
+                supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
+                supported_scene_modes_cnt++;
+            }
+        }
+    }
+    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
+                      avail_scene_modes,
+                      supported_scene_modes_cnt);
+
+    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
+    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
+                      supported_scene_modes_cnt,
+                      CAM_SCENE_MODE_MAX,
+                      scene_mode_overrides,
+                      supported_indexes,
+                      cameraId);
+
+    if (supported_scene_modes_cnt == 0) {
+        supported_scene_modes_cnt = 1;
+        avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
+    }
+
+    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
+            scene_mode_overrides, supported_scene_modes_cnt * 3);
+
+    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
+                                         ANDROID_CONTROL_MODE_AUTO,
+                                         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
+    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
+            available_control_modes,
+            3);
+
+    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
+    size = 0;
+    count = CAM_ANTIBANDING_MODE_MAX;
+    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
+    for (size_t i = 0; i < count; i++) {
+        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
+                gCamCapability[cameraId]->supported_antibandings[i]);
+        if (NAME_NOT_FOUND != val) {
+            avail_antibanding_modes[size] = (uint8_t)val;
+            size++;
+        }
+
+    }
+    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
+                      avail_antibanding_modes,
+                      size);
+
+    uint8_t avail_abberation_modes[] = {
+            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
+            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
+            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
+    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
+    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
+    if (0 == count) {
+        //  If no aberration correction modes are available for a device, advertise only the OFF mode
+        size = 1;
+    } else {
+        // If count is non-zero then at least one of the FAST or HIGH_QUALITY modes is supported.
+        // So, advertise all 3 modes if at least any one mode is supported, as per the
+        // new M requirement.
+        size = 3;
+    }
+    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
+            avail_abberation_modes,
+            size);
+
+    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
+    size = 0;
+    count = CAM_FOCUS_MODE_MAX;
+    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
+    for (size_t i = 0; i < count; i++) {
+        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
+                gCamCapability[cameraId]->supported_focus_modes[i]);
+        if (NAME_NOT_FOUND != val) {
+            avail_af_modes[size] = (uint8_t)val;
+            size++;
+        }
+    }
+    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
+                      avail_af_modes,
+                      size);
+
+    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
+    size = 0;
+    count = CAM_WB_MODE_MAX;
+    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
+    for (size_t i = 0; i < count; i++) {
+        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
+                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
+                gCamCapability[cameraId]->supported_white_balances[i]);
+        if (NAME_NOT_FOUND != val) {
+            avail_awb_modes[size] = (uint8_t)val;
+            size++;
+        }
+    }
+    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
+                      avail_awb_modes,
+                      size);
+
+    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
+    count = CAM_FLASH_FIRING_LEVEL_MAX;
+    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
+            count);
+    for (size_t i = 0; i < count; i++) {
+        available_flash_levels[i] =
+                gCamCapability[cameraId]->supported_firing_levels[i];
+    }
+    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
+            available_flash_levels, count);
+
+    uint8_t flashAvailable;
+    if (gCamCapability[cameraId]->flash_available)
+        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
+    else
+        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
+    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
+            &flashAvailable, 1);
+
+    Vector<uint8_t> avail_ae_modes;
+    count = CAM_AE_MODE_MAX;
+    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
+    for (size_t i = 0; i < count; i++) {
+        avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
+    }
+    if (flashAvailable) {
+        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
+        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
+    }
+    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
+                      avail_ae_modes.array(),
+                      avail_ae_modes.size());
+
+    int32_t sensitivity_range[2];
+    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
+    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
+    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
+                      sensitivity_range,
+                      sizeof(sensitivity_range) / sizeof(int32_t));
+
+    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
+                      &gCamCapability[cameraId]->max_analog_sensitivity,
+                      1);
+
+    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
+    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
+                      &sensor_orientation,
+                      1);
+
+    int32_t max_output_streams[] = {
+            MAX_STALLING_STREAMS,
+            MAX_PROCESSED_STREAMS,
+            MAX_RAW_STREAMS};
+    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
+            max_output_streams,
+            sizeof(max_output_streams)/sizeof(max_output_streams[0]));
+
+    uint8_t avail_leds = 0;
+    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
+                      &avail_leds, 0);
+
+    uint8_t focus_dist_calibrated;
+    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
+            gCamCapability[cameraId]->focus_dist_calibrated);
+    if (NAME_NOT_FOUND != val) {
+        focus_dist_calibrated = (uint8_t)val;
+        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
+                     &focus_dist_calibrated, 1);
+    }
+
+    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
+    size = 0;
+    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
+            MAX_TEST_PATTERN_CNT);
+    for (size_t i = 0; i < count; i++) {
+        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
+                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
+        if (NAME_NOT_FOUND != testpatternMode) {
+            avail_testpattern_modes[size] = testpatternMode;
+            size++;
+        }
+    }
+    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
+                      avail_testpattern_modes,
+                      size);
+
+    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
+    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
+                      &max_pipeline_depth,
+                      1);
+
+    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
+    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
+                      &partial_result_count,
+                       1);
+
+    int32_t max_stall_duration = MAX_REPROCESS_STALL;
+    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
+
+    Vector<uint8_t> available_capabilities;
+    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
+    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
+    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
+    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
+    if (supportBurst) {
+        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
+    }
+    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
+    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
+    if (hfrEnable && available_hfr_configs.array()) {
+        available_capabilities.add(
+                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
+    }
+
+    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
+        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
+    }
+    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
+            available_capabilities.array(),
+            available_capabilities.size());
+
+    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
+    //Assumption is that all bayer cameras support MANUAL_SENSOR.
+    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
+            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
+
+    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
+            &aeLockAvailable, 1);
+
+    //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
+    //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
+    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
+            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
+
+    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
+            &awbLockAvailable, 1);
+
+    int32_t max_input_streams = 1;
+    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
+                      &max_input_streams,
+                      1);
+
+    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
+    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
+            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
+            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
+            HAL_PIXEL_FORMAT_YCbCr_420_888};
+    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
+                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
+
+    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
+    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
+                      &max_latency,
+                      1);
+
+    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
+                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
+    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
+            available_hot_pixel_modes,
+            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
+
+    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
+                                         ANDROID_SHADING_MODE_FAST,
+                                         ANDROID_SHADING_MODE_HIGH_QUALITY};
+    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
+                      available_shading_modes,
+                      3);
+
+    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
+                                                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
+    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
+                      available_lens_shading_map_modes,
+                      2);
+
+    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
+                                      ANDROID_EDGE_MODE_FAST,
+                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
+                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
+    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
+            available_edge_modes,
+            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
+
+    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
+                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
+                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
+                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
+                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
+    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
+            available_noise_red_modes,
+            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
+
+    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
+                                         ANDROID_TONEMAP_MODE_FAST,
+                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
+    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
+            available_tonemap_modes,
+            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
+
+    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
+    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
+            available_hot_pixel_map_modes,
+            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
+
+    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
+            gCamCapability[cameraId]->reference_illuminant1);
+    if (NAME_NOT_FOUND != val) {
+        uint8_t fwkReferenceIlluminant = (uint8_t)val;
+        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
+    }
+
+    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
+            gCamCapability[cameraId]->reference_illuminant2);
+    if (NAME_NOT_FOUND != val) {
+        uint8_t fwkReferenceIlluminant = (uint8_t)val;
+        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
+    }
+
+    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
+            (void *)gCamCapability[cameraId]->forward_matrix1,
+            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
+
+    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
+            (void *)gCamCapability[cameraId]->forward_matrix2,
+            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
+
+    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
+            (void *)gCamCapability[cameraId]->color_transform1,
+            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
+
+    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
+            (void *)gCamCapability[cameraId]->color_transform2,
+            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
+
+    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
+            (void *)gCamCapability[cameraId]->calibration_transform1,
+            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
+
+    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
+            (void *)gCamCapability[cameraId]->calibration_transform2,
+            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
+
+    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
+       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
+       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
+       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
+       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
+       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
+       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
+       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
+       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
+       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
+       ANDROID_JPEG_GPS_COORDINATES,
+       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
+       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
+       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
+       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
+       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
+       ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
+       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
+       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
+       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
+       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
+       ANDROID_STATISTICS_FACE_DETECT_MODE,
+       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
+       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
+       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
+       ANDROID_BLACK_LEVEL_LOCK };
+
+    size_t request_keys_cnt =
+            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
+    Vector<int32_t> available_request_keys;
+    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
+    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
+        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
+    }
+
+    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
+            available_request_keys.array(), available_request_keys.size());
+
+    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
+       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
+       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
+       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
+       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
+       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
+       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
+       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
+       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
+       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
+       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
+       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
+       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
+       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
+       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
+       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
+       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
+       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
+       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
+       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
+       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
+       ANDROID_STATISTICS_FACE_SCORES};
+    size_t result_keys_cnt =
+            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
+
+    Vector<int32_t> available_result_keys;
+    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
+    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
+        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
+    }
+    if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
+        available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
+        available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
+    }
+    if (supportedFaceDetectMode == 1) {
+        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
+        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
+    } else if ((supportedFaceDetectMode == 2) ||
+            (supportedFaceDetectMode == 3)) {
+        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
+        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
+    }
+    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
+            available_result_keys.array(), available_result_keys.size());
+
+    int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
+       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
+       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
+       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
+       ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
+       ANDROID_SCALER_CROPPING_TYPE,
+       ANDROID_SYNC_MAX_LATENCY,
+       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
+       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
+       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
+       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
+       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
+       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
+       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
+       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
+       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
+       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
+       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
+       ANDROID_LENS_FACING,
+       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
+       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
+       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
+       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
+       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
+       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
+       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
+       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
+       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
+       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
+       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
+       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
+       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
+       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
+       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
+       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
+       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
+       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
+       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
+       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
+       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
+       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
+       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
+       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
+       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
+       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
+       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
+       ANDROID_TONEMAP_MAX_CURVE_POINTS,
+       ANDROID_CONTROL_AVAILABLE_MODES,
+       ANDROID_CONTROL_AE_LOCK_AVAILABLE,
+       ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
+       ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
+       ANDROID_SHADING_AVAILABLE_MODES,
+       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
+    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
+                      available_characteristics_keys,
+                      sizeof(available_characteristics_keys)/sizeof(int32_t));
+
+    /*available stall durations depend on the hw + sw and will be different for different devices */
+    /*have to add for raw after implementation*/
+    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
+    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
+
+    Vector<int64_t> available_stall_durations;
+    for (uint32_t j = 0; j < stall_formats_count; j++) {
+        if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
+            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
+                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
+                available_stall_durations.add(stall_formats[j]);
+                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
+                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
+                available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
+          }
+        } else {
+            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
+                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
+                available_stall_durations.add(stall_formats[j]);
+                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
+                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
+                available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
+            }
+        }
+    }
+    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
+                      available_stall_durations.array(),
+                      available_stall_durations.size());
+
+    //QCAMERA3_OPAQUE_RAW
+    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
+    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
+    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
+    case LEGACY_RAW:
+        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
+            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
+        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
+            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
+        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
+            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
+        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
+        break;
+    case MIPI_RAW:
+        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
+            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
+        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
+            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
+        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
+            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
+        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
+        break;
+    default:
+        LOGE("unknown opaque_raw_format %d",
+                gCamCapability[cameraId]->opaque_raw_fmt);
+        break;
+    }
+    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
+
+    Vector<int32_t> strides;
+    for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
+            gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
+        cam_stream_buf_plane_info_t buf_planes;
+        strides.add(gCamCapability[cameraId]->raw_dim[i].width);
+        strides.add(gCamCapability[cameraId]->raw_dim[i].height);
+        mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
+            &gCamCapability[cameraId]->padding_info, &buf_planes);
+        strides.add(buf_planes.plane_info.mp[0].stride);
+    }
+    staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
+            strides.size());
+
+    staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
+            (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
+            sizeof(gCamCapability[cameraId]->related_cam_calibration));
+
+    uint8_t isMonoOnly =
+            (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
+    staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
+            &isMonoOnly, 1);
+
+    gStaticMetadata[cameraId] = staticInfo.release();
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : makeTable
+ *
+ * DESCRIPTION: make a table of sizes
+ *
+ * PARAMETERS :
+ *
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
+        size_t max_size, int32_t *sizeTable)
+{
+    size_t j = 0;
+    if (size > max_size) {
+       size = max_size;
+    }
+    for (size_t i = 0; i < size; i++) {
+        sizeTable[j] = dimTable[i].width;
+        sizeTable[j+1] = dimTable[i].height;
+        j+=2;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : makeFPSTable
+ *
+ * DESCRIPTION: make a table of fps ranges
+ *
+ * PARAMETERS :
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
+        size_t max_size, int32_t *fpsRangesTable)
+{
+    size_t j = 0;
+    if (size > max_size) {
+       size = max_size;
+    }
+    for (size_t i = 0; i < size; i++) {
+        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
+        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
+        j+=2;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : makeOverridesList
+ *
+ * DESCRIPTION: make a list of scene mode overrides
+ *
+ * PARAMETERS :
+ *
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::makeOverridesList(
+        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
+        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
+{
+    /*daemon will give a list of overrides for all scene modes.
+      However we should send the fwk only the overrides for the scene modes
+      supported by the framework*/
+    size_t j = 0;
+    if (size > max_size) {
+       size = max_size;
+    }
+    size_t focus_count = CAM_FOCUS_MODE_MAX;
+    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
+            focus_count);
+    for (size_t i = 0; i < size; i++) {
+        bool supt = false;
+        size_t index = supported_indexes[i];
+        overridesList[j] = gCamCapability[camera_id]->flash_available ?
+                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
+        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
+                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
+                overridesTable[index].awb_mode);
+        if (NAME_NOT_FOUND != val) {
+            overridesList[j+1] = (uint8_t)val;
+        }
+        uint8_t focus_override = overridesTable[index].af_mode;
+        for (size_t k = 0; k < focus_count; k++) {
+           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
+              supt = true;
+              break;
+           }
+        }
+        if (supt) {
+            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
+                    focus_override);
+            if (NAME_NOT_FOUND != val) {
+                overridesList[j+2] = (uint8_t)val;
+            }
+        } else {
+           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
+        }
+        j+=3;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : filterJpegSizes
+ *
+ * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
+ *              could be downscaled to
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : length of jpegSizes array
+ *==========================================================================*/
+
+size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
+        size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
+        uint8_t downscale_factor)
+{
+    if (0 == downscale_factor) {
+        downscale_factor = 1;
+    }
+
+    int32_t min_width = active_array_size.width / downscale_factor;
+    int32_t min_height = active_array_size.height / downscale_factor;
+    size_t jpegSizesCnt = 0;
+    if (processedSizesCnt > maxCount) {
+        processedSizesCnt = maxCount;
+    }
+    for (size_t i = 0; i < processedSizesCnt; i+=2) {
+        if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
+            jpegSizes[jpegSizesCnt] = processedSizes[i];
+            jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
+            jpegSizesCnt += 2;
+        }
+    }
+    return jpegSizesCnt;
+}
+
+/*===========================================================================
+ * FUNCTION   : computeNoiseModelEntryS
+ *
+ * DESCRIPTION: function to map a given sensitivity to the S noise
+ *              model parameters in the DNG noise model.
+ *
+ * PARAMETERS : sens : the sensor sensitivity
+ *
+ ** RETURN    : S (sensor amplification) noise
+ *
+ *==========================================================================*/
+double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
+    double s = gCamCapability[mCameraId]->gradient_S * sens +
+            gCamCapability[mCameraId]->offset_S;
+    return ((s < 0.0) ? 0.0 : s);
+}
+
+/*===========================================================================
+ * FUNCTION   : computeNoiseModelEntryO
+ *
+ * DESCRIPTION: function to map a given sensitivity to the O noise
+ *              model parameters in the DNG noise model.
+ *
+ * PARAMETERS : sens : the sensor sensitivity
+ *
+ ** RETURN    : O (sensor readout) noise
+ *
+ *==========================================================================*/
+double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
+    int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
+    double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
+            1.0 : (1.0 * sens / max_analog_sens);
+    double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
+            gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
+    return ((o < 0.0) ? 0.0 : o);
+}
+
+/*===========================================================================
+ * FUNCTION   : getSensorSensitivity
+ *
+ * DESCRIPTION: convert iso_mode to an integer value
+ *
+ * PARAMETERS : iso_mode : the iso_mode supported by sensor
+ *
+ ** RETURN    : sensitivity supported by sensor
+ *
+ *==========================================================================*/
+int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
+{
+    int32_t sensitivity;
+
+    switch (iso_mode) {
+    case CAM_ISO_MODE_100:
+        sensitivity = 100;
+        break;
+    case CAM_ISO_MODE_200:
+        sensitivity = 200;
+        break;
+    case CAM_ISO_MODE_400:
+        sensitivity = 400;
+        break;
+    case CAM_ISO_MODE_800:
+        sensitivity = 800;
+        break;
+    case CAM_ISO_MODE_1600:
+        sensitivity = 1600;
+        break;
+    default:
+        sensitivity = -1;
+        break;
+    }
+    return sensitivity;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCamInfo
+ *
+ * DESCRIPTION: query camera capabilities
+ *
+ * PARAMETERS :
+ *   @cameraId  : camera Id
+ *   @info      : camera info struct to be filled in with camera capabilities
+ *
+ * RETURN     : int type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
+        struct camera_info *info)
+{
+    ATRACE_CALL();
+    int rc = 0;
+
+    pthread_mutex_lock(&gCamLock);
+    if (NULL == gCamCapability[cameraId]) {
+        rc = initCapabilities(cameraId);
+        if (rc < 0) {
+            pthread_mutex_unlock(&gCamLock);
+            return rc;
+        }
+    }
+
+    if (NULL == gStaticMetadata[cameraId]) {
+        rc = initStaticMetadata(cameraId);
+        if (rc < 0) {
+            pthread_mutex_unlock(&gCamLock);
+            return rc;
+        }
+    }
+
+    switch(gCamCapability[cameraId]->position) {
+    case CAM_POSITION_BACK:
+    case CAM_POSITION_BACK_AUX:
+        info->facing = CAMERA_FACING_BACK;
+        break;
+
+    case CAM_POSITION_FRONT:
+    case CAM_POSITION_FRONT_AUX:
+        info->facing = CAMERA_FACING_FRONT;
+        break;
+
+    default:
+        LOGE("Unknown position type %d for camera id:%d",
+                gCamCapability[cameraId]->position, cameraId);
+        rc = -1;
+        break;
+    }
+
+
+    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
+    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
+    info->static_camera_characteristics = gStaticMetadata[cameraId];
+
+    //For now assume both cameras can operate independently.
+    info->conflicting_devices = NULL;
+    info->conflicting_devices_length = 0;
+
+    //resource cost is 100 * MIN(1.0, m/M),
+    //where m is throughput requirement with maximum stream configuration
+    //and M is CPP maximum throughput.
+    float max_fps = 0.0;
+    for (uint32_t i = 0;
+            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
+        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
+            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
+    }
+    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
+            gCamCapability[cameraId]->active_array_size.width *
+            gCamCapability[cameraId]->active_array_size.height * max_fps /
+            gCamCapability[cameraId]->max_pixel_bandwidth;
+    info->resource_cost = 100 * MIN(1.0, ratio);
+    LOGI("camera %d resource cost is %d", cameraId,
+            info->resource_cost);
+
+    pthread_mutex_unlock(&gCamLock);
+    return rc;
+}
+
/*===========================================================================
 * FUNCTION   : translateCapabilityToMetadata
 *
 * DESCRIPTION: build (and cache) the default request settings for one
 *              capture template, translating cam_capability_t values into
 *              camera_metadata_t tags
 *
 * PARAMETERS : type of the request (CAMERA3_TEMPLATE_* id)
 *
 *
 * RETURN     : success: camera_metadata_t*
 *              failure: NULL
 *
 *==========================================================================*/
camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
{
    // Defaults are built once per template and then served from the cache.
    if (mDefaultMetadata[type] != NULL) {
        return mDefaultMetadata[type];
    }
    //first time we are handling this request
    //fill up the metadata structure using the wrapper class
    CameraMetadata settings;
    //translate from cam_capability_t to camera_metadata_tag_t
    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
    int32_t defaultRequestID = 0;
    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);

    /* OIS disable */
    char ois_prop[PROPERTY_VALUE_MAX];
    memset(ois_prop, 0, sizeof(ois_prop));
    property_get("persist.camera.ois.disable", ois_prop, "0");
    uint8_t ois_disable = (uint8_t)atoi(ois_prop);

    /* Force video to use OIS (defaults to on) */
    char videoOisProp[PROPERTY_VALUE_MAX];
    memset(videoOisProp, 0, sizeof(videoOisProp));
    property_get("persist.camera.ois.video", videoOisProp, "1");
    uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);

    // EIS enable/disable (defaults to off)
    char eis_prop[PROPERTY_VALUE_MAX];
    memset(eis_prop, 0, sizeof(eis_prop));
    property_get("persist.camera.eis.enable", eis_prop, "0");
    const uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);

    const bool facingBack = ((gCamCapability[mCameraId]->position == CAM_POSITION_BACK) ||
            (gCamCapability[mCameraId]->position == CAM_POSITION_BACK_AUX));
    // This is a bit hacky. EIS is enabled only when the above setprop
    // is set to non-zero value and on back camera (for 2015 Nexus).
    // Ideally, we should rely on m_bEisEnable, but we cannot guarantee
    // configureStream is called before this function. In other words,
    // we cannot guarantee the app will call configureStream before
    // calling createDefaultRequest.
    const bool eisEnabled = facingBack && eis_prop_set;

    // Per-template mode selections; each case below must assign every one
    // of these (vsMode/optStabMode have safe pre-set defaults).
    uint8_t controlIntent = 0;
    uint8_t focusMode;
    uint8_t vsMode;
    uint8_t optStabMode;
    uint8_t cacMode;
    uint8_t edge_mode;
    uint8_t noise_red_mode;
    uint8_t tonemap_mode;
    bool highQualityModeEntryAvailable = FALSE;
    bool fastModeEntryAvailable = FALSE;
    vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
    optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
    switch (type) {
      case CAMERA3_TEMPLATE_PREVIEW:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        break;
      case CAMERA3_TEMPLATE_STILL_CAPTURE:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
        tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
        // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
        for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
            if (gCamCapability[mCameraId]->aberration_modes[i] ==
                    CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
                highQualityModeEntryAvailable = TRUE;
            } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
                    CAM_COLOR_CORRECTION_ABERRATION_FAST) {
                fastModeEntryAvailable = TRUE;
            }
        }
        if (highQualityModeEntryAvailable) {
            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
        } else if (fastModeEntryAvailable) {
            cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        }
        break;
      case CAMERA3_TEMPLATE_VIDEO_RECORD:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        if (eisEnabled) {
            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
        }
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        if (forceVideoOis)
            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        break;
      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        if (eisEnabled) {
            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
        }
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        if (forceVideoOis)
            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        break;
      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        break;
      case CAMERA3_TEMPLATE_MANUAL:
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        break;
      default:
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        break;
    }
    settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
    // A single supported focus mode means a fixed-focus sensor: report AF off.
    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
    }
    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);

    // Sensor-level OIS capability (and the ois.disable property) overrides
    // the template's optical stabilization choice.
    if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
    else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
            || ois_disable)
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);

    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
            &gCamCapability[mCameraId]->exposure_compensation_default, 1);

    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);

    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);

    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);

    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);

    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);

    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);

    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);

    /*flash*/
    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);

    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
    settings.update(ANDROID_FLASH_FIRING_POWER,
            &flashFiringLevel, 1);

    /* lens */
    float default_aperture = gCamCapability[mCameraId]->apertures[0];
    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);

    if (gCamCapability[mCameraId]->filter_densities_count) {
        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
                        gCamCapability[mCameraId]->filter_densities_count);
    }

    float default_focal_length = gCamCapability[mCameraId]->focal_length;
    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);

    if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
        float default_focus_distance = 0;
        settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
    }

    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);

    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);

    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);

    /* face detection (default to OFF) */
    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);

    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);

    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);

    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);

    static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);

    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);

    /* Exposure time(Update the Min Exposure Time)*/
    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);

    /* frame duration */
    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);

    /* sensitivity */
    static const int32_t default_sensitivity = 100;
    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);

    /*edge mode*/
    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);

    /*noise reduction mode*/
    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);

    /*color correction mode*/
    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);

    /*transform matrix mode*/
    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);

    // Default crop is the full active pixel array (no digital zoom).
    int32_t scaler_crop_region[4];
    scaler_crop_region[0] = 0;
    scaler_crop_region[1] = 0;
    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);

    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);

    /*focus distance*/
    float focus_distance = 0.0;
    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);

    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
    float max_range = 0.0;
    float max_fixed_fps = 0.0;
    int32_t fps_range[2] = {0, 0};
    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
            i++) {
        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
        if (type == CAMERA3_TEMPLATE_PREVIEW ||
                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
            // Widest (most flexible) range for picture-oriented templates.
            if (range > max_range) {
                fps_range[0] =
                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
                fps_range[1] =
                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
                max_range = range;
            }
        } else {
            // Highest fixed (min==max, within float tolerance) range for video.
            if (range < 0.01 && max_fixed_fps <
                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
                fps_range[0] =
                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
                fps_range[1] =
                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
            }
        }
    }
    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);

    /*precapture trigger*/
    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);

    /*af trigger*/
    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);

    /* ae & af regions: whole active array, weight 0 (metering disabled) */
    int32_t active_region[] = {
            gCamCapability[mCameraId]->active_array_size.left,
            gCamCapability[mCameraId]->active_array_size.top,
            gCamCapability[mCameraId]->active_array_size.left +
                    gCamCapability[mCameraId]->active_array_size.width,
            gCamCapability[mCameraId]->active_array_size.top +
                    gCamCapability[mCameraId]->active_array_size.height,
            0};
    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
            sizeof(active_region) / sizeof(active_region[0]));
    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
            sizeof(active_region) / sizeof(active_region[0]));

    /* black level lock */
    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);

    /* lens shading map mode: raw-capable sensors report the map by default */
    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
    if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
        shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
    }
    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);

    //special defaults for manual template
    // NOTE: these intentionally override values set earlier in this function.
    if (type == CAMERA3_TEMPLATE_MANUAL) {
        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);

        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);

        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);

        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);

        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);

        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
    }


    /* TNR
     * We'll use this location to determine which modes TNR will be set.
     * We will enable TNR to be on if either of the Preview/Video stream requires TNR
     * This is not to be confused with linking on a per stream basis that decision
     * is still on per-session basis and will be handled as part of config stream
     */
    uint8_t tnr_enable = 0;

    if (m_bTnrPreview || m_bTnrVideo) {

        switch (type) {
            case CAMERA3_TEMPLATE_VIDEO_RECORD:
                    tnr_enable = 1;
                    break;

            default:
                    tnr_enable = 0;
                    break;
        }

        int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
        settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
        settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);

        LOGD("TNR:%d with process plate %d for template:%d",
                             tnr_enable, tnr_process_type, type);
    }

    //Update Link tags to default
    int32_t sync_type = CAM_TYPE_STANDALONE;
    settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);

    int32_t is_main = 0; //this doesn't matter as app should overwrite
    settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);

    settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);

    /* CDS default */
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.CDS", prop, "Auto");
    cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
    cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
    // Unrecognized property values map to CAM_CDS_MODE_MAX; fall back to AUTO.
    if (CAM_CDS_MODE_MAX == cds_mode) {
        cds_mode = CAM_CDS_MODE_AUTO;
    }

    /* Disabling CDS in templates which have TNR enabled*/
    if (tnr_enable)
        cds_mode = CAM_CDS_MODE_OFF;

    int32_t mode = cds_mode;
    settings.update(QCAMERA3_CDS_MODE, &mode, 1);
    mDefaultMetadata[type] = settings.release();

    return mDefaultMetadata[type];
}
+
+/*===========================================================================
+ * FUNCTION   : setFrameParameters
+ *
+ * DESCRIPTION: set parameters per frame as requested in the metadata from
+ *              framework
+ *
+ * PARAMETERS :
+ *   @request   : request that needs to be serviced
+ *   @streamID : Stream ID of all the requested streams
+ *   @blob_request: Whether this request is a blob request or not
+ *   @snapshotStreamId: Stream ID of the snapshot stream
+ *
+ * RETURN     : success: NO_ERROR
+ *              failure:
+ *==========================================================================*/
+int QCamera3HardwareInterface::setFrameParameters(
+                    camera3_capture_request_t *request,
+                    cam_stream_ID_t streamID,
+                    int blob_request,
+                    uint32_t snapshotStreamId)
+{
+    /*translate from camera_metadata_t type to parm_type_t*/
+    int rc = 0;
+    int32_t hal_version = CAM_HAL_V3;
+
+    // Start every frame from a clean parameter buffer.
+    clear_metadata_buffer(mParameters);
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
+        LOGE("Failed to set hal version in the parameters");
+        return BAD_VALUE;
+    }
+
+    /*we need to update the frame number in the parameters*/
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
+            request->frame_number)) {
+        LOGE("Failed to set the frame number in the parameters");
+        return BAD_VALUE;
+    }
+
+    /* Update stream id of all the requested buffers */
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
+        LOGE("Failed to set stream type mask in the parameters");
+        return BAD_VALUE;
+    }
+
+    // One-shot: send CAM_INTF_PARM_UPDATE_DEBUG_LEVEL so the backend
+    // re-reads its debug-level property, then clear the flag so this is
+    // not repeated on every frame.
+    if (mUpdateDebugLevel) {
+        uint32_t dummyDebugLevel = 0;
+        /* The value of dummyDebugLevel is irrelavent. On
+         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
+                dummyDebugLevel)) {
+            LOGE("Failed to set UPDATE_DEBUG_LEVEL");
+            return BAD_VALUE;
+        }
+        mUpdateDebugLevel = false;
+    }
+
+    // Only translate when the framework supplied per-request settings.
+    // For blob (JPEG) requests, snapshot the translated parameters into
+    // mPrevParameters so they can be reused later (e.g. for reprocess).
+    if(request->settings != NULL){
+        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
+        if (blob_request)
+            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setReprocParameters
+ *
+ * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
+ *              return it.
+ *
+ * PARAMETERS :
+ *   @request         : request that needs to be serviced
+ *   @reprocParam     : HAL metadata buffer to be filled in
+ *   @snapshotStreamId: Stream ID of the snapshot stream
+ *
+ * RETURN     : success: NO_ERROR
+ *              failure:
+ *==========================================================================*/
+int32_t QCamera3HardwareInterface::setReprocParameters(
+        camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
+        uint32_t snapshotStreamId)
+{
+    /*translate from camera_metadata_t type to parm_type_t*/
+    int rc = 0;
+
+    if (NULL == request->settings){
+        LOGE("Reprocess settings cannot be NULL");
+        return BAD_VALUE;
+    }
+
+    if (NULL == reprocParam) {
+        LOGE("Invalid reprocessing metadata buffer");
+        return BAD_VALUE;
+    }
+    clear_metadata_buffer(reprocParam);
+
+    /*we need to update the frame number in the parameters*/
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
+            request->frame_number)) {
+        LOGE("Failed to set the frame number in the parameters");
+        return BAD_VALUE;
+    }
+
+    rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
+    if (rc < 0) {
+        LOGE("Failed to translate reproc request");
+        return rc;
+    }
+
+    CameraMetadata frame_settings;
+    frame_settings = request->settings;
+    /* Fix: also require QCAMERA3_CROP_ROI_MAP_REPROCESS before reading it.
+     * The roi_map data pointer below was previously dereferenced without an
+     * exists() check, so a request carrying crop info but no ROI map would
+     * dereference a NULL data pointer. */
+    if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
+            frame_settings.exists(QCAMERA3_CROP_REPROCESS) &&
+            frame_settings.exists(QCAMERA3_CROP_ROI_MAP_REPROCESS)) {
+        int32_t *crop_count =
+                frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
+        int32_t *crop_data =
+                frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
+        int32_t *roi_map =
+                frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
+        if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
+            // Only the first crop entry is consumed; crop_data/roi_map are
+            // laid out as [left, top, width, height].
+            cam_crop_data_t crop_meta;
+            memset(&crop_meta, 0, sizeof(cam_crop_data_t));
+            crop_meta.num_of_streams = 1;
+            crop_meta.crop_info[0].crop.left   = crop_data[0];
+            crop_meta.crop_info[0].crop.top    = crop_data[1];
+            crop_meta.crop_info[0].crop.width  = crop_data[2];
+            crop_meta.crop_info[0].crop.height = crop_data[3];
+
+            crop_meta.crop_info[0].roi_map.left =
+                    roi_map[0];
+            crop_meta.crop_info[0].roi_map.top =
+                    roi_map[1];
+            crop_meta.crop_info[0].roi_map.width =
+                    roi_map[2];
+            crop_meta.crop_info[0].roi_map.height =
+                    roi_map[3];
+
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
+                rc = BAD_VALUE;
+            }
+            LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
+                    request->input_buffer->stream,
+                    crop_meta.crop_info[0].crop.left,
+                    crop_meta.crop_info[0].crop.top,
+                    crop_meta.crop_info[0].crop.width,
+                    crop_meta.crop_info[0].crop.height);
+            LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
+                    request->input_buffer->stream,
+                    crop_meta.crop_info[0].roi_map.left,
+                    crop_meta.crop_info[0].roi_map.top,
+                    crop_meta.crop_info[0].roi_map.width,
+                    crop_meta.crop_info[0].roi_map.height);
+        } else {
+            LOGE("Invalid reprocess crop count %d!", *crop_count);
+        }
+    } else {
+        LOGE("No crop data from matching output stream");
+    }
+
+    /* These settings are not needed for regular requests so handle them specially for
+       reprocess requests; information needed for EXIF tags */
+    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
+        int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
+                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
+        if (NAME_NOT_FOUND != val) {
+            uint32_t flashMode = (uint32_t)val;
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
+                rc = BAD_VALUE;
+            }
+        } else {
+            LOGE("Could not map fwk flash mode %d to correct hal flash mode",
+                    frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
+        }
+    } else {
+        LOGH("No flash mode in reprocess settings");
+    }
+
+    if (frame_settings.exists(ANDROID_FLASH_STATE)) {
+        int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
+            rc = BAD_VALUE;
+        }
+    } else {
+        LOGH("No flash state in reprocess settings");
+    }
+
+    if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
+        uint8_t *reprocessFlags =
+            frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
+                *reprocessFlags)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    // Add metadata which DDM needs
+    if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_DDM_DATA_BLOB)) {
+        cam_ddm_info_t *ddm_info =
+                (cam_ddm_info_t *)frame_settings.find
+                (QCAMERA3_HAL_PRIVATEDATA_DDM_DATA_BLOB).data.u8;
+        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
+                ddm_info->sensor_crop_info);
+        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
+                ddm_info->camif_crop_info);
+        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
+                ddm_info->isp_crop_info);
+        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
+                ddm_info->cpp_crop_info);
+        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
+                ddm_info->af_focal_length_ratio);
+        ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
+                ddm_info->pipeline_flip);
+        /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
+           CAM_INTF_PARM_ROTATION metadata then has been added in
+           translateToHalMetadata. HAL need to keep this new rotation
+           metadata. Otherwise, the old rotation info saved in the vendor tag
+           would be used */
+        IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
+                CAM_INTF_PARM_ROTATION, reprocParam) {
+            LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
+        } else {
+            ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
+                    ddm_info->rotation_info);
+        }
+
+    }
+
+    /* Add additional JPEG cropping information. App add QCAMERA3_JPEG_ENCODE_CROP_RECT
+       to ask for cropping and use ROI for downscale/upscale during HW JPEG encoding.
+       roi.width and roi.height would be the final JPEG size.
+       For now, HAL only checks this for reprocess request */
+    if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
+            frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
+        uint8_t *enable =
+            frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
+        if (*enable == TRUE) {
+            int32_t *crop_data =
+                    frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
+            cam_stream_crop_info_t crop_meta;
+            memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
+            crop_meta.stream_id = 0;
+            crop_meta.crop.left   = crop_data[0];
+            crop_meta.crop.top    = crop_data[1];
+            crop_meta.crop.width  = crop_data[2];
+            crop_meta.crop.height = crop_data[3];
+            // ROI map is optional for JPEG encode crop; leave it zeroed
+            // (from the memset above) when the tag is absent.
+            if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
+                int32_t *roi =
+                    frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
+                crop_meta.roi_map.left =
+                        roi[0];
+                crop_meta.roi_map.top =
+                        roi[1];
+                crop_meta.roi_map.width =
+                        roi[2];
+                crop_meta.roi_map.height =
+                        roi[3];
+            }
+            ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
+                    crop_meta);
+            LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d",
+                    crop_meta.crop.left, crop_meta.crop.top,
+                    crop_meta.crop.width, crop_meta.crop.height);
+            LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d",
+                    crop_meta.roi_map.left, crop_meta.roi_map.top,
+                    crop_meta.roi_map.width, crop_meta.roi_map.height);
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : saveRequestSettings
+ *
+ * DESCRIPTION: Add any settings that might have changed to the request settings
+ *              and save the settings to be applied on the frame
+ *
+ * PARAMETERS :
+ *   @jpegMetadata : the extracted and/or modified jpeg metadata
+ *   @request      : request with initial settings
+ *
+ * RETURN     :
+ * camera_metadata_t* : pointer to the saved request settings; ownership is
+ *                      transferred to the caller (via CameraMetadata::release)
+ *==========================================================================*/
+camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
+        const CameraMetadata &jpegMetadata,
+        camera3_capture_request_t *request)
+{
+    camera_metadata_t *resultMetadata;
+    CameraMetadata camMetadata;
+    // Copy the request settings so the framework's buffer is untouched.
+    camMetadata = request->settings;
+
+    // Carry over a (possibly overridden) thumbnail size from the extracted
+    // JPEG metadata into the saved settings.
+    if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
+        int32_t thumbnail_size[2];
+        thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
+        thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
+        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
+                jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
+    }
+
+    // A non-NULL input buffer marks this as a reprocess request; tag the
+    // saved settings so later consumers can tell.
+    if (request->input_buffer != NULL) {
+        uint8_t reprocessFlags = 1;
+        camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
+                (uint8_t*)&reprocessFlags,
+                sizeof(reprocessFlags));
+    }
+
+    // release() detaches the underlying camera_metadata_t and hands
+    // ownership to the caller.
+    resultMetadata = camMetadata.release();
+    return resultMetadata;
+}
+
+/*===========================================================================
+ * FUNCTION   : setHalFpsRange
+ *
+ * DESCRIPTION: set FPS range parameter
+ *
+ *
+ * PARAMETERS :
+ *   @settings    : Metadata from framework
+ *   @hal_metadata: Metadata buffer
+ *
+ * NOTE       : caller must guarantee that ANDROID_CONTROL_AE_TARGET_FPS_RANGE
+ *              exists in @settings (checked in translateToHalMetadata before
+ *              calling here)
+ *
+ * RETURN     : success: NO_ERROR
+ *              failure: BAD_VALUE if any parameter batch update fails
+ *==========================================================================*/
+int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
+        metadata_buffer_t *hal_metadata)
+{
+    int32_t rc = NO_ERROR;
+    cam_fps_range_t fps_range;
+    fps_range.min_fps = (float)
+            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
+    fps_range.max_fps = (float)
+            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
+    // Default the video fps range to the AE target range; overridden below
+    // in constrained-HFR mode.
+    fps_range.video_min_fps = fps_range.min_fps;
+    fps_range.video_max_fps = fps_range.max_fps;
+
+    LOGD("aeTargetFpsRange fps: [%f %f]",
+            fps_range.min_fps, fps_range.max_fps);
+    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
+     * follows:
+     * ---------------------------------------------------------------|
+     *      Video stream is absent in configure_streams               |
+     *    (Camcorder preview before the first video record            |
+     * ---------------------------------------------------------------|
+     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
+     *                   |             |             | vid_min/max_fps|
+     * ---------------------------------------------------------------|
+     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
+     *                   |-------------|-------------|----------------|
+     *                   |  [240, 240] |     240     |  [240, 240]    |
+     * ---------------------------------------------------------------|
+     *     Video stream is present in configure_streams               |
+     * ---------------------------------------------------------------|
+     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
+     *                   |             |             | vid_min/max_fps|
+     * ---------------------------------------------------------------|
+     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
+     * (camcorder prev   |-------------|-------------|----------------|
+     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
+     *  is stopped)      |             |             |                |
+     * ---------------------------------------------------------------|
+     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
+     *                   |-------------|-------------|----------------|
+     *                   |  [240, 240] |     240     |  [240, 240]    |
+     * ---------------------------------------------------------------|
+     * When Video stream is absent in configure_streams,
+     * preview fps = sensor_fps / batchsize
+     * Eg: for 240fps at batchSize 4, preview = 60fps
+     *     for 120fps at batchSize 4, preview = 30fps
+     *
+     * When video stream is present in configure_streams, preview fps is as per
+     * the ratio of preview buffers to video buffers requested in process
+     * capture request
+     */
+    mBatchSize = 0;
+    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
+        // Constrained HFR: pin min fps to max fps per the table above.
+        fps_range.min_fps = fps_range.video_max_fps;
+        fps_range.video_min_fps = fps_range.video_max_fps;
+        // Map the requested max fps onto a discrete sensor HFR mode.
+        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
+                fps_range.max_fps);
+        if (NAME_NOT_FOUND != val) {
+            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
+                return BAD_VALUE;
+            }
+
+            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
+                /* If batchmode is currently in progress and the fps changes,
+                 * set the flag to restart the sensor */
+                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
+                        (mHFRVideoFps != fps_range.max_fps)) {
+                    mNeedSensorRestart = true;
+                }
+                mHFRVideoFps = fps_range.max_fps;
+                // Batch size keeps the preview rate at PREVIEW_FPS_FOR_HFR,
+                // clamped to the maximum supported batch.
+                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
+                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
+                    mBatchSize = MAX_HFR_BATCH_SIZE;
+                }
+             }
+            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
+
+         }
+    } else {
+        /* HFR mode is session param in backend/ISP. This should be reset when
+         * in non-HFR mode  */
+        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
+            return BAD_VALUE;
+        }
+    }
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
+        return BAD_VALUE;
+    }
+    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
+            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : translateToHalMetadata
+ *
+ * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
+ *
+ *
+ * PARAMETERS :
+ *   @request  : request sent from framework
+ *
+ *
+ * RETURN     : success: NO_ERROR
+ *              failure:
+ *==========================================================================*/
+int QCamera3HardwareInterface::translateToHalMetadata
+                                  (const camera3_capture_request_t *request,
+                                   metadata_buffer_t *hal_metadata,
+                                   uint32_t snapshotStreamId)
+{
+    int rc = 0;
+    CameraMetadata frame_settings;
+    frame_settings = request->settings;
+
+    /* Do not change the order of the following list unless you know what you are
+     * doing.
+     * The order is laid out in such a way that parameters in the front of the table
+     * may be used to override the parameters later in the table. Examples are:
+     * 1. META_MODE should precede AEC/AWB/AF MODE
+     * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
+     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
+     * 4. Any mode should precede it's corresponding settings
+     */
+    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
+        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
+            rc = BAD_VALUE;
+        }
+        rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
+        if (rc != NO_ERROR) {
+            LOGE("extractSceneMode failed");
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
+        uint8_t fwk_aeMode =
+            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
+        uint8_t aeMode;
+        int32_t redeye;
+
+        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
+            aeMode = CAM_AE_MODE_OFF;
+        } else {
+            aeMode = CAM_AE_MODE_ON;
+        }
+        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
+            redeye = 1;
+        } else {
+            redeye = 0;
+        }
+
+        int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
+                fwk_aeMode);
+        if (NAME_NOT_FOUND != val) {
+            int32_t flashMode = (int32_t)val;
+            ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
+        }
+
+        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
+        uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
+        int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
+                fwk_whiteLevel);
+        if (NAME_NOT_FOUND != val) {
+            uint8_t whiteLevel = (uint8_t)val;
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
+                rc = BAD_VALUE;
+            }
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
+        uint8_t fwk_cacMode =
+                frame_settings.find(
+                        ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
+        int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
+                fwk_cacMode);
+        if (NAME_NOT_FOUND != val) {
+            cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
+            bool entryAvailable = FALSE;
+            // Check whether Frameworks set CAC mode is supported in device or not
+            for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
+                if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
+                    entryAvailable = TRUE;
+                    break;
+                }
+            }
+            LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
+            // If entry not found then set the device supported mode instead of frameworks mode i.e,
+            // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
+            // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
+            if (entryAvailable == FALSE) {
+                if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
+                    cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
+                } else {
+                    if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
+                        // High is not supported and so set the FAST as spec say's underlying
+                        // device implementation can be the same for both modes.
+                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
+                    } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
+                        // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
+                        // in order to avoid the fps drop due to high quality
+                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
+                    } else {
+                        cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
+                    }
+                }
+            }
+            LOGD("Final cacMode is %d", cacMode);
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
+                rc = BAD_VALUE;
+            }
+        } else {
+            LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
+        uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
+        int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
+                fwk_focusMode);
+        if (NAME_NOT_FOUND != val) {
+            uint8_t focusMode = (uint8_t)val;
+            LOGD("set focus mode %d", focusMode);
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
+                rc = BAD_VALUE;
+            }
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
+        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
+                focalDistance)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
+        uint8_t fwk_antibandingMode =
+                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
+        int val = lookupHalName(ANTIBANDING_MODES_MAP,
+                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
+        if (NAME_NOT_FOUND != val) {
+            uint32_t hal_antibandingMode = (uint32_t)val;
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
+                    hal_antibandingMode)) {
+                rc = BAD_VALUE;
+            }
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
+        int32_t expCompensation = frame_settings.find(
+                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
+        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
+            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
+        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
+            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
+                expCompensation)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
+        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
+            rc = BAD_VALUE;
+        }
+    }
+    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
+        rc = setHalFpsRange(frame_settings, hal_metadata);
+        if (rc != NO_ERROR) {
+            LOGE("setHalFpsRange failed");
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
+        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
+        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
+        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
+                fwk_effectMode);
+        if (NAME_NOT_FOUND != val) {
+            uint8_t effectMode = (uint8_t)val;
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
+                rc = BAD_VALUE;
+            }
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
+        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
+                colorCorrectMode)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
+        cam_color_correct_gains_t colorCorrectGains;
+        for (size_t i = 0; i < CC_GAIN_MAX; i++) {
+            colorCorrectGains.gains[i] =
+                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
+        }
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
+                colorCorrectGains)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
+        cam_color_correct_matrix_t colorCorrectTransform;
+        cam_rational_type_t transform_elem;
+        size_t num = 0;
+        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
+           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
+              transform_elem.numerator =
+                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
+              transform_elem.denominator =
+                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
+              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
+              num++;
+           }
+        }
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
+                colorCorrectTransform)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    cam_trigger_t aecTrigger;
+    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
+    aecTrigger.trigger_id = -1;
+    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
+        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
+        aecTrigger.trigger =
+            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
+        aecTrigger.trigger_id =
+            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
+                aecTrigger)) {
+            rc = BAD_VALUE;
+        }
+        LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
+                aecTrigger.trigger, aecTrigger.trigger_id);
+    }
+
+    /*af_trigger must come with a trigger id*/
+    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
+        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
+        cam_trigger_t af_trigger;
+        af_trigger.trigger =
+            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
+        af_trigger.trigger_id =
+            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
+            rc = BAD_VALUE;
+        }
+        LOGD("AfTrigger: %d AfTriggerID: %d",
+                af_trigger.trigger, af_trigger.trigger_id);
+    }
+
+    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
+        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
+            rc = BAD_VALUE;
+        }
+    }
+    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
+        cam_edge_application_t edge_application;
+        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
+        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
+            edge_application.sharpness = 0;
+        } else {
+            edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
+        }
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
+        int32_t respectFlashMode = 1;
+        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
+            uint8_t fwk_aeMode =
+                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
+            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
+                respectFlashMode = 0;
+                LOGH("AE Mode controls flash, ignore android.flash.mode");
+            }
+        }
+        if (respectFlashMode) {
+            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
+                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
+            LOGH("flash mode after mapping %d", val);
+            // To check: CAM_INTF_META_FLASH_MODE usage
+            if (NAME_NOT_FOUND != val) {
+                uint8_t flashMode = (uint8_t)val;
+                if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
+                    rc = BAD_VALUE;
+                }
+            }
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
+        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
+        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
+                flashFiringTime)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
+        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
+                hotPixelMode)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
+        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
+                lensAperture)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
+        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
+                filterDensity)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
+        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
+                focalLength)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
+        uint8_t optStabMode =
+                frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
+                optStabMode)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
+        uint8_t videoStabMode =
+                frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
+        LOGD("videoStabMode from APP = %d", videoStabMode);
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
+                videoStabMode)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+
+    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
+        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
+                noiseRedMode)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
+        float reprocessEffectiveExposureFactor =
+            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
+                reprocessEffectiveExposureFactor)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    cam_crop_region_t scalerCropRegion;
+    bool scalerCropSet = false;
+    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
+        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
+        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
+        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
+        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
+
+        // Map coordinate system from active array to sensor output.
+        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
+                scalerCropRegion.width, scalerCropRegion.height);
+
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
+                scalerCropRegion)) {
+            rc = BAD_VALUE;
+        }
+        scalerCropSet = true;
+    }
+
+    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
+        int64_t sensorExpTime =
+                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
+        LOGD("setting sensorExpTime %lld", sensorExpTime);
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
+                sensorExpTime)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
+        int64_t sensorFrameDuration =
+                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
+        int64_t minFrameDuration = getMinFrameDuration(request);
+        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
+        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
+            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
+        LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
+                sensorFrameDuration)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
+        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
+        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
+                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
+        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
+                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
+        LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
+                sensorSensitivity)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
+        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
+        uint8_t fwk_facedetectMode =
+                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
+
+        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
+                fwk_facedetectMode);
+
+        if (NAME_NOT_FOUND != val) {
+            uint8_t facedetectMode = (uint8_t)val;
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
+                    facedetectMode)) {
+                rc = BAD_VALUE;
+            }
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
+        uint8_t histogramMode =
+                frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
+                histogramMode)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
+        uint8_t sharpnessMapMode =
+                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
+                sharpnessMapMode)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
+        uint8_t tonemapMode =
+                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
+            rc = BAD_VALUE;
+        }
+    }
+    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
+    /*All tonemap channels will have the same number of points*/
+    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
+        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
+        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
+        cam_rgb_tonemap_curves tonemapCurves;
+        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
+        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
+            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
+                     tonemapCurves.tonemap_points_cnt,
+                    CAM_MAX_TONEMAP_CURVE_SIZE);
+            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
+        }
+
+        /* ch0 = G*/
+        size_t point = 0;
+        cam_tonemap_curve_t tonemapCurveGreen;
+        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
+            for (size_t j = 0; j < 2; j++) {
+               tonemapCurveGreen.tonemap_points[i][j] =
+                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
+               point++;
+            }
+        }
+        tonemapCurves.curves[0] = tonemapCurveGreen;
+
+        /* ch 1 = B */
+        point = 0;
+        cam_tonemap_curve_t tonemapCurveBlue;
+        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
+            for (size_t j = 0; j < 2; j++) {
+               tonemapCurveBlue.tonemap_points[i][j] =
+                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
+               point++;
+            }
+        }
+        tonemapCurves.curves[1] = tonemapCurveBlue;
+
+        /* ch 2 = R */
+        point = 0;
+        cam_tonemap_curve_t tonemapCurveRed;
+        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
+            for (size_t j = 0; j < 2; j++) {
+               tonemapCurveRed.tonemap_points[i][j] =
+                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
+               point++;
+            }
+        }
+        tonemapCurves.curves[2] = tonemapCurveRed;
+
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
+                tonemapCurves)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
+        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
+                captureIntent)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
+        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
+                blackLevelLock)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
+        uint8_t lensShadingMapMode =
+                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
+                lensShadingMapMode)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
+        cam_area_t roi;
+        bool reset = true;
+        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
+
+        // Map coordinate system from active array to sensor output.
+        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
+                roi.rect.height);
+
+        if (scalerCropSet) {
+            reset = resetIfNeededROI(&roi, &scalerCropRegion);
+        }
+        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
+        cam_area_t roi;
+        bool reset = true;
+        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
+
+        // Map coordinate system from active array to sensor output.
+        mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
+                roi.rect.height);
+
+        if (scalerCropSet) {
+            reset = resetIfNeededROI(&roi, &scalerCropRegion);
+        }
+        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    // CDS for non-HFR non-video mode
+    if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
+            !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
+        int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
+        if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
+            LOGE("Invalid CDS mode %d!", *fwk_cds);
+        } else {
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
+                    CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
+                rc = BAD_VALUE;
+            }
+        }
+    }
+
+    // TNR
+    if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
+        frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
+        uint8_t b_TnrRequested = 0;
+        cam_denoise_param_t tnr;
+        tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
+        tnr.process_plates =
+            (cam_denoise_process_type_t)frame_settings.find(
+            QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
+        b_TnrRequested = tnr.denoise_enable;
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
+        int32_t fwk_testPatternMode =
+                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
+        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
+                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
+
+        if (NAME_NOT_FOUND != testPatternMode) {
+            cam_test_pattern_data_t testPatternData;
+            memset(&testPatternData, 0, sizeof(testPatternData));
+            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
+            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
+                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
+                int32_t *fwk_testPatternData =
+                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
+                testPatternData.r = fwk_testPatternData[0];
+                testPatternData.b = fwk_testPatternData[3];
+                switch (gCamCapability[mCameraId]->color_arrangement) {
+                    case CAM_FILTER_ARRANGEMENT_RGGB:
+                    case CAM_FILTER_ARRANGEMENT_GRBG:
+                        testPatternData.gr = fwk_testPatternData[1];
+                        testPatternData.gb = fwk_testPatternData[2];
+                        break;
+                    case CAM_FILTER_ARRANGEMENT_GBRG:
+                    case CAM_FILTER_ARRANGEMENT_BGGR:
+                        testPatternData.gr = fwk_testPatternData[2];
+                        testPatternData.gb = fwk_testPatternData[1];
+                        break;
+                    default:
+                        LOGE("color arrangement %d is not supported",
+                                gCamCapability[mCameraId]->color_arrangement);
+                        break;
+                }
+            }
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
+                    testPatternData)) {
+                rc = BAD_VALUE;
+            }
+        } else {
+            LOGE("Invalid framework sensor test pattern mode %d",
+                    fwk_testPatternMode);
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
+        size_t count = 0;
+        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
+        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
+                gps_coords.data.d, gps_coords.count, count);
+        if (gps_coords.count != count) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
+        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
+        size_t count = 0;
+        const char *gps_methods_src = (const char *)
+                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
+        memset(gps_methods, '\0', sizeof(gps_methods));
+        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
+        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
+                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
+        if (GPS_PROCESSING_METHOD_SIZE != count) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
+        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
+                gps_timestamp)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
+        int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
+        cam_rotation_info_t rotation_info;
+        if (orientation == 0) {
+           rotation_info.rotation = ROTATE_0;
+        } else if (orientation == 90) {
+           rotation_info.rotation = ROTATE_90;
+        } else if (orientation == 180) {
+           rotation_info.rotation = ROTATE_180;
+        } else if (orientation == 270) {
+           rotation_info.rotation = ROTATE_270;
+        }
+        rotation_info.streamId = snapshotStreamId;
+        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
+        uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
+        uint32_t thumb_quality = (uint32_t)
+                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
+                thumb_quality)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
+        cam_dimension_t dim;
+        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
+        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    // Internal metadata
+    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
+        size_t count = 0;
+        camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
+        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
+                privatedata.data.i32, privatedata.count, count);
+        if (privatedata.count != count) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    if (m_debug_avtimer || frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
+        uint8_t* use_av_timer = NULL;
+
+        if (m_debug_avtimer){
+            use_av_timer = &m_debug_avtimer;
+        }
+        else{
+            use_av_timer =
+                frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
+        }
+
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    // EV step
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
+            gCamCapability[mCameraId]->exp_compensation_step)) {
+        rc = BAD_VALUE;
+    }
+
+    // CDS info
+    if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
+        cam_cds_data_t *cdsData = (cam_cds_data_t *)
+                frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
+
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
+                CAM_INTF_META_CDS_DATA, *cdsData)) {
+            rc = BAD_VALUE;
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : captureResultCb
+ *
+ * DESCRIPTION: Static callback handler for all channels (streams as well as
+ *              metadata). Recovers the owning QCamera3HardwareInterface from
+ *              userdata and forwards to its member captureResultCb.
+ *
+ * PARAMETERS :
+ *   @metadata : frame information from mm-camera-interface
+ *   @buffer   : actual gralloc buffer to be returned to frameworks.
+ *               NULL if metadata.
+ *   @frame_number : frame number for this capture result
+ *   @isInputBuffer: true if this result is for an input (reprocess) buffer
+ *   @userdata : opaque pointer to the owning hardware interface instance
+ *
+ * RETURN     : NONE
+ *==========================================================================*/
+void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
+                camera3_stream_buffer_t *buffer,
+                uint32_t frame_number, bool isInputBuffer, void *userdata)
+{
+    QCamera3HardwareInterface *hw =
+            reinterpret_cast<QCamera3HardwareInterface *>(userdata);
+    if (NULL == hw) {
+        LOGE("Invalid hw %p", hw);
+        return;
+    }
+
+    hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
+}
+
+
+/*===========================================================================
+ * FUNCTION   : initialize
+ *
+ * DESCRIPTION: Static HAL entry point; passes framework callback pointers to
+ *              the QCamera3HardwareInterface instance stored in device->priv.
+ *
+ * PARAMETERS :
+ *   @device       : camera3 device handle from the framework
+ *   @callback_ops : framework callback table to register
+ *
+ * RETURN     : Success : 0
+ *              Failure: -ENODEV
+ *==========================================================================*/
+
+int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
+                                  const camera3_callback_ops_t *callback_ops)
+{
+    LOGD("E");
+    QCamera3HardwareInterface *hw =
+        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
+    if (hw == NULL) {
+        LOGE("NULL camera device");
+        return -ENODEV;
+    }
+
+    int rc = hw->initialize(callback_ops);
+    LOGD("X");
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : configure_streams
+ *
+ * DESCRIPTION: Static HAL entry point; forwards the framework's stream
+ *              configuration to the instance in device->priv.
+ *
+ * PARAMETERS :
+ *   @device      : camera3 device handle from the framework
+ *   @stream_list : streams the framework wants configured
+ *
+ * RETURN     : Success: 0
+ *              Failure: -EINVAL (if stream configuration is invalid)
+ *                       -ENODEV (fatal error)
+ *==========================================================================*/
+
+int QCamera3HardwareInterface::configure_streams(
+        const struct camera3_device *device,
+        camera3_stream_configuration_t *stream_list)
+{
+    LOGD("E");
+    QCamera3HardwareInterface *hw =
+        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
+    if (hw == NULL) {
+        LOGE("NULL camera device");
+        return -ENODEV;
+    }
+
+    int rc = hw->configureStreams(stream_list);
+    LOGD("X");
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : construct_default_request_settings
+ *
+ * DESCRIPTION: Static HAL entry point; builds a settings buffer matching the
+ *              requested use-case template via translateCapabilityToMetadata.
+ *
+ * PARAMETERS :
+ *   @device : camera3 device handle from the framework
+ *   @type   : request template type (preview, still capture, video, ...)
+ *
+ * RETURN     : Success: valid metadata buffer (owned by the HAL)
+ *              Failure: NULL
+ *==========================================================================*/
+const camera_metadata_t* QCamera3HardwareInterface::
+    construct_default_request_settings(const struct camera3_device *device,
+                                        int type)
+{
+
+    LOGD("E");
+    QCamera3HardwareInterface *hw =
+        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
+    if (hw == NULL) {
+        LOGE("NULL camera device");
+        return NULL;
+    }
+
+    const camera_metadata_t *fwk_metadata =
+            hw->translateCapabilityToMetadata(type);
+
+    LOGD("X");
+    return fwk_metadata;
+}
+
+/*===========================================================================
+ * FUNCTION   : process_capture_request
+ *
+ * DESCRIPTION: Static HAL entry point; hands one capture request from the
+ *              framework to the instance in device->priv.
+ *
+ * PARAMETERS :
+ *   @device  : camera3 device handle from the framework
+ *   @request : capture request to process
+ *
+ * RETURN     : 0 on success, -EINVAL on a NULL device instance, otherwise
+ *              the result of processCaptureRequest
+ *==========================================================================*/
+int QCamera3HardwareInterface::process_capture_request(
+                    const struct camera3_device *device,
+                    camera3_capture_request_t *request)
+{
+    LOGD("E");
+    QCamera3HardwareInterface *hw =
+        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
+    if (hw == NULL) {
+        LOGE("NULL camera device");
+        return -EINVAL;
+    }
+
+    int rc = hw->processCaptureRequest(request);
+    LOGD("X");
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : dump
+ *
+ * DESCRIPTION: Static HAL entry point; dumps HAL state for this camera to fd.
+ *              Also re-reads the log level property so verbosity can be
+ *              changed without restarting the media server.
+ *
+ * PARAMETERS :
+ *   @device : camera3 device handle from the framework
+ *   @fd     : file descriptor to write the dump to
+ *
+ * RETURN     : NONE
+ *==========================================================================*/
+
+void QCamera3HardwareInterface::dump(
+                const struct camera3_device *device, int fd)
+{
+    /* Log level property is read when "adb shell dumpsys media.camera" is
+       called so that the log level can be controlled without restarting
+       the media server */
+    getLogLevel();
+
+    LOGD("E");
+    QCamera3HardwareInterface *hw =
+        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
+    if (hw == NULL) {
+        LOGE("NULL camera device");
+        return;
+    }
+
+    hw->dump(fd);
+    LOGD("X");
+}
+
+/*===========================================================================
+ * FUNCTION   : flush
+ *
+ * DESCRIPTION: Static HAL entry point; flushes in-flight work on the device.
+ *              Only meaningful while streaming (STARTED): an ERROR state
+ *              triggers device-error handling, and any other state is a
+ *              successful no-op.
+ *
+ * PARAMETERS :
+ *   @device : camera3 device handle from the framework
+ *
+ * RETURN     : 0 on success or benign no-op, -EINVAL for a NULL instance,
+ *              -ENODEV when the device is in the ERROR state
+ *==========================================================================*/
+
+int QCamera3HardwareInterface::flush(
+                const struct camera3_device *device)
+{
+    LOGD("E");
+    QCamera3HardwareInterface *hw =
+        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
+    if (hw == NULL) {
+        LOGE("NULL camera device");
+        return -EINVAL;
+    }
+
+    // Validate current state under the instance lock.
+    pthread_mutex_lock(&hw->mMutex);
+    if (hw->mState == ERROR) {
+        pthread_mutex_unlock(&hw->mMutex);
+        hw->handleCameraDeviceError();
+        return -ENODEV;
+    }
+    if (hw->mState != STARTED) {
+        // Nothing to flush outside the streaming state.
+        LOGI("Flush returned during state %d", hw->mState);
+        pthread_mutex_unlock(&hw->mMutex);
+        return 0;
+    }
+    pthread_mutex_unlock(&hw->mMutex);
+
+    int rc = hw->flush(true /* restart channels */ );
+    LOGD("X");
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : close_camera_device
+ *
+ * DESCRIPTION: Static HAL entry point; tears down and deletes the
+ *              QCamera3HardwareInterface instance stored in the device.
+ *
+ * PARAMETERS :
+ *   @device : generic hw_device_t handle (actually a camera3_device_t)
+ *
+ * RETURN     : NO_ERROR on success, BAD_VALUE for a NULL instance
+ *==========================================================================*/
+int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
+{
+    camera3_device_t *cam_device =
+            reinterpret_cast<camera3_device_t *>(device);
+    QCamera3HardwareInterface *hw =
+        reinterpret_cast<QCamera3HardwareInterface *>(cam_device->priv);
+    if (hw == NULL) {
+        LOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+
+    LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
+    delete hw;
+    LOGI("[KPI Perf]: X");
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getWaveletDenoiseProcessPlate
+ *
+ * DESCRIPTION: query wavelet denoise process plate, selected by the
+ *              persist.denoise.process.plates property (0..3); any other
+ *              value falls back to STREAMLINE_YCBCR
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : WNR process plate value
+ *==========================================================================*/
+cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
+{
+    // Property value -> process plate, indexed 0..3.
+    static const cam_denoise_process_type_t kPlates[] = {
+        CAM_WAVELET_DENOISE_YCBCR_PLANE,        // 0
+        CAM_WAVELET_DENOISE_CBCR_ONLY,          // 1
+        CAM_WAVELET_DENOISE_STREAMLINE_YCBCR,   // 2
+        CAM_WAVELET_DENOISE_STREAMLINED_CBCR,   // 3
+    };
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.denoise.process.plates", prop, "0");
+    int processPlate = atoi(prop);
+    if ((processPlate < 0) ||
+            (processPlate >= (int)(sizeof(kPlates) / sizeof(kPlates[0])))) {
+        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
+    }
+    return kPlates[processPlate];
+}
+
+
+/*===========================================================================
+ * FUNCTION   : getTemporalDenoiseProcessPlate
+ *
+ * DESCRIPTION: query temporal denoise process plate
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : TNR prcocess plate value
+ *==========================================================================*/
+cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
+{
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.tnr.process.plates", prop, "0");
+    int processPlate = atoi(prop);
+    switch(processPlate) {
+    case 0:
+        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
+    case 1:
+        return CAM_WAVELET_DENOISE_CBCR_ONLY;
+    case 2:
+        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
+    case 3:
+        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
+    default:
+        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
+    }
+}
+
+
+/*===========================================================================
+ * FUNCTION   : extractSceneMode
+ *
+ * DESCRIPTION: Extract scene mode from frameworks set metadata
+ *
+ * PARAMETERS :
+ *      @frame_settings: CameraMetadata reference
+ *      @metaMode: ANDROID_CONTORL_MODE
+ *      @hal_metadata: hal metadata structure
+ *
+ * RETURN     : None
+ *==========================================================================*/
+int32_t QCamera3HardwareInterface::extractSceneMode(
+        const CameraMetadata &frame_settings, uint8_t metaMode,
+        metadata_buffer_t *hal_metadata)
+{
+    int32_t rc = NO_ERROR;
+
+    if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
+        camera_metadata_ro_entry entry =
+                frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
+        if (0 == entry.count)
+            return rc;
+
+        uint8_t fwk_sceneMode = entry.data.u8[0];
+
+        int val = lookupHalName(SCENE_MODES_MAP,
+                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
+                fwk_sceneMode);
+        if (NAME_NOT_FOUND != val) {
+            uint8_t sceneMode = (uint8_t)val;
+            LOGD("sceneMode: %d", sceneMode);
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
+                    CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
+                rc = BAD_VALUE;
+            }
+        }
+    } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
+            (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
+        uint8_t sceneMode = CAM_SCENE_MODE_OFF;
+        LOGD("sceneMode: %d", sceneMode);
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
+                CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
+            rc = BAD_VALUE;
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : needRotationReprocess
+ *
+ * DESCRIPTION: if rotation needs to be done by reprocess in pp
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCamera3HardwareInterface::needRotationReprocess()
+{
+    // Reprocess-based rotation is available only when the pp block
+    // advertises the rotation feature.
+    bool ppCanRotate =
+            (gCamCapability[mCameraId]->qcom_supported_feature_mask &
+            CAM_QCOM_FEATURE_ROTATION) > 0;
+    if (ppCanRotate) {
+        LOGH("need do reprocess for rotation");
+    }
+    return ppCanRotate;
+}
+
+/*===========================================================================
+ * FUNCTION   : needReprocess
+ *
+ * DESCRIPTION: if reprocess in needed
+ *
+ * PARAMETERS : @postprocess_mask : features already applied to the frame
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
+{
+    if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
+        // TODO: add for ZSL HDR later
+        // A frame whose mask already carries pp features was processed
+        // inline; only untouched (NONE) frames need the min pp reprocess.
+        if (postprocess_mask != CAM_QCOM_FEATURE_NONE) {
+            LOGH("already post processed frame");
+            return false;
+        }
+        LOGH("need do reprocess for ZSL WNR or min PP reprocess");
+        return true;
+    }
+    // No pp features supported at all: reprocess only if rotation demands it.
+    return needRotationReprocess();
+}
+
+/*===========================================================================
+ * FUNCTION   : needJpegExifRotation
+ *
+ * DESCRIPTION: if rotation from jpeg is needed
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCamera3HardwareInterface::needJpegExifRotation()
+{
+    // When the pp block cannot rotate, rotation must be expressed via the
+    // JPEG EXIF orientation instead.
+    bool ppCanRotate =
+            (gCamCapability[mCameraId]->qcom_supported_feature_mask &
+            CAM_QCOM_FEATURE_ROTATION) != 0;
+    if (!ppCanRotate) {
+        LOGD("Need use Jpeg EXIF Rotation");
+    }
+    return !ppCanRotate;
+}
+
+/*===========================================================================
+ * FUNCTION   : addOfflineReprocChannel
+ *
+ * DESCRIPTION: add a reprocess channel that will do reprocess on frames
+ *              coming from input channel
+ *
+ * PARAMETERS :
+ *   @config  : reprocess configuration
+ *   @inputChHandle : pointer to the input (source) channel
+ *
+ *
+ * RETURN     : Ptr to the newly created channel obj. NULL if failed.
+ *              Caller owns the returned channel and must delete it.
+ *==========================================================================*/
+QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
+        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
+{
+    int32_t rc = NO_ERROR;
+    QCamera3ReprocessChannel *pChannel = NULL;
+
+    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
+            mChannelHandle, mCameraHandle->ops, captureResultCb, config.padding,
+            CAM_QCOM_FEATURE_NONE, this, inputChHandle);
+    // NOTE(review): with a throwing operator new this check never fires;
+    // kept as-is in case the build uses a non-throwing allocator.
+    if (NULL == pChannel) {
+        LOGE("no mem for reprocess channel");
+        return NULL;
+    }
+
+    rc = pChannel->initialize(IS_TYPE_NONE);
+    if (rc != NO_ERROR) {
+        LOGE("init reprocess channel failed, ret = %d", rc);
+        delete pChannel;
+        return NULL;
+    }
+
+    // pp feature config: start from the HAL3 superset, then adjust for
+    // hardware capabilities below.
+    cam_pp_feature_config_t pp_config;
+    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
+
+    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
+    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
+            & CAM_QCOM_FEATURE_DSDN) {
+        //Use CPP CDS incase h/w supports it.
+        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
+        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
+    }
+    // Drop the rotation feature when the pp block cannot rotate.
+    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
+        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
+    }
+
+    rc = pChannel->addReprocStreamsFromSource(pp_config,
+            config,
+            IS_TYPE_NONE,
+            mMetadataChannel);
+
+    if (rc != NO_ERROR) {
+        delete pChannel;
+        return NULL;
+    }
+    return pChannel;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMobicatMask
+ *
+ * DESCRIPTION: Accessor for the Mobicat enable mask set by setMobicat().
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : mobicat mask
+ *
+ *==========================================================================*/
+uint8_t QCamera3HardwareInterface::getMobicatMask()
+{
+    // Simple accessor; the mask is written by setMobicat().
+    return m_MobicatMask;
+}
+
+/*===========================================================================
+ * FUNCTION   : setMobicat
+ *
+ * DESCRIPTION: Enable/disable Mobicat based on the persist property and
+ *              queue the chromatix-reload commands when enabling.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3HardwareInterface::setMobicat()
+{
+    char value[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.mobicat", value, "0");
+    uint8_t enableMobi = (uint8_t)atoi(value);
+
+    if (enableMobi) {
+        // Ask both the VFE and the PP block to reload chromatix for all
+        // modules so tuning data is picked up.
+        tune_cmd_t tune_cmd;
+        tune_cmd.type = SET_RELOAD_CHROMATIX;
+        tune_cmd.module = MODULE_ALL;
+        tune_cmd.value = TRUE;
+        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
+                CAM_INTF_PARM_SET_VFE_COMMAND, tune_cmd);
+        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
+                CAM_INTF_PARM_SET_PP_COMMAND, tune_cmd);
+    }
+    m_MobicatMask = enableMobi;
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+* FUNCTION   : getLogLevel
+*
+* DESCRIPTION: Reads the log level properties and picks the effective level.
+*
+* PARAMETERS :
+*   None
+*
+* RETURN     :
+*   None
+*==========================================================================*/
+void QCamera3HardwareInterface::getLogLevel()
+{
+    char prop[PROPERTY_VALUE_MAX];
+
+    property_get("persist.camera.hal.debug", prop, "0");
+    int halLevel = atoi(prop);
+    if (halLevel >= 0) {
+        gCamHal3LogLevel = (uint32_t)halLevel;
+    }
+
+    property_get("persist.camera.kpi.debug", prop, "1");
+    gKpiDebugLevel = atoi(prop);
+
+    property_get("persist.camera.global.debug", prop, "0");
+    int globalVal = atoi(prop);
+    uint32_t globalLogLevel = (globalVal >= 0) ? (uint32_t)globalVal : 0;
+
+    /* Highest log level among hal.logs and global.logs is selected */
+    if (globalLogLevel > gCamHal3LogLevel) {
+        gCamHal3LogLevel = globalLogLevel;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : validateStreamRotations
+ *
+ * DESCRIPTION: Check if the rotations requested are supported
+ *
+ * PARAMETERS :
+ *   @streamList : streams to be configured
+ *
+ * RETURN     : NO_ERROR on success
+ *              -EINVAL on failure
+ *
+ *==========================================================================*/
+int QCamera3HardwareInterface::validateStreamRotations(
+        camera3_stream_configuration_t *streamList)
+{
+    int rc = NO_ERROR;
+
+    /*
+    * Loop through all streams requested in configuration
+    * Check if unsupported rotations have been requested on any of them
+    */
+    for (size_t j = 0; j < streamList->num_streams; j++){
+        camera3_stream_t *newStream = streamList->streams[j];
+
+        bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
+        bool isImplDef = (newStream->format ==
+                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+        // Bidirectional (ZSL) impl-defined streams may not be rotated;
+        // other streams may only when their format is impl-defined.
+        bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
+                isImplDef);
+
+        if (isRotated && (!isImplDef || isZsl)) {
+            // Fix: the concatenated literals previously produced
+            // "streamtype:%d" — a space was missing between them.
+            LOGE("Error: Unsupported rotation of %d requested for stream"
+                    " type:%d and stream format:%d",
+                    newStream->rotation, newStream->stream_type,
+                    newStream->format);
+            rc = -EINVAL;
+            break;
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+* FUNCTION   : getFlashInfo
+*
+* DESCRIPTION: Retrieve information about whether the device has a flash.
+*
+* PARAMETERS :
+*   @cameraId  : Camera id to query
+*   @hasFlash  : Boolean indicating whether there is a flash device
+*                associated with given camera
+*   @flashNode : If a flash device exists, this will be its device node.
+*
+* RETURN     :
+*   None
+*==========================================================================*/
+void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
+        bool& hasFlash,
+        char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
+{
+    cam_capability_t* camCapability = gCamCapability[cameraId];
+    if (camCapability == NULL) {
+        // Capability info not available yet: report no flash, empty node.
+        hasFlash = false;
+        flashNode[0] = '\0';
+        return;
+    }
+    hasFlash = camCapability->flash_available;
+    strlcpy(flashNode,
+            (char*)camCapability->flash_dev_name,
+            QCAMERA_MAX_FILEPATH_LENGTH);
+}
+
+/*===========================================================================
+* FUNCTION   : getEepromVersionInfo
+*
+* DESCRIPTION: Retrieve version info of the sensor EEPROM data
+*
+* PARAMETERS : None
+*
+* RETURN     : string describing EEPROM version
+*              "\0" if no such info available
+*==========================================================================*/
+const char *QCamera3HardwareInterface::getEepromVersionInfo()
+{
+    // Version string captured in the capability table; empty if absent.
+    return (const char *)gCamCapability[mCameraId]->eeprom_version_info;
+}
+
+/*===========================================================================
+* FUNCTION   : getLdafCalib
+*
+* DESCRIPTION: Retrieve Laser AF calibration data
+*
+* PARAMETERS : None
+*
+* RETURN     : Two uint32_t describing laser AF calibration data
+*              NULL if none is available.
+*==========================================================================*/
+const uint32_t *QCamera3HardwareInterface::getLdafCalib()
+{
+    // Calibration words are only meaningful when they were actually read.
+    return mLdafCalibExist ? mLdafCalib : NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : dynamicUpdateMetaStreamInfo
+ *
+ * DESCRIPTION: This function:
+ *             (1) stops all the channels
+ *             (2) returns error on pending requests and buffers
+ *             (3) sends metastream_info in setparams
+ *             (4) starts all channels
+ *             This is useful when sensor has to be restarted to apply any
+ *             settings such as frame rate from a different sensor mode
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : NO_ERROR on success
+ *              Error codes on failure
+ *
+ *==========================================================================*/
+int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
+{
+    ATRACE_CALL();
+    int rc = NO_ERROR;
+
+    LOGD("E");
+
+    rc = stopAllChannels();
+    if (rc < 0) {
+        LOGE("stopAllChannels failed");
+        return rc;
+    }
+
+    rc = notifyErrorForPendingRequests();
+    if (rc < 0) {
+        LOGE("notifyErrorForPendingRequests failed");
+        return rc;
+    }
+
+    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
+        // Fix: postprocess_mask is a 64-bit feature mask (printed with
+        // %llx elsewhere in this file); printing it with %x was a
+        // printf format/argument mismatch. Also add the missing
+        // separator before "Format".
+        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%llx, "
+                "Format:%d",
+                mStreamConfigInfo.type[i],
+                mStreamConfigInfo.stream_sizes[i].width,
+                mStreamConfigInfo.stream_sizes[i].height,
+                (unsigned long long)mStreamConfigInfo.postprocess_mask[i],
+                mStreamConfigInfo.format[i]);
+    }
+
+    /* Send meta stream info once again so that ISP can start */
+    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
+            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
+    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
+            mParameters);
+    if (rc < 0) {
+        LOGE("set Metastreaminfo failed. Sensor mode does not change");
+    }
+
+    rc = startAllChannels();
+    if (rc < 0) {
+        LOGE("startAllChannels failed");
+        return rc;
+    }
+
+    LOGD("X");
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stopAllChannels
+ *
+ * DESCRIPTION: This function stops (equivalent to stream-off) all channels
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : NO_ERROR on success
+ *              Error codes on failure
+ *
+ *==========================================================================*/
+int32_t QCamera3HardwareInterface::stopAllChannels()
+{
+    LOGD("Stopping all channels");
+
+    // Stream-off every app-visible stream channel and mark it invalid.
+    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
+            it != mStreamInfo.end(); it++) {
+        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
+        if (channel != NULL) {
+            channel->stop();
+        }
+        (*it)->status = INVALID;
+    }
+
+    // Internal channels are stopped after the app streams.
+    if (mSupportChannel != NULL) {
+        mSupportChannel->stop();
+    }
+    if (mAnalysisChannel != NULL) {
+        mAnalysisChannel->stop();
+    }
+    if (mRawDumpChannel != NULL) {
+        mRawDumpChannel->stop();
+    }
+    if (mMetadataChannel != NULL) {
+        /* If content of mStreamInfo is not 0, there is metadata stream */
+        mMetadataChannel->stop();
+    }
+
+    LOGD("All channels stopped");
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : startAllChannels
+ *
+ * DESCRIPTION: This function starts (equivalent to stream-on) all channels
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : NO_ERROR on success
+ *              Error codes on failure
+ *
+ *==========================================================================*/
+int32_t QCamera3HardwareInterface::startAllChannels()
+{
+    int32_t rc = NO_ERROR;
+
+    LOGD("Start all channels ");
+    // Metadata must be streaming before any image channel is started.
+    if (mMetadataChannel != NULL) {
+        rc = mMetadataChannel->start();
+        if (rc < 0) {
+            LOGE("META channel start failed");
+            return rc;
+        }
+    }
+    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
+            it != mStreamInfo.end(); it++) {
+        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
+        if (channel == NULL) {
+            continue;
+        }
+        rc = channel->start();
+        if (rc < 0) {
+            LOGE("channel start failed");
+            return rc;
+        }
+    }
+    if (mAnalysisChannel != NULL) {
+        // NOTE(review): return value ignored, matching the original code —
+        // analysis stream start appears to be best-effort; confirm intent.
+        mAnalysisChannel->start();
+    }
+    if (mSupportChannel != NULL) {
+        rc = mSupportChannel->start();
+        if (rc < 0) {
+            LOGE("Support channel start failed");
+            return rc;
+        }
+    }
+    if (mRawDumpChannel != NULL) {
+        rc = mRawDumpChannel->start();
+        if (rc < 0) {
+            LOGE("RAW dump channel start failed");
+            return rc;
+        }
+    }
+
+    LOGD("All channels started");
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : notifyErrorForPendingRequests
+ *
+ * DESCRIPTION: This function sends error for all the pending requests/buffers.
+ *              Requests older than the oldest entry in mPendingRequestsList
+ *              get ERROR_BUFFER per buffer; requests still on the list get
+ *              ERROR_REQUEST plus error status on each of their buffers.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : Error codes
+ *              NO_ERROR on success
+ *
+ *==========================================================================*/
+int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
+{
+    int32_t rc = NO_ERROR;
+    unsigned int frameNum = 0;
+    camera3_capture_result_t result;
+    camera3_stream_buffer_t *pStream_Buf = NULL;
+
+    memset(&result, 0, sizeof(camera3_capture_result_t));
+
+    // Oldest pending request decides which buffers get ERROR_BUFFER
+    // (already-reported metadata) vs ERROR_REQUEST.
+    if (mPendingRequestsList.size() > 0) {
+        pendingRequestIterator i = mPendingRequestsList.begin();
+        frameNum = i->frame_number;
+    } else {
+        /* There might still be pending buffers even though there are
+         no pending requests. Setting the frameNum to MAX so that
+         all the buffers with smaller frame numbers are returned */
+        frameNum = UINT_MAX;
+    }
+
+    LOGH("Oldest frame num on mPendingRequestsList = %u",
+       frameNum);
+
+    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
+            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
+
+        if (req->frame_number < frameNum) {
+            // Send Error notify to frameworks for each buffer for which
+            // metadata buffer is already sent
+            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
+                req->frame_number, req->mPendingBufferList.size());
+
+            // NOTE(review): with a throwing operator new this NULL check is
+            // dead code; kept in case a non-throwing allocator is in use.
+            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
+            if (NULL == pStream_Buf) {
+                LOGE("No memory for pending buffers array");
+                return NO_MEMORY;
+            }
+            memset(pStream_Buf, 0,
+                sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
+            result.result = NULL;
+            result.frame_number = req->frame_number;
+            result.num_output_buffers = req->mPendingBufferList.size();
+            result.output_buffers = pStream_Buf;
+
+            // One ERROR_BUFFER notify per pending buffer; each buffer is
+            // returned with CAMERA3_BUFFER_STATUS_ERROR in the result.
+            size_t index = 0;
+            for (auto info = req->mPendingBufferList.begin();
+                info != req->mPendingBufferList.end(); ) {
+
+                camera3_notify_msg_t notify_msg;
+                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
+                notify_msg.type = CAMERA3_MSG_ERROR;
+                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
+                notify_msg.message.error.error_stream = info->stream;
+                notify_msg.message.error.frame_number = req->frame_number;
+                pStream_Buf[index].acquire_fence = -1;
+                pStream_Buf[index].release_fence = -1;
+                pStream_Buf[index].buffer = info->buffer;
+                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
+                pStream_Buf[index].stream = info->stream;
+                mCallbackOps->notify(mCallbackOps, &notify_msg);
+                index++;
+                // Remove buffer from list
+                info = req->mPendingBufferList.erase(info);
+            }
+
+            // Remove this request from Map
+            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
+                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
+            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
+
+            mCallbackOps->process_capture_result(mCallbackOps, &result);
+
+            delete [] pStream_Buf;
+        } else {
+
+            // Go through the pending requests info and send error request to framework
+            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
+
+            LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
+
+            // Send error notify to frameworks
+            camera3_notify_msg_t notify_msg;
+            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
+            notify_msg.type = CAMERA3_MSG_ERROR;
+            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
+            notify_msg.message.error.error_stream = NULL;
+            notify_msg.message.error.frame_number = req->frame_number;
+            mCallbackOps->notify(mCallbackOps, &notify_msg);
+
+            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
+            if (NULL == pStream_Buf) {
+                LOGE("No memory for pending buffers array");
+                return NO_MEMORY;
+            }
+            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
+
+            result.result = NULL;
+            result.frame_number = req->frame_number;
+            // NOTE(review): input_buffer is taken from the head of
+            // mPendingRequestsList, not matched to req->frame_number —
+            // assumes lists advance in lockstep; confirm.
+            result.input_buffer = i->input_buffer;
+            result.num_output_buffers = req->mPendingBufferList.size();
+            result.output_buffers = pStream_Buf;
+
+            size_t index = 0;
+            for (auto info = req->mPendingBufferList.begin();
+                info != req->mPendingBufferList.end(); ) {
+                pStream_Buf[index].acquire_fence = -1;
+                pStream_Buf[index].release_fence = -1;
+                pStream_Buf[index].buffer = info->buffer;
+                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
+                pStream_Buf[index].stream = info->stream;
+                index++;
+                // Remove buffer from list
+                info = req->mPendingBufferList.erase(info);
+            }
+
+            // Remove this request from Map
+            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
+                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
+            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
+
+            mCallbackOps->process_capture_result(mCallbackOps, &result);
+            delete [] pStream_Buf;
+            i = erasePendingRequest(i);
+        }
+    }
+
+    /* Reset pending frame Drop list and requests list */
+    mPendingFrameDropList.clear();
+
+    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
+        req.mPendingBufferList.clear();
+    }
+    mPendingBuffersMap.mPendingBuffersInRequest.clear();
+    mPendingReprocessResultList.clear();
+    LOGH("Cleared all the pending buffers ");
+
+    return rc;
+}
+
+// Returns true when a stream of (width x height) exceeds the maximum
+// viewfinder size in either dimension and so must use the encoder path.
+bool QCamera3HardwareInterface::isOnEncoder(
+        const cam_dimension_t max_viewfinder_size,
+        uint32_t width, uint32_t height)
+{
+    bool widthExceeds = (width > (uint32_t)max_viewfinder_size.width);
+    bool heightExceeds = (height > (uint32_t)max_viewfinder_size.height);
+    return widthExceeds || heightExceeds;
+}
+
+/*===========================================================================
+ * FUNCTION   : setBundleInfo
+ *
+ * DESCRIPTION: Set bundle info for all streams that are bundle.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : NO_ERROR on success
+ *              Error codes on failure
+ *==========================================================================*/
+int32_t QCamera3HardwareInterface::setBundleInfo()
+{
+    int32_t rc = NO_ERROR;
+
+    // Nothing to bundle without a channel handle.
+    if (!mChannelHandle) {
+        return rc;
+    }
+
+    cam_bundle_config_t bundleInfo;
+    memset(&bundleInfo, 0, sizeof(bundleInfo));
+    rc = mCameraHandle->ops->get_bundle_info(
+            mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
+    if (rc != NO_ERROR) {
+        LOGE("get_bundle_info failed");
+        return rc;
+    }
+
+    // Propagate the same bundle to every active channel.
+    if (mAnalysisChannel) {
+        mAnalysisChannel->setBundleInfo(bundleInfo);
+    }
+    if (mSupportChannel) {
+        mSupportChannel->setBundleInfo(bundleInfo);
+    }
+    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
+            it != mStreamInfo.end(); it++) {
+        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
+        channel->setBundleInfo(bundleInfo);
+    }
+    if (mRawDumpChannel) {
+        mRawDumpChannel->setBundleInfo(bundleInfo);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : get_num_overall_buffers
+ *
+ * DESCRIPTION: Total count of pending buffers summed over all requests.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : Number of overall pending buffers
+ *
+ *==========================================================================*/
+uint32_t PendingBuffersMap::get_num_overall_buffers()
+{
+    uint32_t total = 0;
+    // Each request contributes its own pending-buffer list length.
+    for (auto &request : mPendingBuffersInRequest) {
+        total += request.mPendingBufferList.size();
+    }
+    return total;
+}
+
+/*===========================================================================
+ * FUNCTION   : removeBuf
+ *
+ * DESCRIPTION: Remove a matching buffer from tracker. If removing it leaves
+ *              its request empty, the request entry is removed as well.
+ *
+ * PARAMETERS : @buffer: image buffer for the callback
+ *
+ * RETURN     : None
+ *
+ *==========================================================================*/
+void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
+{
+    bool buffer_found = false;
+    for (auto req = mPendingBuffersInRequest.begin();
+            req != mPendingBuffersInRequest.end(); req++) {
+        for (auto k = req->mPendingBufferList.begin();
+                k != req->mPendingBufferList.end(); k++ ) {
+            // Match on the buffer handle pointer identity.
+            if (k->buffer == buffer) {
+                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
+                        req->frame_number, buffer);
+                k = req->mPendingBufferList.erase(k);
+                if (req->mPendingBufferList.empty()) {
+                    // Remove this request from Map
+                    // (safe: both loops break immediately after, so the
+                    // invalidated iterators are never advanced)
+                    req = mPendingBuffersInRequest.erase(req);
+                }
+                buffer_found = true;
+                break;
+            }
+        }
+        if (buffer_found) {
+            break;
+        }
+    }
+    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
+            get_num_overall_buffers());
+}
+
+/*===========================================================================
+ * FUNCTION   : setPAAFSupport
+ *
+ * DESCRIPTION: Set the preview-assisted auto focus support bit in
+ *              feature mask according to stream type and filter
+ *              arrangement
+ *
+ * PARAMETERS : @feature_mask: current feature mask, which may be modified
+ *              @stream_type: stream type
+ *              @filter_arrangement: filter arrangement
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera3HardwareInterface::setPAAFSupport(
+        cam_feature_mask_t& feature_mask,
+        cam_stream_type_t stream_type,
+        cam_color_filter_arrangement_t filter_arrangement)
+{
+    LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
+            feature_mask, stream_type, filter_arrangement);
+
+    bool enablePaaf = false;
+    switch (filter_arrangement) {
+    case CAM_FILTER_ARRANGEMENT_RGGB:
+    case CAM_FILTER_ARRANGEMENT_GRBG:
+    case CAM_FILTER_ARRANGEMENT_GBRG:
+    case CAM_FILTER_ARRANGEMENT_BGGR:
+        // Bayer arrangements: PAAF applies to the live streams.
+        enablePaaf = (stream_type == CAM_STREAM_TYPE_CALLBACK) ||
+                (stream_type == CAM_STREAM_TYPE_PREVIEW) ||
+                (stream_type == CAM_STREAM_TYPE_VIDEO);
+        break;
+    case CAM_FILTER_ARRANGEMENT_Y:
+        // Mono (Y-only) sensors: PAAF runs on the analysis stream.
+        enablePaaf = (stream_type == CAM_STREAM_TYPE_ANALYSIS);
+        break;
+    default:
+        break;
+    }
+    if (enablePaaf) {
+        feature_mask |= CAM_QCOM_FEATURE_PAAF;
+    }
+}
+
+/*===========================================================================
+* FUNCTION   : getSensorMountAngle
+*
+* DESCRIPTION: Retrieve sensor mount angle from the capability table.
+*
+* PARAMETERS : None
+*
+* RETURN     : sensor mount angle in uint32_t
+*==========================================================================*/
+uint32_t QCamera3HardwareInterface::getSensorMountAngle()
+{
+    // Plain accessor over the per-camera capability record.
+    return gCamCapability[mCameraId]->sensor_mount_angle;
+}
+
+/*===========================================================================
+* FUNCTION   : getRelatedCalibrationData
+*
+* DESCRIPTION: Retrieve related system calibration data
+*
+* PARAMETERS : None
+*
+* RETURN     : Pointer of related system calibration data
+*==========================================================================*/
+const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
+{
+    // Calibration blob lives in the per-camera capability record.
+    const cam_related_system_calibration_data_t *calib =
+            &(gCamCapability[mCameraId]->related_cam_calibration);
+    return calib;
+}
+}; //end namespace qcamera
diff --git a/msmcobalt/QCamera2/HAL3/QCamera3HWI.h b/msmcobalt/QCamera2/HAL3/QCamera3HWI.h
new file mode 100644
index 0000000..f200f30
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL3/QCamera3HWI.h
@@ -0,0 +1,535 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#ifndef __QCAMERA3HARDWAREINTERFACE_H__
+#define __QCAMERA3HARDWAREINTERFACE_H__
+
+// System dependencies
+#include <camera/CameraMetadata.h>
+#include <pthread.h>
+#include <utils/KeyedVector.h>
+#include <utils/List.h>
+
+// Camera dependencies
+#include "hardware/camera3.h"
+#include "QCamera3Channel.h"
+#include "QCamera3CropRegionMapper.h"
+#include "QCamera3HALHeader.h"
+#include "QCamera3Mem.h"
+#include "QCameraPerf.h"
+#include "QCameraCommon.h"
+
+extern "C" {
+#include "mm_camera_interface.h"
+#include "mm_jpeg_interface.h"
+}
+
+using namespace android;
+
+namespace qcamera {
+
+#ifndef TRUE
+#define TRUE 1
+#endif
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+/* Time related macros */
+// NOTE(review): presumably mirrors Android's nsecs_t from utils/Timers.h --
+// confirm there is no conflicting definition pulled in via the includes.
+typedef int64_t nsecs_t;
+#define NSEC_PER_SEC 1000000000LLU
+#define NSEC_PER_USEC 1000LLU
+// 33 ms expressed in nanoseconds (one frame interval at ~30fps).
+#define NSEC_PER_33MSEC 33000000LLU
+
+// Selector for per-module enable/control operations.
+// NOTE(review): no consumer of optype_t is visible in this header --
+// confirm it is still referenced before relying on these semantics.
+typedef enum {
+    SET_ENABLE,
+    SET_CONTROLENABLE,
+    SET_RELOAD_CHROMATIX,
+    SET_STATUS,
+} optype_t;
+
+#define MODULE_ALL 0
+
+extern volatile uint32_t gCamHal3LogLevel;
+
+class QCamera3MetadataChannel;
+class QCamera3PicChannel;
+class QCamera3HeapMemory;
+class QCamera3Exif;
+
+// Book-keeping entry pairing one configured camera3 stream with the HAL
+// channel that services it.
+typedef struct {
+    // Framework stream handle this entry tracks
+    camera3_stream_t *stream;
+    camera3_stream_buffer_set_t buffer_set;
+    stream_status_t status;
+    int registered;
+    // HAL processing channel bound to this stream
+    QCamera3ProcessingChannel *channel;
+} stream_info_t;
+
+// One framework buffer that the HAL has not yet returned.
+typedef struct {
+    // Stream handle
+    camera3_stream_t *stream;
+    // Buffer handle
+    buffer_handle_t *buffer;
+} PendingBufferInfo;
+
+// All buffers still owed to the framework for a single capture request.
+typedef struct {
+    // Frame number corresponding to request
+    uint32_t frame_number;
+    // Time when request queued into system
+    nsecs_t timestamp;
+    List<PendingBufferInfo> mPendingBufferList;
+} PendingBuffersInRequest;
+
+// Tracks every buffer the HAL currently holds, grouped by request; used
+// during flush to account for (and wait out) outstanding buffers.
+class PendingBuffersMap {
+public:
+    // Number of outstanding buffers at flush
+    uint32_t numPendingBufsAtFlush;
+    // List of pending buffers per request
+    List<PendingBuffersInRequest> mPendingBuffersInRequest;
+    uint32_t get_num_overall_buffers();
+    void removeBuf(buffer_handle_t *buffer);
+};
+
+
+// Core HAL3 device implementation for one camera: the static entry points
+// below are installed into camera3_device_ops and trampoline into the
+// per-instance methods; lifecycle transitions are documented in the state
+// table in the private section.
+class QCamera3HardwareInterface {
+public:
+    /* static variable and functions accessed by camera service */
+    static camera3_device_ops_t mCameraOps;
+    //Id of each session in bundle/link
+    static uint32_t sessionId[MM_CAMERA_MAX_NUM_SENSORS];
+    static int initialize(const struct camera3_device *,
+                const camera3_callback_ops_t *callback_ops);
+    static int configure_streams(const struct camera3_device *,
+                camera3_stream_configuration_t *stream_list);
+    static const camera_metadata_t* construct_default_request_settings(
+                                const struct camera3_device *, int type);
+    static int process_capture_request(const struct camera3_device *,
+                                camera3_capture_request_t *request);
+
+    static void dump(const struct camera3_device *, int fd);
+    static int flush(const struct camera3_device *);
+    static int close_camera_device(struct hw_device_t* device);
+
+public:
+    QCamera3HardwareInterface(uint32_t cameraId,
+            const camera_module_callbacks_t *callbacks);
+    virtual ~QCamera3HardwareInterface();
+    static void camEvtHandle(uint32_t camera_handle, mm_camera_event_t *evt,
+                                          void *user_data);
+    int openCamera(struct hw_device_t **hw_device);
+    camera_metadata_t* translateCapabilityToMetadata(int type);
+
+    static int getCamInfo(uint32_t cameraId, struct camera_info *info);
+    static int initCapabilities(uint32_t cameraId);
+    static int initStaticMetadata(uint32_t cameraId);
+    static void makeTable(cam_dimension_t *dimTable, size_t size,
+            size_t max_size, int32_t *sizeTable);
+    static void makeFPSTable(cam_fps_range_t *fpsTable, size_t size,
+            size_t max_size, int32_t *fpsRangesTable);
+    static void makeOverridesList(cam_scene_mode_overrides_t *overridesTable,
+            size_t size, size_t max_size, uint8_t *overridesList,
+            uint8_t *supported_indexes, uint32_t camera_id);
+    static size_t filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
+            size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
+            uint8_t downscale_factor);
+    static void convertToRegions(cam_rect_t rect, int32_t* region, int weight);
+    static void convertFromRegions(cam_area_t &roi, const camera_metadata_t *settings,
+                                   uint32_t tag);
+    static bool resetIfNeededROI(cam_area_t* roi, const cam_crop_region_t* scalerCropRegion);
+    static void convertLandmarks(cam_face_landmarks_info_t face, int32_t* landmarks);
+    static int32_t getSensorSensitivity(int32_t iso_mode);
+
+    double computeNoiseModelEntryS(int32_t sensitivity);
+    double computeNoiseModelEntryO(int32_t sensitivity);
+
+    static void captureResultCb(mm_camera_super_buf_t *metadata,
+                camera3_stream_buffer_t *buffer, uint32_t frame_number,
+                bool isInputBuffer, void *userdata);
+
+    int initialize(const camera3_callback_ops_t *callback_ops);
+    int configureStreams(camera3_stream_configuration_t *stream_list);
+    int configureStreamsPerfLocked(camera3_stream_configuration_t *stream_list);
+    int processCaptureRequest(camera3_capture_request_t *request);
+    void dump(int fd);
+    int flushPerf();
+
+    int setFrameParameters(camera3_capture_request_t *request,
+            cam_stream_ID_t streamID, int blob_request, uint32_t snapshotStreamId);
+    int32_t setReprocParameters(camera3_capture_request_t *request,
+            metadata_buffer_t *reprocParam, uint32_t snapshotStreamId);
+    int translateToHalMetadata(const camera3_capture_request_t *request,
+            metadata_buffer_t *parm, uint32_t snapshotStreamId);
+    camera_metadata_t* translateCbUrgentMetadataToResultMetadata (
+                             metadata_buffer_t *metadata);
+    camera_metadata_t* translateFromHalMetadata(metadata_buffer_t *metadata,
+                            nsecs_t timestamp, int32_t request_id,
+                            const CameraMetadata& jpegMetadata, uint8_t pipeline_depth,
+                            uint8_t capture_intent, bool pprocDone, uint8_t fwk_cacMode);
+    camera_metadata_t* saveRequestSettings(const CameraMetadata& jpegMetadata,
+                            camera3_capture_request_t *request);
+    int initParameters();
+    void deinitParameters();
+    QCamera3ReprocessChannel *addOfflineReprocChannel(const reprocess_config_t &config,
+            QCamera3ProcessingChannel *inputChHandle);
+    bool needRotationReprocess();
+    bool needJpegExifRotation();
+    bool needReprocess(cam_feature_mask_t postprocess_mask);
+    bool needJpegRotation();
+    cam_denoise_process_type_t getWaveletDenoiseProcessPlate();
+    cam_denoise_process_type_t getTemporalDenoiseProcessPlate();
+
+    void captureResultCb(mm_camera_super_buf_t *metadata,
+                camera3_stream_buffer_t *buffer, uint32_t frame_number,
+                bool isInputBuffer);
+    cam_dimension_t calcMaxJpegDim();
+    bool needOnlineRotation();
+    uint32_t getJpegQuality();
+    QCamera3Exif *getExifData();
+    mm_jpeg_exif_params_t get3AExifParams();
+    uint8_t getMobicatMask();
+    static void getFlashInfo(const int cameraId,
+            bool& hasFlash,
+            char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH]);
+    const char *getEepromVersionInfo();
+    const uint32_t *getLdafCalib();
+    void get3AVersion(cam_q3a_version_t &swVersion);
+
+    // Get dual camera related info
+    bool isDeviceLinked() {return mIsDeviceLinked;}
+    bool isMainCamera() {return mIsMainCamera;}
+    uint32_t getSensorMountAngle();
+    const cam_related_system_calibration_data_t *getRelatedCalibrationData();
+
+    // Generic framework<->HAL enum mapping entry used by the static *_MAP
+    // tables declared at the bottom of this class.
+    template <typename fwkType, typename halType> struct QCameraMap {
+        fwkType fwk_name;
+        halType hal_name;
+    };
+
+    typedef struct {
+        const char *const desc;
+        cam_cds_mode_type_t val;
+    } QCameraPropMap;
+
+
+private:
+
+    // State transition conditions:
+    // "\" means not applicable
+    // "x" means not valid
+    // +------------+----------+----------+-------------+------------+---------+-------+--------+
+    // |            |  CLOSED  |  OPENED  | INITIALIZED | CONFIGURED | STARTED | ERROR | DEINIT |
+    // +------------+----------+----------+-------------+------------+---------+-------+--------+
+    // |  CLOSED    |    \     |   open   |     x       |    x       |    x    |   x   |   x    |
+    // +------------+----------+----------+-------------+------------+---------+-------+--------+
+    // |  OPENED    |  close   |    \     | initialize  |    x       |    x    | error |   x    |
+    // +------------+----------+----------+-------------+------------+---------+-------+--------+
+    // |INITIALIZED |  close   |    x     |     \       | configure  |   x     | error |   x    |
+    // +------------+----------+----------+-------------+------------+---------+-------+--------+
+    // | CONFIGURED |  close   |    x     |     x       | configure  | request | error |   x    |
+    // +------------+----------+----------+-------------+------------+---------+-------+--------+
+    // |  STARTED   |  close   |    x     |     x       | configure  |    \    | error |   x    |
+    // +------------+----------+----------+-------------+------------+---------+-------+--------+
+    // |   ERROR    |  close   |    x     |     x       |     x      |    x    |   \   |  any   |
+    // +------------+----------+----------+-------------+------------+---------+-------+--------+
+    // |   DEINIT   |  close   |    x     |     x       |     x      |    x    |   x   |   \    |
+    // +------------+----------+----------+-------------+------------+---------+-------+--------+
+
+    typedef enum {
+        CLOSED,
+        OPENED,
+        INITIALIZED,
+        CONFIGURED,
+        STARTED,
+        ERROR,
+        DEINIT
+    } State;
+
+    int openCamera();
+    int closeCamera();
+    int flush(bool restartChannels);
+    static size_t calcMaxJpegSize(uint32_t camera_id);
+    cam_dimension_t getMaxRawSize(uint32_t camera_id);
+    static void addStreamConfig(Vector<int32_t> &available_stream_configs,
+            int32_t scalar_format, const cam_dimension_t &dim,
+            int32_t config_type);
+
+    int validateCaptureRequest(camera3_capture_request_t *request);
+    int validateStreamDimensions(camera3_stream_configuration_t *streamList);
+    int validateStreamRotations(camera3_stream_configuration_t *streamList);
+    void deriveMinFrameDuration();
+    void handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer);
+    int32_t handlePendingReprocResults(uint32_t frame_number);
+    int64_t getMinFrameDuration(const camera3_capture_request_t *request);
+    void handleMetadataWithLock(mm_camera_super_buf_t *metadata_buf,
+            bool free_and_bufdone_meta_buf);
+    void handleBatchMetadata(mm_camera_super_buf_t *metadata_buf,
+            bool free_and_bufdone_meta_buf);
+    void handleBufferWithLock(camera3_stream_buffer_t *buffer,
+            uint32_t frame_number);
+    void handleInputBufferWithLock(uint32_t frame_number);
+    void unblockRequestIfNecessary();
+    void dumpMetadataToFile(tuning_params_t &meta, uint32_t &dumpFrameCount,
+            bool enabled, const char *type, uint32_t frameNumber);
+    static void getLogLevel();
+
+    void cleanAndSortStreamInfo();
+    void extractJpegMetadata(CameraMetadata& jpegMetadata,
+            const camera3_capture_request_t *request);
+
+    bool isSupportChannelNeeded(camera3_stream_configuration_t *streamList,
+            cam_stream_size_info_t stream_config_info);
+    int32_t setMobicat();
+
+    int32_t getSensorOutputSize(cam_dimension_t &sensor_dim);
+    int32_t setHalFpsRange(const CameraMetadata &settings,
+            metadata_buffer_t *hal_metadata);
+    int32_t extractSceneMode(const CameraMetadata &frame_settings, uint8_t metaMode,
+            metadata_buffer_t *hal_metadata);
+    int32_t numOfSizesOnEncoder(const camera3_stream_configuration_t *streamList,
+            const cam_dimension_t &maxViewfinderSize);
+
+    void addToPPFeatureMask(int stream_format, uint32_t stream_idx);
+    void updateFpsInPreviewBuffer(metadata_buffer_t *metadata, uint32_t frame_number);
+
+    void enablePowerHint();
+    void disablePowerHint();
+    int32_t dynamicUpdateMetaStreamInfo();
+    int32_t startAllChannels();
+    int32_t stopAllChannels();
+    int32_t notifyErrorForPendingRequests();
+    int32_t getReprocessibleOutputStreamId(uint32_t &id);
+    int32_t handleCameraDeviceError();
+
+    bool isOnEncoder(const cam_dimension_t max_viewfinder_size,
+            uint32_t width, uint32_t height);
+    void hdrPlusPerfLock(mm_camera_super_buf_t *metadata_buf);
+
+    static bool supportBurstCapture(uint32_t cameraId);
+    int32_t setBundleInfo();
+
+    static void setPAAFSupport(cam_feature_mask_t& feature_mask,
+            cam_stream_type_t stream_type,
+            cam_color_filter_arrangement_t filter_arrangement);
+
+    camera3_device_t   mCameraDevice;
+    uint32_t           mCameraId;
+    mm_camera_vtbl_t  *mCameraHandle;
+    bool               mCameraInitialized;
+    camera_metadata_t *mDefaultMetadata[CAMERA3_TEMPLATE_COUNT];
+    const camera3_callback_ops_t *mCallbackOps;
+
+    QCamera3MetadataChannel *mMetadataChannel;
+    QCamera3PicChannel *mPictureChannel;
+    QCamera3RawChannel *mRawChannel;
+    QCamera3SupportChannel *mSupportChannel;
+    QCamera3SupportChannel *mAnalysisChannel;
+    QCamera3RawDumpChannel *mRawDumpChannel;
+    QCamera3RegularChannel *mDummyBatchChannel;
+    QCameraPerfLock m_perfLock;
+    QCameraCommon   mCommon;
+
+    uint32_t mChannelHandle;
+
+    void saveExifParams(metadata_buffer_t *metadata);
+    mm_jpeg_exif_params_t mExifParams;
+
+     //First request yet to be processed after configureStreams
+    bool mFirstConfiguration;
+    bool mFlush;
+    bool mFlushPerf;
+    bool mEnableRawDump;
+    QCamera3HeapMemory *mParamHeap;
+    metadata_buffer_t* mParameters;
+    metadata_buffer_t* mPrevParameters;
+    CameraMetadata mCurJpegMeta;
+    bool m_bIsVideo;
+    bool m_bIs4KVideo;
+    bool m_bEisSupportedSize;
+    bool m_bEisEnable;
+    typedef struct {
+        cam_dimension_t dim;
+        int format;
+        uint32_t usage;
+    } InputStreamInfo;
+
+    InputStreamInfo mInputStreamInfo;
+    uint8_t m_MobicatMask;
+    uint8_t m_bTnrEnabled;
+    int8_t  mSupportedFaceDetectMode;
+    uint8_t m_bTnrPreview;
+    uint8_t m_bTnrVideo;
+    uint8_t m_debug_avtimer;
+
+    /* Data structure to store pending request */
+    typedef struct {
+        camera3_stream_t *stream;
+        camera3_stream_buffer_t *buffer;
+        // metadata needs to be consumed by the corresponding stream
+        // in order to generate the buffer.
+        bool need_metadata;
+    } RequestedBufferInfo;
+    typedef struct {
+        uint32_t frame_number;
+        uint32_t num_buffers;
+        int32_t request_id;
+        List<RequestedBufferInfo> buffers;
+        int blob_request;
+        uint8_t bUrgentReceived;
+        nsecs_t timestamp;
+        camera3_stream_buffer_t *input_buffer;
+        const camera_metadata_t *settings;
+        CameraMetadata jpegMetadata;
+        uint8_t pipeline_depth;
+        uint32_t partial_result_cnt;
+        uint8_t capture_intent;
+        uint8_t fwkCacMode;
+        bool shutter_notified;
+    } PendingRequestInfo;
+    typedef struct {
+        uint32_t frame_number;
+        uint32_t stream_ID;
+    } PendingFrameDropInfo;
+
+    typedef struct {
+        camera3_notify_msg_t notify_msg;
+        camera3_stream_buffer_t buffer;
+        uint32_t frame_number;
+    } PendingReprocessResult;
+
+    typedef KeyedVector<uint32_t, Vector<PendingBufferInfo> > FlushMap;
+    typedef List<QCamera3HardwareInterface::PendingRequestInfo>::iterator
+            pendingRequestIterator;
+    typedef List<QCamera3HardwareInterface::RequestedBufferInfo>::iterator
+            pendingBufferIterator;
+
+    List<PendingReprocessResult> mPendingReprocessResultList;
+    List<PendingRequestInfo> mPendingRequestsList;
+    List<PendingFrameDropInfo> mPendingFrameDropList;
+    /* Use last frame number of the batch as key and first frame number of the
+     * batch as value for that key */
+    KeyedVector<uint32_t, uint32_t> mPendingBatchMap;
+
+    PendingBuffersMap mPendingBuffersMap;
+    pthread_cond_t mRequestCond;
+    uint32_t mPendingLiveRequest;
+    bool mWokenUpByDaemon;
+    int32_t mCurrentRequestId;
+    cam_stream_size_info_t mStreamConfigInfo;
+
+    //mutex for serialized access to camera3_device_ops_t functions
+    pthread_mutex_t mMutex;
+
+    //condition used to signal flush after buffers have returned
+    pthread_cond_t mBuffersCond;
+
+    List<stream_info_t*> mStreamInfo;
+
+    int64_t mMinProcessedFrameDuration;
+    int64_t mMinJpegFrameDuration;
+    int64_t mMinRawFrameDuration;
+
+    uint32_t mMetaFrameCount;
+    bool    mUpdateDebugLevel;
+    const camera_module_callbacks_t *mCallbacks;
+
+    uint8_t mCaptureIntent;
+    uint8_t mCacMode;
+    metadata_buffer_t mReprocMeta; //scratch meta buffer
+    /* 0: Not batch, non-zero: Number of image buffers in a batch */
+    uint8_t mBatchSize;
+    // Used only in batch mode
+    uint8_t mToBeQueuedVidBufs;
+    // Fixed video fps
+    float mHFRVideoFps;
+    uint8_t mOpMode;
+    uint32_t mFirstFrameNumberInBatch;
+    camera3_stream_t mDummyBatchStream;
+    bool mNeedSensorRestart;
+
+    /* sensor output size with current stream configuration */
+    QCamera3CropRegionMapper mCropRegionMapper;
+
+    /* Ldaf calibration data */
+    bool mLdafCalibExist;
+    uint32_t mLdafCalib[2];
+    bool mPowerHintEnabled;
+    int32_t mLastCustIntentFrmNum;
+
+    static const QCameraMap<camera_metadata_enum_android_control_effect_mode_t,
+            cam_effect_mode_type> EFFECT_MODES_MAP[];
+    static const QCameraMap<camera_metadata_enum_android_control_awb_mode_t,
+            cam_wb_mode_type> WHITE_BALANCE_MODES_MAP[];
+    static const QCameraMap<camera_metadata_enum_android_control_scene_mode_t,
+            cam_scene_mode_type> SCENE_MODES_MAP[];
+    static const QCameraMap<camera_metadata_enum_android_control_af_mode_t,
+            cam_focus_mode_type> FOCUS_MODES_MAP[];
+    static const QCameraMap<camera_metadata_enum_android_color_correction_aberration_mode_t,
+            cam_aberration_mode_t> COLOR_ABERRATION_MAP[];
+    static const QCameraMap<camera_metadata_enum_android_control_ae_antibanding_mode_t,
+            cam_antibanding_mode_type> ANTIBANDING_MODES_MAP[];
+    static const QCameraMap<camera_metadata_enum_android_lens_state_t,
+            cam_af_lens_state_t> LENS_STATE_MAP[];
+    static const QCameraMap<camera_metadata_enum_android_control_ae_mode_t,
+            cam_flash_mode_t> AE_FLASH_MODE_MAP[];
+    static const QCameraMap<camera_metadata_enum_android_flash_mode_t,
+            cam_flash_mode_t> FLASH_MODES_MAP[];
+    static const QCameraMap<camera_metadata_enum_android_statistics_face_detect_mode_t,
+            cam_face_detect_mode_t> FACEDETECT_MODES_MAP[];
+    static const QCameraMap<camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
+            cam_focus_calibration_t> FOCUS_CALIBRATION_MAP[];
+    static const QCameraMap<camera_metadata_enum_android_sensor_test_pattern_mode_t,
+            cam_test_pattern_mode_t> TEST_PATTERN_MAP[];
+    static const QCameraMap<camera_metadata_enum_android_sensor_reference_illuminant1_t,
+            cam_illuminat_t> REFERENCE_ILLUMINANT_MAP[];
+    static const QCameraMap<int32_t,
+            cam_hfr_mode_t> HFR_MODE_MAP[];
+
+    static const QCameraPropMap CDS_MAP[];
+
+    pendingRequestIterator erasePendingRequest(pendingRequestIterator i);
+    //GPU library to read buffer padding details.
+    void *lib_surface_utils;
+    int (*LINK_get_surface_pixel_alignment)();
+    uint32_t mSurfaceStridePadding;
+
+    // Current lifecycle state per the transition table above.
+    State mState;
+    //Dual camera related params
+    bool mIsDeviceLinked;
+    bool mIsMainCamera;
+    uint8_t mLinkedCameraId;
+    QCamera3HeapMemory *m_pRelCamSyncHeap;
+    cam_sync_related_sensors_event_info_t *m_pRelCamSyncBuf;
+    cam_sync_related_sensors_event_info_t m_relCamSyncInfo;
+
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA3HARDWAREINTERFACE_H__ */
diff --git a/msmcobalt/QCamera2/HAL3/QCamera3Mem.cpp b/msmcobalt/QCamera2/HAL3/QCamera3Mem.cpp
new file mode 100644
index 0000000..ebcc3ba
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL3/QCamera3Mem.cpp
@@ -0,0 +1,1199 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#define LOG_TAG "QCameraHWI_Mem"
+
+// System dependencies
+#include <fcntl.h>
+#define MMAN_H <SYSTEM_HEADER_PREFIX/mman.h>
+#include MMAN_H
+#include "gralloc_priv.h"
+
+// Display dependencies
+#include "qdMetaData.h"
+
+// Camera dependencies
+#include "QCamera3HWI.h"
+#include "QCamera3Mem.h"
+#include "QCameraTrace.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+}
+
+using namespace android;
+
+namespace qcamera {
+
+// QCamera3Memory base class
+
+/*===========================================================================
+ * FUNCTION   : QCamera3Memory
+ *
+ * DESCRIPTION: default constructor of QCamera3Memory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera3Memory::QCamera3Memory()
+{
+    mBufferCount = 0;
+    // Mark every slot as unallocated: invalid fds, null handle, zero size.
+    for (int idx = 0; idx < MM_CAMERA_MAX_NUM_FRAMES; idx++) {
+        mMemInfo[idx].fd = -1;
+        mMemInfo[idx].main_ion_fd = -1;
+        mMemInfo[idx].handle = 0;
+        mMemInfo[idx].size = 0;
+        mCurrentFrameNumbers[idx] = -1;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera3Memory
+ *
+ * DESCRIPTION: deconstructor of QCamera3Memory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera3Memory::~QCamera3Memory()
+{
+    // Intentionally empty: derived classes are expected to release their
+    // ION buffers in their own deallocate paths before destruction.
+    // NOTE(review): confirm the destructor is declared virtual in
+    // QCamera3Mem.h so deletion through a base pointer is safe.
+}
+
+/*===========================================================================
+ * FUNCTION   : cacheOpsInternal
+ *
+ * DESCRIPTION: ion related memory cache operations
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *   @cmd     : cache ops command
+ *   @vaddr   : ptr to the virtual address
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera3Memory::cacheOpsInternal(uint32_t index, unsigned int cmd, void *vaddr)
+{
+    Mutex::Autolock lock(mLock);
+
+    if (MM_CAMERA_MAX_NUM_FRAMES <= index) {
+        LOGE("index %d out of bound [0, %d)",
+                 index, MM_CAMERA_MAX_NUM_FRAMES);
+        return BAD_INDEX;
+    }
+
+    if (0 == mMemInfo[index].handle) {
+        LOGE("Buffer at %d not registered", index);
+        return BAD_INDEX;
+    }
+
+    // Package the flush request and tunnel it through ION's custom ioctl.
+    struct ion_flush_data flush_data;
+    struct ion_custom_data ioctl_data;
+    memset(&flush_data, 0, sizeof(flush_data));
+    memset(&ioctl_data, 0, sizeof(ioctl_data));
+    flush_data.vaddr = vaddr;
+    flush_data.fd = mMemInfo[index].fd;
+    flush_data.handle = mMemInfo[index].handle;
+    flush_data.length = (unsigned int)mMemInfo[index].size;
+    ioctl_data.cmd = cmd;
+    ioctl_data.arg = (unsigned long)&flush_data;
+
+    LOGD("addr = %p, fd = %d, handle = %lx length = %d, ION Fd = %d",
+          flush_data.vaddr, flush_data.fd,
+         (unsigned long)flush_data.handle, flush_data.length,
+         mMemInfo[index].main_ion_fd);
+
+    int rc = ioctl(mMemInfo[index].main_ion_fd, ION_IOC_CUSTOM, &ioctl_data);
+    if (rc < 0) {
+        LOGE("Cache Invalidate failed: %s\n", strerror(errno));
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFd
+ *
+ * DESCRIPTION: return file descriptor of the indexed buffer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : file descriptor, or BAD_INDEX for an invalid/empty slot
+ *==========================================================================*/
+int QCamera3Memory::getFd(uint32_t index)
+{
+    Mutex::Autolock lock(mLock);
+
+    // Reject out-of-range indices and slots with no registered buffer;
+    // short-circuit keeps the handle check in bounds.
+    if ((index >= MM_CAMERA_MAX_NUM_FRAMES) || (0 == mMemInfo[index].handle)) {
+        return BAD_INDEX;
+    }
+
+    return mMemInfo[index].fd;
+}
+
+/*===========================================================================
+ * FUNCTION   : getSize
+ *
+ * DESCRIPTION: return buffer size of the indexed buffer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : buffer size, or BAD_INDEX for an invalid/empty slot
+ *==========================================================================*/
+ssize_t QCamera3Memory::getSize(uint32_t index)
+{
+    Mutex::Autolock lock(mLock);
+
+    // Reject out-of-range indices and slots with no registered buffer;
+    // short-circuit keeps the handle check in bounds.
+    if ((index >= MM_CAMERA_MAX_NUM_FRAMES) || (0 == mMemInfo[index].handle)) {
+        return BAD_INDEX;
+    }
+
+    return (ssize_t)mMemInfo[index].size;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCnt
+ *
+ * DESCRIPTION: query number of buffers allocated
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of buffers allocated
+ *==========================================================================*/
+uint32_t QCamera3Memory::getCnt()
+{
+    // mLock guards mBufferCount against concurrent allocate/deallocate.
+    Mutex::Autolock lock(mLock);
+
+    return mBufferCount;
+}
+
+/*===========================================================================
+ * FUNCTION   : getBufDef
+ *
+ * DESCRIPTION: query detailed buffer information
+ *
+ * PARAMETERS :
+ *   @offset  : [input] frame buffer offset
+ *   @bufDef  : [output] reference to struct to store buffer definition
+ *   @index   : [input] index of the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Memory::getBufDef(const cam_frame_len_offset_t &offset,
+        mm_camera_buf_def_t &bufDef, uint32_t index)
+{
+    Mutex::Autolock lock(mLock);
+
+    if (!mBufferCount) {
+        LOGE("Memory not allocated");
+        return NO_INIT;
+    }
+
+    /* Guard the mMemInfo accesses below, consistent with cacheOpsInternal */
+    if (MM_CAMERA_MAX_NUM_FRAMES <= index) {
+        LOGE("index %d out of bound [0, %d)",
+                 index, MM_CAMERA_MAX_NUM_FRAMES);
+        return BAD_INDEX;
+    }
+
+    bufDef.fd = mMemInfo[index].fd;
+    bufDef.frame_len = mMemInfo[index].size;
+    bufDef.mem_info = (void *)this;
+    bufDef.buffer = getPtrLocked(index);
+    bufDef.planes_buf.num_planes = (int8_t)offset.num_planes;
+    bufDef.buf_idx = (uint8_t)index;
+
+    /* Plane 0 needs to be set separately. Set other planes in a loop */
+    bufDef.planes_buf.planes[0].length = offset.mp[0].len;
+    bufDef.planes_buf.planes[0].m.userptr = (long unsigned int)mMemInfo[index].fd;
+    bufDef.planes_buf.planes[0].data_offset = offset.mp[0].offset;
+    bufDef.planes_buf.planes[0].reserved[0] = 0;
+    for (int i = 1; i < bufDef.planes_buf.num_planes; i++) {
+         /* All planes of buffer 'index' share one fd. The previous code
+          * indexed mMemInfo with the plane number 'i', picking up an
+          * unrelated buffer's fd for every plane past the first. */
+         bufDef.planes_buf.planes[i].length = offset.mp[i].len;
+         bufDef.planes_buf.planes[i].m.userptr = (long unsigned int)mMemInfo[index].fd;
+         bufDef.planes_buf.planes[i].data_offset = offset.mp[i].offset;
+         bufDef.planes_buf.planes[i].reserved[0] =
+                 bufDef.planes_buf.planes[i-1].reserved[0] +
+                 bufDef.planes_buf.planes[i-1].length;
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCamera3HeapMemory
+ *
+ * DESCRIPTION: constructor of QCamera3HeapMemory for ion memory used internally in HAL
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3HeapMemory::QCamera3HeapMemory(uint32_t maxCnt)
+    : QCamera3Memory()
+{
+    // Clamp the requested count to what the interface layer can track.
+    mMaxCnt = MIN(maxCnt, MM_CAMERA_MAX_NUM_FRAMES);
+    for (uint32_t idx = 0; idx < mMaxCnt; idx++) {
+        mPtr[idx] = NULL;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera3HeapMemory
+ *
+ * DESCRIPTION: deconstructor of QCamera3HeapMemory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3HeapMemory::~QCamera3HeapMemory()
+{
+    // Intentionally empty: buffers are presumably released through the
+    // class's deallocate path before destruction -- TODO confirm callers
+    // always deallocate first, otherwise ION fds/handles would leak here.
+}
+
+/*===========================================================================
+ * FUNCTION   : allocOneBuffer
+ *
+ * DESCRIPTION: impl of allocating one buffer of certain size from ION
+ *
+ * PARAMETERS :
+ *   @memInfo : [output] reference to struct to store additional memory allocation info
+ *   @heap_id : [input] heap id to indicate where the buffers will be allocated from
+ *   @size    : [input] length of the buffer to be allocated
+ *
+ * RETURN     : int32_t type of status
+ *              OK        -- success; ownership of fds/handle passes to memInfo
+ *              NO_MEMORY -- failure at any stage (resources already released)
+ *==========================================================================*/
+int QCamera3HeapMemory::allocOneBuffer(QCamera3MemInfo &memInfo,
+        unsigned int heap_id, size_t size)
+{
+    int rc = OK;
+    struct ion_handle_data handle_data;
+    struct ion_allocation_data allocData;
+    struct ion_fd_data ion_info_fd;
+    int main_ion_fd = -1;
+
+    main_ion_fd = open("/dev/ion", O_RDONLY);
+    if (main_ion_fd < 0) {
+        LOGE("Ion dev open failed: %s\n", strerror(errno));
+        goto ION_OPEN_FAILED;
+    }
+
+    memset(&allocData, 0, sizeof(allocData));
+    allocData.len = size;
+    /* to make it page size aligned */
+    allocData.len = (allocData.len + 4095U) & (~4095U);
+    allocData.align = 4096;
+    allocData.flags = ION_FLAG_CACHED;
+    allocData.heap_id_mask = heap_id;
+    rc = ioctl(main_ion_fd, ION_IOC_ALLOC, &allocData);
+    if (rc < 0) {
+        /* %zu: allocData.len is size_t; %d was a printf format mismatch */
+        LOGE("ION allocation for len %zu failed: %s\n", allocData.len,
+            strerror(errno));
+        goto ION_ALLOC_FAILED;
+    }
+
+    memset(&ion_info_fd, 0, sizeof(ion_info_fd));
+    ion_info_fd.handle = allocData.handle;
+    rc = ioctl(main_ion_fd, ION_IOC_SHARE, &ion_info_fd);
+    if (rc < 0) {
+        LOGE("ION map failed %s\n", strerror(errno));
+        goto ION_MAP_FAILED;
+    }
+
+    /* Success: hand ownership of both fds and the ion handle to the caller */
+    memInfo.main_ion_fd = main_ion_fd;
+    memInfo.fd = ion_info_fd.fd;
+    memInfo.handle = ion_info_fd.handle;
+    memInfo.size = allocData.len;
+    return OK;
+
+/* Error unwinding, in reverse order of acquisition */
+ION_MAP_FAILED:
+    memset(&handle_data, 0, sizeof(handle_data));
+    handle_data.handle = ion_info_fd.handle;
+    ioctl(main_ion_fd, ION_IOC_FREE, &handle_data);
+ION_ALLOC_FAILED:
+    close(main_ion_fd);
+ION_OPEN_FAILED:
+    return NO_MEMORY;
+}
+
+/*===========================================================================
+ * FUNCTION   : deallocOneBuffer
+ *
+ * DESCRIPTION: impl of deallocating one buffer previously obtained from
+ *              allocOneBuffer; safe to call on an already-cleared memInfo
+ *
+ * PARAMETERS :
+ *   @memInfo : reference to struct that stores additional memory allocation info
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3HeapMemory::deallocOneBuffer(QCamera3MemInfo &memInfo)
+{
+    struct ion_handle_data handle_data;
+
+    // Close the shared buffer fd first; the ion handle keeps the
+    // allocation alive until ION_IOC_FREE below.
+    if (memInfo.fd >= 0) {
+        close(memInfo.fd);
+        memInfo.fd = -1;
+    }
+
+    if (memInfo.main_ion_fd >= 0) {
+        memset(&handle_data, 0, sizeof(handle_data));
+        handle_data.handle = memInfo.handle;
+        ioctl(memInfo.main_ion_fd, ION_IOC_FREE, &handle_data);
+        close(memInfo.main_ion_fd);
+        memInfo.main_ion_fd = -1;
+    }
+    // handle == 0 marks the slot as "not allocated" elsewhere in this class.
+    memInfo.handle = 0;
+    memInfo.size = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : getPtrLocked
+ *
+ * DESCRIPTION: Return buffer pointer.
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : buffer ptr on success;
+ *              (void *)BAD_INDEX on out-of-range index -- an error sentinel,
+ *              not a dereferenceable pointer
+ *==========================================================================*/
+void *QCamera3HeapMemory::getPtrLocked(uint32_t index)
+{
+    if (index >= mBufferCount) {
+        LOGE("index out of bound");
+        // NOTE(review): returns the BAD_INDEX error code cast to a pointer,
+        // whereas QCamera3GrallocMemory::getPtrLocked returns NULL on error.
+        // Callers must compare, never dereference, this value -- confirm.
+        return (void *)BAD_INDEX;
+    }
+    return mPtr[index];
+}
+
+/*===========================================================================
+ * FUNCTION   : markFrameNumber
+ *
+ * DESCRIPTION: Called on the request path to tag a buffer with the frame
+ *              number it is intended for, so the later callback (and post
+ *              processing) can associate data with that request.
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *   @frameNumber : Frame number from the framework
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3HeapMemory::markFrameNumber(uint32_t index, uint32_t frameNumber)
+{
+    Mutex::Autolock lock(mLock);
+
+    int32_t status = BAD_INDEX;
+
+    if (index >= mBufferCount) {
+        LOGE("Index %d out of bounds, current buffer count is %d",
+                 index, mBufferCount);
+    } else if (0 == mMemInfo[index].handle) {
+        LOGE("Buffer at %d not allocated", index);
+    } else {
+        // Record which capture request this buffer now serves.
+        mCurrentFrameNumbers[index] = (int32_t)frameNumber;
+        status = NO_ERROR;
+    }
+
+    return status;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFrameNumber
+ *
+ * DESCRIPTION: Fetch the frame number of the request this buffer was handed
+ *              to the HAL for.
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : int32_t frameNumber
+ *              positive/zero  -- success
+ *              negative failure
+ *==========================================================================*/
+int32_t QCamera3HeapMemory::getFrameNumber(uint32_t index)
+{
+    Mutex::Autolock lock(mLock);
+
+    int32_t frameNumber = -1;
+
+    if (index >= mBufferCount) {
+        LOGE("Index %d out of bounds, current buffer count is %d",
+                 index, mBufferCount);
+    } else if (0 == mMemInfo[index].handle) {
+        LOGE("Buffer at %d not registered", index);
+    } else {
+        frameNumber = mCurrentFrameNumbers[index];
+    }
+
+    return frameNumber;
+}
+
+/*===========================================================================
+ * FUNCTION   : getBufferIndex
+ *
+ * DESCRIPTION: Fetch the buffer index serving the request with a particular
+ *              frame number.
+ *
+ * PARAMETERS :
+ *   @frameNumber  : frame number of the buffer
+ *
+ * RETURN     : int32_t buffer index
+ *              negative failure
+ *==========================================================================*/
+int32_t QCamera3HeapMemory::getBufferIndex(uint32_t frameNumber)
+{
+    Mutex::Autolock lock(mLock);
+
+    int32_t match = -1;
+    // Scan allocated slots; stop at the first one tagged with this frame.
+    for (uint32_t slot = 0; (slot < mBufferCount) && (match < 0); slot++) {
+        if ((0 != mMemInfo[slot].handle) &&
+                (mCurrentFrameNumbers[slot] == (int32_t)frameNumber)) {
+            match = (int32_t)slot;
+        }
+    }
+    return match;
+}
+
+/*===========================================================================
+ * FUNCTION   : getPtr
+ *
+ * DESCRIPTION: Return buffer pointer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : buffer ptr
+ *==========================================================================*/
+void *QCamera3HeapMemory::getPtr(uint32_t index)
+{
+    // NOTE(review): unlike QCamera3GrallocMemory::getPtr, no mLock is taken
+    // here. Presumably heap buffers are never allocated/deallocated while a
+    // reader is active -- confirm, or add locking for consistency.
+    return getPtrLocked(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : allocate
+ *
+ * DESCRIPTION: allocate the full set of mMaxCnt buffers of a certain size
+ *
+ * PARAMETERS :
+ *   @size    : length of each buffer to be allocated
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera3HeapMemory::allocate(size_t size)
+{
+    unsigned int heap_id_mask = 0x1 << ION_IOMMU_HEAP_ID;
+    uint32_t i;
+    int rc = NO_ERROR;
+
+    // Allocation here is all-or-nothing: a second call while buffers exist
+    // is rejected. (Incremental growth is provided by allocateOne instead.)
+    if (mBufferCount > 0) {
+        LOGE("There is already buffer allocated.");
+        return BAD_INDEX;
+    }
+
+    for (i = 0; i < mMaxCnt; i ++) {
+        rc = allocOneBuffer(mMemInfo[i], heap_id_mask, size);
+        if (rc < 0) {
+            LOGE("AllocateIonMemory failed");
+            goto ALLOC_FAILED;
+        }
+
+        // Map each ion buffer so mPtr[i] is CPU-accessible to the HAL.
+        void *vaddr = mmap(NULL,
+                    mMemInfo[i].size,
+                    PROT_READ | PROT_WRITE,
+                    MAP_SHARED,
+                    mMemInfo[i].fd, 0);
+        if (vaddr == MAP_FAILED) {
+            deallocOneBuffer(mMemInfo[i]);
+            LOGE("mmap failed for buffer %d", i);
+            goto ALLOC_FAILED;
+        } else
+            mPtr[i] = vaddr;
+    }
+    if (rc == 0)
+        mBufferCount = mMaxCnt;
+
+    return OK;
+
+ALLOC_FAILED:
+    // Unwind buffers [0, i); buffer i itself was already released on the
+    // failing path above.
+    for (uint32_t j = 0; j < i; j++) {
+        munmap(mPtr[j], mMemInfo[j].size);
+        mPtr[j] = NULL;
+        deallocOneBuffer(mMemInfo[j]);
+    }
+    return NO_MEMORY;
+}
+
+/*===========================================================================
+ * FUNCTION   : allocateOne
+ *
+ * DESCRIPTION: allocate one additional buffer (incremental allocation)
+ *
+ * PARAMETERS :
+ *   @size    : length of the buffer to be allocated
+ *
+ * RETURN     : index (>= 0) of the newly allocated buffer on success
+ *              negative failure code (BAD_INDEX / NO_MEMORY) on failure
+ *==========================================================================*/
+int QCamera3HeapMemory::allocateOne(size_t size)
+{
+    unsigned int heap_id_mask = 0x1 << ION_IOMMU_HEAP_ID;
+    int rc = NO_ERROR;
+
+    //Note that now we allow incremental allocation. In other words, we allow
+    //multiple alloc being called as long as the sum of count does not exceed
+    //mMaxCnt.
+    if (mBufferCount + 1 > mMaxCnt) {
+        LOGE("Buffer count %d + 1 out of bound. Max is %d",
+                mBufferCount, mMaxCnt);
+        return BAD_INDEX;
+    }
+
+    rc = allocOneBuffer(mMemInfo[mBufferCount], heap_id_mask, size);
+    if (rc < 0) {
+        LOGE("AllocateIonMemory failed");
+        return NO_MEMORY;
+    }
+
+    // Map the new buffer so the HAL can access it through mPtr.
+    void *vaddr = mmap(NULL,
+                mMemInfo[mBufferCount].size,
+                PROT_READ | PROT_WRITE,
+                MAP_SHARED,
+                mMemInfo[mBufferCount].fd, 0);
+    if (vaddr == MAP_FAILED) {
+        deallocOneBuffer(mMemInfo[mBufferCount]);
+        LOGE("mmap failed for buffer");
+        return NO_MEMORY;
+    } else
+        mPtr[mBufferCount] = vaddr;
+
+    if (rc == 0)
+        mBufferCount += 1;
+
+    // Returns the index of the buffer just allocated, not a status code.
+    return mBufferCount-1;
+}
+
+/*===========================================================================
+ * FUNCTION   : deallocate
+ *
+ * DESCRIPTION: deallocate all buffers owned by this instance
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3HeapMemory::deallocate()
+{
+    for (uint32_t idx = 0U; idx < mBufferCount; idx++) {
+        // Drop the CPU mapping first, then release the ion allocation.
+        munmap(mPtr[idx], mMemInfo[idx].size);
+        mPtr[idx] = NULL;
+        deallocOneBuffer(mMemInfo[idx]);
+        // The slot no longer serves any capture request.
+        mCurrentFrameNumbers[idx] = -1;
+    }
+    mBufferCount = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : cacheOps
+ *
+ * DESCRIPTION: ion related memory cache operations
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *   @cmd     : cache ops command
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera3HeapMemory::cacheOps(uint32_t index, unsigned int cmd)
+{
+    // Validate the slot before handing its mapping to the ion ioctl.
+    if (index < mBufferCount) {
+        return cacheOpsInternal(index, cmd, mPtr[index]);
+    }
+    return BAD_INDEX;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMatchBufIndex
+ *
+ * DESCRIPTION: query buffer index by object ptr
+ *
+ * PARAMETERS :
+ *   @object  : object ptr
+ *
+ * RETURN     : buffer index if match found,
+ *              -1 if failed
+ *==========================================================================*/
+int QCamera3HeapMemory::getMatchBufIndex(void * /*object*/)
+{
+
+/*
+    TODO for HEAP memory type, would there be an equivalent requirement?
+
+    int index = -1;
+    buffer_handle_t *key = (buffer_handle_t*) object;
+    if (!key) {
+        return BAD_VALUE;
+    }
+    for (int i = 0; i < mBufferCount; i++) {
+        if (mBufferHandle[i] == key) {
+            index = i;
+            break;
+        }
+    }
+    return index;
+*/
+    // Heap buffers are HAL-internal and have no external handle to match
+    // against; reaching this override indicates a caller bug.
+    LOGE("FATAL: Not supposed to come here");
+    return -1;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCamera3GrallocMemory
+ *
+ * DESCRIPTION: constructor of QCamera3GrallocMemory; preview stream buffers
+ *              are allocated from gralloc native_window
+ *
+ * PARAMETERS :
+ *   @startIdx : start index of array after which we can register buffers in.
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3GrallocMemory::QCamera3GrallocMemory(uint32_t startIdx)
+        : QCamera3Memory(), mStartIdx(startIdx)
+{
+    // Every slot starts out unregistered.
+    for (int idx = 0; idx < MM_CAMERA_MAX_NUM_FRAMES; idx++) {
+        mBufferHandle[idx] = NULL;
+        mPrivateHandle[idx] = NULL;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera3GrallocMemory
+ *
+ * DESCRIPTION: destructor of QCamera3GrallocMemory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3GrallocMemory::~QCamera3GrallocMemory()
+{
+    // NOTE(review): intentionally empty -- registered buffers are not torn
+    // down here; owners are presumably expected to call unregisterBuffers()
+    // first. Confirm all call sites do so.
+}
+
+/*===========================================================================
+ * FUNCTION   : registerBuffer
+ *
+ * DESCRIPTION: registers frameworks-allocated gralloc buffer_handle_t:
+ *              imports the underlying ion buffer and maps it for CPU access
+ *
+ * PARAMETERS :
+ *   @buffer  : buffer_handle_t pointer
+ *   @type    : cam_stream_type_t (currently unused)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera3GrallocMemory::registerBuffer(buffer_handle_t *buffer,
+        __unused cam_stream_type_t type)
+{
+    status_t ret = NO_ERROR;
+    struct ion_fd_data ion_info_fd;
+    void *vaddr = NULL;
+    int32_t colorSpace = ITU_R_601_FR;
+    int32_t idx = -1;
+
+    LOGD("E");
+
+    memset(&ion_info_fd, 0, sizeof(ion_info_fd));
+
+    if (0 <= getMatchBufIndex((void *) buffer)) {
+        LOGL("Buffer already registered");
+        return ALREADY_EXISTS;
+    }
+
+    Mutex::Autolock lock(mLock);
+    /* NOTE(review): this admits at most MM_CAMERA_MAX_NUM_FRAMES-2-mStartIdx
+     * buffers while the log text claims MM_CAMERA_MAX_NUM_FRAMES-mStartIdx;
+     * confirm the intended capacity before changing either. */
+    if (mBufferCount >= (MM_CAMERA_MAX_NUM_FRAMES - 1 - mStartIdx)) {
+        LOGE("Number of buffers %d greater than what's supported %d",
+                 mBufferCount, MM_CAMERA_MAX_NUM_FRAMES - mStartIdx);
+        return BAD_INDEX;
+    }
+
+    idx = getFreeIndexLocked();
+    if (0 > idx) {
+        LOGE("No available memory slots");
+        return BAD_INDEX;
+    }
+
+    mBufferHandle[idx] = buffer;
+    mPrivateHandle[idx] = (struct private_handle_t *)(*mBufferHandle[idx]);
+
+    setMetaData(mPrivateHandle[idx], UPDATE_COLOR_SPACE, &colorSpace);
+
+    mMemInfo[idx].main_ion_fd = open("/dev/ion", O_RDONLY);
+    if (mMemInfo[idx].main_ion_fd < 0) {
+        LOGE("failed: could not open ion device");
+        ret = NO_MEMORY;
+        goto end;
+    } else {
+        ion_info_fd.fd = mPrivateHandle[idx]->fd;
+        if (ioctl(mMemInfo[idx].main_ion_fd,
+                  ION_IOC_IMPORT, &ion_info_fd) < 0) {
+            LOGE("ION import failed\n");
+            close(mMemInfo[idx].main_ion_fd);
+            ret = NO_MEMORY;
+            goto end;
+        }
+    }
+    LOGD("idx = %d, fd = %d, size = %d, offset = %d",
+             idx, mPrivateHandle[idx]->fd,
+            mPrivateHandle[idx]->size,
+            mPrivateHandle[idx]->offset);
+    mMemInfo[idx].fd = mPrivateHandle[idx]->fd;
+    mMemInfo[idx].size =
+            ( /* FIXME: Should update ION interface */ size_t)
+            mPrivateHandle[idx]->size;
+    mMemInfo[idx].handle = ion_info_fd.handle;
+
+    vaddr = mmap(NULL,
+            mMemInfo[idx].size,
+            PROT_READ | PROT_WRITE,
+            MAP_SHARED,
+            mMemInfo[idx].fd, 0);
+    if (vaddr == MAP_FAILED) {
+        /* Fix: undo the ION import on mmap failure; previously the imported
+         * handle and the per-buffer ion device fd leaked on this path. */
+        struct ion_handle_data ion_handle;
+        memset(&ion_handle, 0, sizeof(ion_handle));
+        ion_handle.handle = mMemInfo[idx].handle;
+        if (ioctl(mMemInfo[idx].main_ion_fd, ION_IOC_FREE, &ion_handle) < 0) {
+            LOGE("ion free failed");
+        }
+        close(mMemInfo[idx].main_ion_fd);
+        mMemInfo[idx].main_ion_fd = -1;
+        mMemInfo[idx].handle = 0;
+        ret = NO_MEMORY;
+    } else {
+        mPtr[idx] = vaddr;
+        mBufferCount++;
+    }
+
+end:
+    LOGD("X ");
+    return ret;
+}
+/*===========================================================================
+ * FUNCTION   : unregisterBufferLocked
+ *
+ * DESCRIPTION: Unregister buffer. Please note that this method has to be
+ *              called with 'mLock' acquired.
+ *
+ * PARAMETERS :
+ *   @idx     : unregister buffer at index 'idx'
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3GrallocMemory::unregisterBufferLocked(size_t idx)
+{
+    // Tear down in reverse order of registerBuffer: unmap, free the
+    // imported ion handle, then close the per-buffer ion device fd.
+    munmap(mPtr[idx], mMemInfo[idx].size);
+    mPtr[idx] = NULL;
+
+    struct ion_handle_data ion_handle;
+    memset(&ion_handle, 0, sizeof(ion_handle));
+    ion_handle.handle = mMemInfo[idx].handle;
+    if (ioctl(mMemInfo[idx].main_ion_fd, ION_IOC_FREE, &ion_handle) < 0) {
+        LOGE("ion free failed");
+    }
+    close(mMemInfo[idx].main_ion_fd);
+    // memset zeroes handle (marking the slot free); main_ion_fd is set to
+    // -1 explicitly because 0 would be a valid file descriptor.
+    memset(&mMemInfo[idx], 0, sizeof(struct QCamera3MemInfo));
+    mMemInfo[idx].main_ion_fd = -1;
+    mBufferHandle[idx] = NULL;
+    mPrivateHandle[idx] = NULL;
+    mCurrentFrameNumbers[idx] = -1;
+    mBufferCount--;
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : unregisterBuffer
+ *
+ * DESCRIPTION: unregister buffer
+ *
+ * PARAMETERS :
+ *   @idx     : unregister buffer at index 'idx'
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3GrallocMemory::unregisterBuffer(size_t idx)
+{
+    int32_t rc = NO_ERROR;
+    Mutex::Autolock lock(mLock);
+
+    /* Fix: LOGD("E ", __FUNCTION__) passed an argument with no matching
+     * conversion specifier; same for the exit trace below. */
+    LOGD("E");
+
+    /* Fix: idx is size_t, so %zu (not %d) in the messages below. */
+    if (MM_CAMERA_MAX_NUM_FRAMES <= idx) {
+        LOGE("Buffer index %zu greater than what is supported %d",
+                 idx, MM_CAMERA_MAX_NUM_FRAMES);
+        return BAD_VALUE;
+    }
+    if (idx < mStartIdx) {
+        LOGE("buffer index %zu less than starting index %d",
+                 idx, mStartIdx);
+        return BAD_INDEX;
+    }
+
+    if (0 == mMemInfo[idx].handle) {
+        LOGE("Trying to unregister buffer at %zu which still not registered",
+                 idx);
+        return BAD_VALUE;
+    }
+
+    rc = unregisterBufferLocked(idx);
+
+    LOGD("X");
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : unregisterBuffers
+ *
+ * DESCRIPTION: unregister all currently registered buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3GrallocMemory::unregisterBuffers()
+{
+    int err = NO_ERROR;
+    Mutex::Autolock lock(mLock);
+
+    /* Fix: the entry/exit traces passed __FUNCTION__ with no matching
+     * conversion specifier in the format string. */
+    LOGD("E");
+
+    for (uint32_t cnt = mStartIdx; cnt < MM_CAMERA_MAX_NUM_FRAMES; cnt++) {
+        // Skip slots that were never registered (handle == 0).
+        if (0 == mMemInfo[cnt].handle) {
+            continue;
+        }
+        err = unregisterBufferLocked(cnt);
+        if (NO_ERROR != err) {
+            LOGE("Error unregistering buffer %d error %d",
+                     cnt, err);
+        }
+    }
+    mBufferCount = 0;
+    LOGD("X");
+}
+
+/*===========================================================================
+ * FUNCTION   : markFrameNumber
+ *
+ * DESCRIPTION: Called on the request path to tag a buffer with the frame
+ *              number it is intended for, so the later callback (and post
+ *              processing) can associate data with that request.
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *   @frameNumber : Frame number from the framework
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3GrallocMemory::markFrameNumber(uint32_t index, uint32_t frameNumber)
+{
+    Mutex::Autolock lock(mLock);
+
+    int32_t status = BAD_INDEX;
+
+    if (index >= MM_CAMERA_MAX_NUM_FRAMES) {
+        LOGE("Index out of bounds");
+    } else if (index < mStartIdx) {
+        LOGE("buffer index %d less than starting index %d",
+                 index, mStartIdx);
+    } else if (0 == mMemInfo[index].handle) {
+        LOGE("Buffer at %d not registered", index);
+    } else {
+        // Tie this registered buffer to the capture request it serves.
+        mCurrentFrameNumbers[index] = (int32_t)frameNumber;
+        status = NO_ERROR;
+    }
+
+    return status;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFrameNumber
+ *
+ * DESCRIPTION: We use this to fetch the frameNumber for the request with which
+ *              this buffer was given to HAL
+ *
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : int32_t frameNumber
+ *              positive/zero  -- success
+ *              negative failure
+ *==========================================================================*/
+int32_t QCamera3GrallocMemory::getFrameNumber(uint32_t index)
+{
+    Mutex::Autolock lock(mLock);
+
+    if (index >= MM_CAMERA_MAX_NUM_FRAMES) {
+        LOGE("Index out of bounds");
+        return -1;
+    }
+    if (index < mStartIdx) {
+        LOGE("buffer index %d less than starting index %d",
+                 index, mStartIdx);
+        // NOTE(review): this path returns BAD_INDEX while the other failure
+        // paths return -1; both are negative, but confirm callers do not
+        // distinguish the two.
+        return BAD_INDEX;
+    }
+
+    if (0 == mMemInfo[index].handle) {
+        LOGE("Buffer at %d not registered", index);
+        return -1;
+    }
+
+    return mCurrentFrameNumbers[index];
+}
+
+/*===========================================================================
+ * FUNCTION   : getBufferIndex
+ *
+ * DESCRIPTION: We use this to fetch the buffer index for the request with
+ *              a particular frame number
+ *
+ *
+ * PARAMETERS :
+ *   @frameNumber  : frame number of the buffer
+ *
+ * RETURN     : int32_t buffer index
+ *              negative failure
+ *==========================================================================*/
+int32_t QCamera3GrallocMemory::getBufferIndex(uint32_t frameNumber)
+{
+    /* Fix: take mLock like the sibling public accessors (markFrameNumber,
+     * getFrameNumber) and the heap-memory counterpart, so the scan cannot
+     * race with register/unregister updating mMemInfo. */
+    Mutex::Autolock lock(mLock);
+
+    for (uint32_t index = mStartIdx;
+            index < MM_CAMERA_MAX_NUM_FRAMES; index++) {
+        if (mMemInfo[index].handle &&
+                mCurrentFrameNumbers[index] == (int32_t)frameNumber)
+            return (int32_t)index;
+    }
+    return -1;
+}
+
+/*===========================================================================
+ * FUNCTION   : cacheOps
+ *
+ * DESCRIPTION: ion related memory cache operations
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *   @cmd     : cache ops command
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera3GrallocMemory::cacheOps(uint32_t index, unsigned int cmd)
+{
+    int rc;
+
+    if (index >= MM_CAMERA_MAX_NUM_FRAMES) {
+        LOGE("Index out of bounds");
+        rc = -1;
+    } else if (index < mStartIdx) {
+        LOGE("buffer index %d less than starting index %d",
+                 index, mStartIdx);
+        rc = BAD_INDEX;
+    } else {
+        rc = cacheOpsInternal(index, cmd, mPtr[index]);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMatchBufIndex
+ *
+ * DESCRIPTION: query buffer index by the framework buffer handle pointer
+ *
+ * PARAMETERS :
+ *   @object  : opaque ptr (a buffer_handle_t *)
+ *
+ * RETURN     : buffer index if match found,
+ *              -1 if failed
+ *==========================================================================*/
+int QCamera3GrallocMemory::getMatchBufIndex(void *object)
+{
+    Mutex::Autolock lock(mLock);
+
+    buffer_handle_t *key = (buffer_handle_t *)object;
+    if (NULL == key) {
+        return BAD_VALUE;
+    }
+
+    int match = -1;
+    // Only slots from mStartIdx onwards can hold registered buffers.
+    for (uint32_t slot = mStartIdx;
+            (slot < MM_CAMERA_MAX_NUM_FRAMES) && (match < 0); slot++) {
+        if (mBufferHandle[slot] == key) {
+            match = (int)slot;
+        }
+    }
+
+    return match;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFreeIndexLocked
+ *
+ * DESCRIPTION: Find free index slot. Note 'mLock' needs to be acquired
+ *              before calling this method.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : free buffer index if found,
+ *              -1 if failed
+ *==========================================================================*/
+int QCamera3GrallocMemory::getFreeIndexLocked()
+{
+    int index = -1;
+
+    // NOTE(review): the '- 1' leaves the last slot permanently unusable and
+    // the log prints a limit one higher than the check enforces; confirm
+    // whether the reserved slot is intentional.
+    if (mBufferCount >= (MM_CAMERA_MAX_NUM_FRAMES - 1)) {
+        LOGE("Number of buffers %d greater than what's supported %d",
+             mBufferCount, MM_CAMERA_MAX_NUM_FRAMES);
+        return index;
+    }
+
+    // handle == 0 marks a slot that is not (or no longer) registered.
+    for (size_t i = mStartIdx; i < MM_CAMERA_MAX_NUM_FRAMES; i++) {
+        if (0 == mMemInfo[i].handle) {
+            index = i;
+            break;
+        }
+    }
+
+    return index;
+}
+
+/*===========================================================================
+ * FUNCTION   : getPtrLocked
+ *
+ * DESCRIPTION: Return buffer pointer. Please note 'mLock' must be acquired
+ *              before calling this method.
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : buffer ptr, or NULL if the index is out of range,
+ *              below mStartIdx, or the slot is not registered
+ *==========================================================================*/
+void *QCamera3GrallocMemory::getPtrLocked(uint32_t index)
+{
+    if (MM_CAMERA_MAX_NUM_FRAMES <= index) {
+        LOGE("index %d out of bound [0, %d)",
+                 index, MM_CAMERA_MAX_NUM_FRAMES);
+        return NULL;
+    }
+    if (index < mStartIdx) {
+        LOGE("buffer index %d less than starting index %d",
+                 index, mStartIdx);
+        return NULL;
+    }
+
+
+    // A zero ion handle means no buffer was registered in this slot.
+    if (0 == mMemInfo[index].handle) {
+        LOGE("Buffer at %d not registered", index);
+        return NULL;
+    }
+
+    return mPtr[index];
+}
+
+/*===========================================================================
+ * FUNCTION   : getPtr
+ *
+ * DESCRIPTION: Return buffer pointer.
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : buffer ptr
+ *==========================================================================*/
+void *QCamera3GrallocMemory::getPtr(uint32_t index)
+{
+    // Public wrapper: serialize against register/unregister via mLock.
+    Mutex::Autolock lock(mLock);
+    return getPtrLocked(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : getBufferHandle
+ *
+ * DESCRIPTION: return framework pointer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : buffer ptr if match found
+                NULL if failed
+ *==========================================================================*/
+void *QCamera3GrallocMemory::getBufferHandle(uint32_t index)
+{
+    Mutex::Autolock lock(mLock);
+
+    void *handle = NULL;
+
+    if (MM_CAMERA_MAX_NUM_FRAMES <= index) {
+        LOGE("index %d out of bound [0, %d)",
+                 index, MM_CAMERA_MAX_NUM_FRAMES);
+    } else if (index < mStartIdx) {
+        LOGE("buffer index %d less than starting index %d",
+                 index, mStartIdx);
+    } else if (0 == mMemInfo[index].handle) {
+        LOGE("Buffer at %d not registered", index);
+    } else {
+        handle = mBufferHandle[index];
+    }
+
+    return handle;
+}
+}; //namespace qcamera
diff --git a/msmcobalt/QCamera2/HAL3/QCamera3Mem.h b/msmcobalt/QCamera2/HAL3/QCamera3Mem.h
new file mode 100644
index 0000000..f8bccfe
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL3/QCamera3Mem.h
@@ -0,0 +1,158 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA3HWI_MEM_H__
+#define __QCAMERA3HWI_MEM_H__
+
+// System dependencies
+#include <linux/msm_ion.h>
+#include <utils/Mutex.h>
+
+// Camera dependencies
+#include "hardware/camera3.h"
+
+extern "C" {
+#include "mm_camera_interface.h"
+}
+
+using namespace android;
+
+namespace qcamera {
+
+// Base class for all memory types. Abstract.
+class QCamera3Memory {
+
+public:
+    // Convenience wrappers mapping to the corresponding ion cache commands.
+    int cleanCache(uint32_t index)
+    {
+        return cacheOps(index, ION_IOC_CLEAN_CACHES);
+    }
+    int invalidateCache(uint32_t index)
+    {
+        return cacheOps(index, ION_IOC_INV_CACHES);
+    }
+    int cleanInvalidateCache(uint32_t index)
+    {
+        return cacheOps(index, ION_IOC_CLEAN_INV_CACHES);
+    }
+    int getFd(uint32_t index);
+    ssize_t getSize(uint32_t index);
+    uint32_t getCnt();
+
+    virtual int cacheOps(uint32_t index, unsigned int cmd) = 0;
+    virtual int getMatchBufIndex(void *object) = 0;
+    virtual void *getPtr(uint32_t index) = 0;
+
+    // Frame-number bookkeeping: associates a buffer slot with the capture
+    // request it is serving.
+    virtual int32_t markFrameNumber(uint32_t index, uint32_t frameNumber) = 0;
+    virtual int32_t getFrameNumber(uint32_t index) = 0;
+    virtual int32_t getBufferIndex(uint32_t frameNumber) = 0;
+
+    QCamera3Memory();
+    virtual ~QCamera3Memory();
+
+    // Fill a mm-camera buffer definition for buffer 'index' using the
+    // plane layout described by 'offset'.
+    int32_t getBufDef(const cam_frame_len_offset_t &offset,
+            mm_camera_buf_def_t &bufDef, uint32_t index);
+
+protected:
+    // Per-buffer ion bookkeeping; handle == 0 marks an unused slot.
+    struct QCamera3MemInfo {
+        int fd;             // shared buffer fd (ION share / gralloc fd)
+        int main_ion_fd;    // /dev/ion fd kept open per buffer
+        ion_user_handle_t handle;
+        size_t size;
+    };
+
+    int cacheOpsInternal(uint32_t index, unsigned int cmd, void *vaddr);
+    virtual void *getPtrLocked(uint32_t index) = 0;
+
+    uint32_t mBufferCount;  // number of buffers currently in use
+    struct QCamera3MemInfo mMemInfo[MM_CAMERA_MAX_NUM_FRAMES];
+    void *mPtr[MM_CAMERA_MAX_NUM_FRAMES];   // per-buffer CPU mappings
+    int32_t mCurrentFrameNumbers[MM_CAMERA_MAX_NUM_FRAMES];
+    Mutex mLock;            // guards the state above
+};
+
+// Internal heap memory is used for memories used internally
+// They are allocated from /dev/ion. Examples are: capabilities,
+// parameters, metadata, and internal YUV data for jpeg encoding.
+class QCamera3HeapMemory : public QCamera3Memory {
+public:
+    QCamera3HeapMemory(uint32_t maxCnt);
+    virtual ~QCamera3HeapMemory();
+
+    // Allocate mMaxCnt buffers of 'size' bytes each (all-or-nothing).
+    int allocate(size_t size);
+    // Allocate a single additional buffer; returns its index on success.
+    int allocateOne(size_t size);
+    void deallocate();
+
+    virtual int cacheOps(uint32_t index, unsigned int cmd);
+    virtual int getMatchBufIndex(void *object);
+    virtual void *getPtr(uint32_t index);
+
+    virtual int32_t markFrameNumber(uint32_t index, uint32_t frameNumber);
+    virtual int32_t getFrameNumber(uint32_t index);
+    virtual int32_t getBufferIndex(uint32_t frameNumber);
+
+protected:
+    virtual void *getPtrLocked(uint32_t index);
+private:
+    int allocOneBuffer(struct QCamera3MemInfo &memInfo,
+            unsigned int heap_id, size_t size);
+    void deallocOneBuffer(struct QCamera3MemInfo &memInfo);
+    uint32_t mMaxCnt;   // upper bound on buffers, <= MM_CAMERA_MAX_NUM_FRAMES
+};
+
+// Gralloc Memory shared with frameworks
+class QCamera3GrallocMemory : public QCamera3Memory {
+public:
+    QCamera3GrallocMemory(uint32_t startIdx);
+    virtual ~QCamera3GrallocMemory();
+
+    // Import a framework-allocated gralloc buffer into a slot >= mStartIdx.
+    int registerBuffer(buffer_handle_t *buffer, cam_stream_type_t type);
+    int32_t unregisterBuffer(size_t idx);
+    void unregisterBuffers();
+    virtual int cacheOps(uint32_t index, unsigned int cmd);
+    virtual int getMatchBufIndex(void *object);
+    virtual void *getPtr(uint32_t index);
+
+    virtual int32_t markFrameNumber(uint32_t index, uint32_t frameNumber);
+    virtual int32_t getFrameNumber(uint32_t index);
+    virtual int32_t getBufferIndex(uint32_t frameNumber);
+
+    // Returns the framework buffer_handle_t* for a registered slot.
+    void *getBufferHandle(uint32_t index);
+protected:
+    virtual void *getPtrLocked(uint32_t index);
+private:
+    int32_t unregisterBufferLocked(size_t idx);
+    int32_t getFreeIndexLocked();
+    buffer_handle_t *mBufferHandle[MM_CAMERA_MAX_NUM_FRAMES];
+    struct private_handle_t *mPrivateHandle[MM_CAMERA_MAX_NUM_FRAMES];
+
+    uint32_t mStartIdx; // slots below this index are reserved by the owner
+};
+};
+#endif
diff --git a/msmcobalt/QCamera2/HAL3/QCamera3PostProc.cpp b/msmcobalt/QCamera2/HAL3/QCamera3PostProc.cpp
new file mode 100644
index 0000000..447bd40
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL3/QCamera3PostProc.cpp
@@ -0,0 +1,3119 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCamera3PostProc"
+
+// To remove
+#include <cutils/properties.h>
+
+// System dependencies
+#include <stdio.h>
+
+// Camera dependencies
+#include "QCamera3Channel.h"
+#include "QCamera3HWI.h"
+#include "QCamera3PostProc.h"
+#include "QCamera3Stream.h"
+#include "QCameraTrace.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+#define ENABLE_MODEL_INFO_EXIF
+
+namespace qcamera {
+
+// 8-byte EXIF user-comment character-code prefixes (EXIF 2.2 spec);
+// prepended to string payloads written into EXIF tags.
+static const char ExifAsciiPrefix[] =
+    { 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0 };          // "ASCII\0\0\0"
+
+__unused
+static const char ExifUndefinedPrefix[] =
+    { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 };   // "\0\0\0\0\0\0\0\0"
+
+#define EXIF_ASCII_PREFIX_SIZE           8   //(sizeof(ExifAsciiPrefix))
+#define FOCAL_LENGTH_DECIMAL_PRECISION   1000
+
+/*===========================================================================
+ * FUNCTION   : QCamera3PostProcessor
+ *
+ * DESCRIPTION: constructor of QCamera3PostProcessor.
+ *
+ * PARAMETERS :
+ *   @cam_ctrl : ptr to HWI object
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera3PostProcessor::QCamera3PostProcessor(QCamera3ProcessingChannel* ch_ctrl)
+    : m_parent(ch_ctrl),
+      mJpegCB(NULL),
+      mJpegUserData(NULL),
+      mJpegClientHandle(0),
+      mJpegSessionId(0),
+      m_bThumbnailNeeded(TRUE),
+      m_pReprocChannel(NULL),
+      // each queue is constructed with a data-release callback and 'this'
+      // as the callback user data (NULL callback: presumably no auto-release)
+      m_inputPPQ(releasePPInputData, this),
+      m_inputFWKPPQ(NULL, this),
+      m_ongoingPPQ(releaseOngoingPPData, this),
+      m_inputJpegQ(releaseJpegData, this),
+      m_ongoingJpegQ(releaseJpegData, this),
+      m_inputMetaQ(releaseMetadata, this),
+      m_jpegSettingsQ(NULL, this)
+{
+    memset(&mJpegHandle, 0, sizeof(mJpegHandle));
+    memset(&mJpegMetadata, 0, sizeof(mJpegMetadata));
+    // guards pairing of pp input buffers with their metadata (see processData
+    // / processPPMetadata)
+    pthread_mutex_init(&mReprocJobLock, NULL);
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera3PostProcessor
+ *
+ * DESCRIPTION: deconstructor of QCamera3PostProcessor.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera3PostProcessor::~QCamera3PostProcessor()
+{
+    // channels/threads are expected to be torn down via deinit(); only the
+    // mutex is destroyed here
+    pthread_mutex_destroy(&mReprocJobLock);
+}
+
+/*===========================================================================
+ * FUNCTION   : init
+ *
+ * DESCRIPTION: initialization of postprocessor
+ *
+ * PARAMETERS :
+ *   @memory              : output buffer memory
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::init(QCamera3StreamMem *memory)
+{
+    ATRACE_CALL();
+    // jpeg output buffers; not owned here (deinit() only clears the pointer)
+    mOutputMem = memory;
+    // launch the worker thread that drains the pp/jpeg input queues
+    m_dataProcTh.launch(dataProcessRoutine, this);
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : deinit
+ *
+ * DESCRIPTION: de-initialization of postprocessor
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::deinit()
+{
+    int rc = NO_ERROR;
+    // stop the data processing thread before tearing resources down
+    m_dataProcTh.exit();
+
+    if (m_pReprocChannel != NULL) {
+        m_pReprocChannel->stop();
+        delete m_pReprocChannel;
+        m_pReprocChannel = NULL;
+    }
+
+    // close the jpeg encoder session opened in initJpeg()
+    if(mJpegClientHandle > 0) {
+        rc = mJpegHandle.close(mJpegClientHandle);
+        LOGH("Jpeg closed, rc = %d, mJpegClientHandle = %x",
+               rc, mJpegClientHandle);
+        mJpegClientHandle = 0;
+        memset(&mJpegHandle, 0, sizeof(mJpegHandle));
+    }
+
+    // output buffers are not owned by the post processor; just drop the ref
+    mOutputMem = NULL;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : initJpeg
+ *
+ * DESCRIPTION: initialization of jpeg through postprocessor
+ *
+ * PARAMETERS :
+ *   @jpeg_cb      : callback to handle jpeg event from mm-camera-interface
+ *   @max_pic_dim  : max picture dimensions
+ *   @user_data    : user data ptr for jpeg callback
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::initJpeg(jpeg_encode_callback_t jpeg_cb,
+        cam_dimension_t* max_pic_dim,
+        void *user_data)
+{
+    ATRACE_CALL();
+    mJpegCB = jpeg_cb;
+    mJpegUserData = user_data;
+    mm_dimension max_size;
+
+    // reject invalid (negative) picture dimensions before opening the encoder
+    if ((0 > max_pic_dim->width) || (0 > max_pic_dim->height)) {
+        LOGE("Negative dimension %dx%d",
+                max_pic_dim->width, max_pic_dim->height);
+        return BAD_VALUE;
+    }
+
+    // set max pic size
+    memset(&max_size, 0, sizeof(mm_dimension));
+    max_size.w =  max_pic_dim->width;
+    max_size.h =  max_pic_dim->height;
+
+    // Pass OTP calibration data to JPEG
+    QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)m_parent->mUserData;
+    mJpegMetadata.default_sensor_flip = FLIP_NONE;
+    mJpegMetadata.sensor_mount_angle = hal_obj->getSensorMountAngle();
+    memcpy(&mJpegMetadata.otp_calibration_data,
+            hal_obj->getRelatedCalibrationData(),
+            sizeof(cam_related_system_calibration_data_t));
+    // a non-zero client handle marks a successfully opened jpeg session
+    mJpegClientHandle = jpeg_open(&mJpegHandle, NULL, max_size, &mJpegMetadata);
+
+    if (!mJpegClientHandle) {
+        LOGE("jpeg_open did not work");
+        return UNKNOWN_ERROR;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : start
+ *
+ * DESCRIPTION: start postprocessor. Data process thread and data notify thread
+ *              will be launched.
+ *
+ * PARAMETERS :
+ *   @config        : reprocess configuration
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *
+ * NOTE       : if any reprocess is needed, a reprocess channel/stream
+ *              will be started.
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::start(const reprocess_config_t &config)
+{
+    int32_t rc = NO_ERROR;
+    QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)m_parent->mUserData;
+
+    if (config.reprocess_type != REPROCESS_TYPE_NONE) {
+        // tear down any stale reprocess channel from a previous start()
+        if (m_pReprocChannel != NULL) {
+            m_pReprocChannel->stop();
+            delete m_pReprocChannel;
+            m_pReprocChannel = NULL;
+        }
+
+        // if reprocess is needed, start reprocess channel
+        LOGD("Setting input channel as pInputChannel");
+        m_pReprocChannel = hal_obj->addOfflineReprocChannel(config, m_parent);
+        if (m_pReprocChannel == NULL) {
+            LOGE("cannot add reprocess channel");
+            return UNKNOWN_ERROR;
+        }
+        /*start the reprocess channel only if buffers are already allocated, thus
+          only start it in an intermediate reprocess type, defer it for others*/
+        if (config.reprocess_type == REPROCESS_TYPE_JPEG) {
+            rc = m_pReprocChannel->start();
+            if (rc != 0) {
+                LOGE("cannot start reprocess channel");
+                // roll back: don't keep a half-started channel around
+                delete m_pReprocChannel;
+                m_pReprocChannel = NULL;
+                return rc;
+            }
+        }
+    }
+    m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, TRUE, FALSE);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : flush
+ *
+ * DESCRIPTION: stop ongoing postprocess and jpeg jobs
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::flush()
+{
+    int32_t rc = NO_ERROR;
+    // Drain every in-flight jpeg job: abort it at the encoder, then release
+    // its resources and free the job container itself.
+    for (qcamera_hal3_jpeg_data_t *jpeg_job =
+                 (qcamera_hal3_jpeg_data_t *)m_ongoingJpegQ.dequeue();
+            jpeg_job != NULL;
+            jpeg_job = (qcamera_hal3_jpeg_data_t *)m_ongoingJpegQ.dequeue()) {
+        rc = mJpegHandle.abort_job(jpeg_job->jobId);
+        releaseJpegJobData(jpeg_job);
+        free(jpeg_job);
+    }
+    rc = releaseOfflineBuffers(true);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop
+ *
+ * DESCRIPTION: stop postprocessor. Data process and notify thread will be stopped.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *
+ * NOTE       : reprocess channel will be stopped and deleted if there is any
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::stop()
+{
+    // sync flag TRUE -- presumably blocks until the data proc thread acks;
+    // confirm against QCameraCmdThread::sendCmd
+    m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, TRUE, TRUE);
+
+    if (m_pReprocChannel != NULL) {
+        m_pReprocChannel->stop();
+        delete m_pReprocChannel;
+        m_pReprocChannel = NULL;
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFWKJpegEncodeConfig
+ *
+ * DESCRIPTION: function to prepare encoding job information
+ *
+ * PARAMETERS :
+ *   @encode_parm   : param to be filled with encoding configuration
+ *   @frame         : framework input buffer
+ *   @jpeg_settings : jpeg settings to be applied for encoding
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::getFWKJpegEncodeConfig(
+        mm_jpeg_encode_params_t& encode_parm,
+        qcamera_fwk_input_pp_data_t *frame,
+        jpeg_settings_t *jpeg_settings)
+{
+    LOGD("E");
+
+    if ((NULL == frame) || (NULL == jpeg_settings)) {
+        return BAD_VALUE;
+    }
+
+    ssize_t bufSize = mOutputMem->getSize(jpeg_settings->out_buf_index);
+    if (BAD_INDEX == bufSize) {
+        LOGE("cannot retrieve buffer size for buffer %u",
+                jpeg_settings->out_buf_index);
+        return BAD_VALUE;
+    }
+
+    encode_parm.jpeg_cb = mJpegCB;
+    encode_parm.userdata = mJpegUserData;
+
+    // thumbnail is encoded only when a non-zero size was requested
+    if (jpeg_settings->thumbnail_size.width > 0 &&
+            jpeg_settings->thumbnail_size.height > 0)
+        m_bThumbnailNeeded = TRUE;
+    else
+        m_bThumbnailNeeded = FALSE;
+    encode_parm.encode_thumbnail = m_bThumbnailNeeded;
+
+    // get color format
+    cam_format_t img_fmt = frame->reproc_config.stream_format;
+    encode_parm.color_format = getColorfmtFromImgFmt(img_fmt);
+
+    // get jpeg quality; fall back to 85 if unset/invalid
+    encode_parm.quality = jpeg_settings->jpeg_quality;
+    if (encode_parm.quality <= 0) {
+        encode_parm.quality = 85;
+    }
+
+    // get jpeg thumbnail quality
+    encode_parm.thumb_quality = jpeg_settings->jpeg_thumb_quality;
+
+    cam_frame_len_offset_t main_offset =
+            frame->reproc_config.input_stream_plane_info.plane_info;
+
+    // single framework-supplied input buffer as the main image source
+    encode_parm.num_src_bufs = 1;
+    encode_parm.src_main_buf[0].index = 0;
+    encode_parm.src_main_buf[0].buf_size = frame->input_buffer.frame_len;
+    encode_parm.src_main_buf[0].buf_vaddr = (uint8_t *) frame->input_buffer.buffer;
+    encode_parm.src_main_buf[0].fd = frame->input_buffer.fd;
+    encode_parm.src_main_buf[0].format = MM_JPEG_FMT_YUV;
+    encode_parm.src_main_buf[0].offset = main_offset;
+
+    //Pass input thumbnail buffer info to encoder.
+    //Note: Use main buffer to encode thumbnail
+    if (m_bThumbnailNeeded == TRUE) {
+        encode_parm.num_tmb_bufs = 1;
+        encode_parm.src_thumb_buf[0] = encode_parm.src_main_buf[0];
+    }
+
+    //Pass output jpeg buffer info to encoder.
+    //mOutputMem is allocated by framework.
+    encode_parm.num_dst_bufs = 1;
+    encode_parm.dest_buf[0].index = 0;
+    encode_parm.dest_buf[0].buf_size = (size_t)bufSize;
+    encode_parm.dest_buf[0].buf_vaddr = (uint8_t *)mOutputMem->getPtr(
+            jpeg_settings->out_buf_index);
+    encode_parm.dest_buf[0].fd = mOutputMem->getFd(
+            jpeg_settings->out_buf_index);
+    encode_parm.dest_buf[0].format = MM_JPEG_FMT_YUV;
+    encode_parm.dest_buf[0].offset = main_offset;
+
+    LOGD("X");
+    // BUGFIX: removed the unreachable trailing error-exit code
+    // (LOGD("X with error")/return ret) and the unused local 'ret' -- there
+    // is no goto in this function, so that path could never execute
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegEncodeConfig
+ *
+ * DESCRIPTION: function to prepare encoding job information
+ *
+ * PARAMETERS :
+ *   @encode_parm   : param to be filled with encoding configuration
+ *   #main_stream   : stream object where the input buffer comes from
+ *   @jpeg_settings : jpeg settings to be applied for encoding
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::getJpegEncodeConfig(
+                mm_jpeg_encode_params_t& encode_parm,
+                QCamera3Stream *main_stream,
+                jpeg_settings_t *jpeg_settings)
+{
+    LOGD("E");
+    int32_t ret = NO_ERROR;
+    ssize_t bufSize = 0;
+
+    encode_parm.jpeg_cb = mJpegCB;
+    encode_parm.userdata = mJpegUserData;
+
+    // thumbnail is encoded only when a non-zero size was requested
+    if (jpeg_settings->thumbnail_size.width > 0 &&
+            jpeg_settings->thumbnail_size.height > 0)
+        m_bThumbnailNeeded = TRUE;
+    else
+        m_bThumbnailNeeded = FALSE;
+    encode_parm.encode_thumbnail = m_bThumbnailNeeded;
+
+    // get color format
+    cam_format_t img_fmt = CAM_FORMAT_YUV_420_NV12;  //default value
+    main_stream->getFormat(img_fmt);
+    encode_parm.color_format = getColorfmtFromImgFmt(img_fmt);
+
+    // get jpeg quality; fall back to 85 if unset/invalid
+    encode_parm.quality = jpeg_settings->jpeg_quality;
+    if (encode_parm.quality <= 0) {
+        encode_parm.quality = 85;
+    }
+
+    // get jpeg thumbnail quality
+    encode_parm.thumb_quality = jpeg_settings->jpeg_thumb_quality;
+
+    cam_frame_len_offset_t main_offset;
+    memset(&main_offset, 0, sizeof(cam_frame_len_offset_t));
+    main_stream->getFrameOffset(main_offset);
+
+    // src buf config
+    //Pass input main image buffer info to encoder.
+    QCamera3StreamMem *pStreamMem = main_stream->getStreamBufs();
+    if (pStreamMem == NULL) {
+        LOGE("cannot get stream bufs from main stream");
+        ret = BAD_VALUE;
+        goto on_error;
+    }
+    // register every stream buffer (up to the encoder's limit) as a
+    // potential source, since any of them may carry the capture
+    encode_parm.num_src_bufs = MIN(pStreamMem->getCnt(), MM_JPEG_MAX_BUF);
+    for (uint32_t i = 0; i < encode_parm.num_src_bufs; i++) {
+        if (pStreamMem != NULL) {
+            encode_parm.src_main_buf[i].index = i;
+            bufSize = pStreamMem->getSize(i);
+            if (BAD_INDEX == bufSize) {
+                LOGE("cannot retrieve buffer size for buffer %u", i);
+                ret = BAD_VALUE;
+                goto on_error;
+            }
+            encode_parm.src_main_buf[i].buf_size = (size_t)bufSize;
+            encode_parm.src_main_buf[i].buf_vaddr = (uint8_t *)pStreamMem->getPtr(i);
+            encode_parm.src_main_buf[i].fd = pStreamMem->getFd(i);
+            encode_parm.src_main_buf[i].format = MM_JPEG_FMT_YUV;
+            encode_parm.src_main_buf[i].offset = main_offset;
+        }
+    }
+
+    //Pass input thumbnail buffer info to encoder.
+    //Note: Use main buffer to encode thumbnail
+    if (m_bThumbnailNeeded == TRUE) {
+        // thumbnail deliberately reuses the main stream's buffers and offsets
+        pStreamMem = main_stream->getStreamBufs();
+        if (pStreamMem == NULL) {
+            LOGE("cannot get stream bufs from thumb stream");
+            ret = BAD_VALUE;
+            goto on_error;
+        }
+        cam_frame_len_offset_t thumb_offset;
+        memset(&thumb_offset, 0, sizeof(cam_frame_len_offset_t));
+        main_stream->getFrameOffset(thumb_offset);
+        encode_parm.num_tmb_bufs = MIN(pStreamMem->getCnt(), MM_JPEG_MAX_BUF);
+        for (uint32_t i = 0; i < encode_parm.num_tmb_bufs; i++) {
+            if (pStreamMem != NULL) {
+                encode_parm.src_thumb_buf[i].index = i;
+                bufSize = pStreamMem->getSize(i);
+                if (BAD_INDEX == bufSize) {
+                    LOGE("cannot retrieve buffer size for buffer %u", i);
+                    ret = BAD_VALUE;
+                    goto on_error;
+                }
+                encode_parm.src_thumb_buf[i].buf_size = (uint32_t)bufSize;
+                encode_parm.src_thumb_buf[i].buf_vaddr = (uint8_t *)pStreamMem->getPtr(i);
+                encode_parm.src_thumb_buf[i].fd = pStreamMem->getFd(i);
+                encode_parm.src_thumb_buf[i].format = MM_JPEG_FMT_YUV;
+                encode_parm.src_thumb_buf[i].offset = thumb_offset;
+            }
+        }
+    }
+
+    //Pass output jpeg buffer info to encoder.
+    //mJpegMem is allocated by framework.
+    bufSize = mOutputMem->getSize(jpeg_settings->out_buf_index);
+    if (BAD_INDEX == bufSize) {
+        LOGE("cannot retrieve buffer size for buffer %u",
+                jpeg_settings->out_buf_index);
+        ret = BAD_VALUE;
+        goto on_error;
+    }
+    encode_parm.num_dst_bufs = 1;
+    encode_parm.dest_buf[0].index = 0;
+    encode_parm.dest_buf[0].buf_size = (size_t)bufSize;
+    encode_parm.dest_buf[0].buf_vaddr = (uint8_t *)mOutputMem->getPtr(
+            jpeg_settings->out_buf_index);
+    encode_parm.dest_buf[0].fd = mOutputMem->getFd(
+            jpeg_settings->out_buf_index);
+    encode_parm.dest_buf[0].format = MM_JPEG_FMT_YUV;
+    encode_parm.dest_buf[0].offset = main_offset;
+
+    LOGD("X");
+    return NO_ERROR;
+
+on_error:
+    // goto-based error exit; nothing acquired here needs freeing, just log
+    LOGD("X with error %d", ret);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : processData
+ *
+ * DESCRIPTION: convenience overload with no output buffer; forwards to the
+ *              three-argument variant with a NULL output handle and frame
+ *              number 0.
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::processData(mm_camera_super_buf_t *input)
+{
+    return processData(input, NULL, 0);
+}
+
+/*===========================================================================
+ * FUNCTION   : processData
+ *
+ * DESCRIPTION: enqueue data into dataProc thread
+ *
+ * PARAMETERS :
+ *   @frame   : process input frame
+ *   @output  : process output frame
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *
+ * NOTE       : depends on if offline reprocess is needed, received frame will
+ *              be sent to either input queue of postprocess or jpeg encoding
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::processData(mm_camera_super_buf_t *input,
+        buffer_handle_t *output, uint32_t frameNumber)
+{
+    LOGD("E");
+    pthread_mutex_lock(&mReprocJobLock);
+
+    // enqueue to post proc input queue
+    qcamera_hal3_pp_buffer_t *pp_buffer = (qcamera_hal3_pp_buffer_t *)malloc(
+            sizeof(qcamera_hal3_pp_buffer_t));
+    if (NULL == pp_buffer) {
+        LOGE("out of memory");
+        // BUGFIX: the lock must be released on this error path; returning
+        // while holding mReprocJobLock deadlocks every subsequent
+        // processData()/processPPMetadata() call
+        pthread_mutex_unlock(&mReprocJobLock);
+        return NO_MEMORY;
+    }
+    memset(pp_buffer, 0, sizeof(*pp_buffer));
+    pp_buffer->input = input;
+    pp_buffer->output = output;
+    pp_buffer->frameNumber = frameNumber;
+    m_inputPPQ.enqueue((void *)pp_buffer);
+    // only kick the data-proc thread once matching metadata has arrived;
+    // otherwise processPPMetadata() will kick it when metadata shows up
+    if (!(m_inputMetaQ.isEmpty())) {
+        LOGD("meta queue is not empty, do next job");
+        m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+    } else {
+        LOGD("metadata queue is empty");
+    }
+    pthread_mutex_unlock(&mReprocJobLock);
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : needsReprocess
+ *
+ * DESCRIPTION: Determine if reprocess is needed.
+ *
+ * PARAMETERS :
+ *   @frame   : process frame
+ *
+ * RETURN     :
+ *  TRUE if frame needs to be reprocessed
+ *  FALSE is frame does not need to be reprocessed
+ *
+ *==========================================================================*/
+bool QCamera3PostProcessor::needsReprocess(qcamera_fwk_input_pp_data_t *frame)
+{
+    metadata_buffer_t* meta = (metadata_buffer_t *) frame->metadata_buffer.buffer;
+    bool edgeModeOn = FALSE;
+    bool noiseRedModeOn = FALSE;
+    bool reproNotDone = TRUE;
+
+    // stream config said no reprocess path exists for this frame
+    if (frame->reproc_config.reprocess_type == REPROCESS_TYPE_NONE) {
+        return FALSE;
+    }
+
+    // edge detection
+    IF_META_AVAILABLE(cam_edge_application_t, edgeMode,
+            CAM_INTF_META_EDGE_MODE, meta) {
+        edgeModeOn = (CAM_EDGE_MODE_OFF != edgeMode->edge_mode);
+    }
+
+    // noise reduction
+    IF_META_AVAILABLE(uint32_t, noiseRedMode,
+            CAM_INTF_META_NOISE_REDUCTION_MODE, meta) {
+        noiseRedModeOn = (CAM_NOISE_REDUCTION_MODE_OFF != *noiseRedMode);
+    }
+
+    // mere presence of the reprocess-flags entry clears reproNotDone; the
+    // flag value itself is intentionally unused (presence check only)
+    IF_META_AVAILABLE(uint8_t, reprocess_flags,
+            CAM_INTF_META_REPROCESS_FLAGS, meta) {
+        reproNotDone = FALSE;
+    }
+
+    // reprocess if any filter is enabled or reprocess hasn't happened yet
+    return (edgeModeOn || noiseRedModeOn || reproNotDone);
+}
+
+/*===========================================================================
+ * FUNCTION   : processData
+ *
+ * DESCRIPTION: enqueue data into dataProc thread
+ *
+ * PARAMETERS :
+ *   @frame   : process frame
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *
+ * NOTE       : depends on if offline reprocess is needed, received frame will
+ *              be sent to either input queue of postprocess or jpeg encoding
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::processData(qcamera_fwk_input_pp_data_t *frame)
+{
+    if (needsReprocess(frame)) {
+        ATRACE_INT("Camera:Reprocess", 1);
+        LOGH("scheduling framework reprocess");
+        pthread_mutex_lock(&mReprocJobLock);
+        // enqueu to post proc input queue
+        m_inputFWKPPQ.enqueue((void *)frame);
+        m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+        pthread_mutex_unlock(&mReprocJobLock);
+    } else {
+        // no reprocess needed: pair the frame with its jpeg settings and
+        // hand it straight to the jpeg encoding queue
+        jpeg_settings_t *jpeg_settings = (jpeg_settings_t *)m_jpegSettingsQ.dequeue();
+
+        if (jpeg_settings == NULL) {
+            LOGE("Cannot find jpeg settings");
+            return BAD_VALUE;
+        }
+
+        LOGH("no need offline reprocess, sending to jpeg encoding");
+        qcamera_hal3_jpeg_data_t *jpeg_job =
+            (qcamera_hal3_jpeg_data_t *)malloc(sizeof(qcamera_hal3_jpeg_data_t));
+        if (jpeg_job == NULL) {
+            LOGE("No memory for jpeg job");
+            // NOTE(review): the dequeued jpeg_settings appears to leak on
+            // this path -- confirm ownership and free if appropriate
+            return NO_MEMORY;
+        }
+
+        memset(jpeg_job, 0, sizeof(qcamera_hal3_jpeg_data_t));
+        jpeg_job->fwk_frame = frame;
+        jpeg_job->jpeg_settings = jpeg_settings;
+        jpeg_job->metadata =
+                (metadata_buffer_t *) frame->metadata_buffer.buffer;
+
+        // enqueu to jpeg input queue
+        m_inputJpegQ.enqueue((void *)jpeg_job);
+        m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : processPPMetadata
+ *
+ * DESCRIPTION: enqueue data into dataProc thread
+ *
+ * PARAMETERS :
+ *   @frame   : process metadata frame received from pic channel
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::processPPMetadata(mm_camera_super_buf_t *reproc_meta)
+{
+    LOGD("E");
+    pthread_mutex_lock(&mReprocJobLock);
+    // queue the metadata, then kick the data-proc thread only when a pp
+    // input frame is already waiting to be paired with it
+    m_inputMetaQ.enqueue((void *)reproc_meta);
+    if (m_inputPPQ.isEmpty()) {
+       LOGD("pp queue is empty, not calling do next job");
+    } else {
+       LOGD("pp queue is not empty, do next job");
+       m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+    }
+    pthread_mutex_unlock(&mReprocJobLock);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : processJpegSettingData
+ *
+ * DESCRIPTION: enqueue jpegSetting into dataProc thread
+ *
+ * PARAMETERS :
+ *   @jpeg_settings : jpeg settings data received from pic channel
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::processJpegSettingData(
+        jpeg_settings_t *jpeg_settings)
+{
+    // reject NULL up front -- the settings queue must only hold valid entries
+    if (NULL == jpeg_settings) {
+        LOGE("invalid jpeg settings pointer");
+        return -EINVAL;
+    }
+    return m_jpegSettingsQ.enqueue((void *)jpeg_settings);
+}
+
+/*===========================================================================
+ * FUNCTION   : processPPData
+ *
+ * DESCRIPTION: process received frame after reprocess.
+ *
+ * PARAMETERS :
+ *   @frame   : received frame from reprocess channel.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *
+ * NOTE       : The frame after reprocess need to send to jpeg encoding.
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::processPPData(mm_camera_super_buf_t *frame)
+{
+    // head of the ongoing queue is assumed to be the job this reprocessed
+    // frame belongs to (jobs presumably complete in FIFO order -- confirm)
+    qcamera_hal3_pp_data_t *job = (qcamera_hal3_pp_data_t *)m_ongoingPPQ.dequeue();
+    ATRACE_INT("Camera:Reprocess", 0);
+    if (job == NULL || ((NULL == job->src_frame) && (NULL == job->fwk_src_frame))) {
+        LOGE("Cannot find reprocess job");
+        return BAD_VALUE;
+    }
+    if (job->jpeg_settings == NULL) {
+        LOGE("Cannot find jpeg settings");
+        return BAD_VALUE;
+    }
+
+    qcamera_hal3_jpeg_data_t *jpeg_job =
+        (qcamera_hal3_jpeg_data_t *)malloc(sizeof(qcamera_hal3_jpeg_data_t));
+    if (jpeg_job == NULL) {
+        LOGE("No memory for jpeg job");
+        // NOTE(review): 'job' is not released/freed on this path -- confirm
+        return NO_MEMORY;
+    }
+
+    memset(jpeg_job, 0, sizeof(qcamera_hal3_jpeg_data_t));
+    jpeg_job->src_frame = frame;
+    // keep a handle to the original (pre-reprocess) frame so it can be
+    // released after jpeg encoding finishes
+    if(frame != job->src_frame)
+        jpeg_job->src_reproc_frame = job->src_frame;
+    if (NULL == job->fwk_src_frame) {
+        jpeg_job->metadata = job->metadata;
+    } else {
+        // framework-initiated reprocess: metadata travels with the fwk frame
+        jpeg_job->metadata =
+                (metadata_buffer_t *) job->fwk_src_frame->metadata_buffer.buffer;
+        jpeg_job->fwk_src_buffer = job->fwk_src_frame;
+    }
+    jpeg_job->src_metadata = job->src_metadata;
+    jpeg_job->jpeg_settings = job->jpeg_settings;
+
+    // free pp job buf (its contents were transferred to jpeg_job above)
+    free(job);
+
+    // enqueu reprocessed frame to jpeg input queue
+    m_inputJpegQ.enqueue((void *)jpeg_job);
+
+    // wait up data proc thread
+    m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : dequeuePPJob
+ *
+ * DESCRIPTION: find a postprocessing job from ongoing pp queue by frame number
+ *
+ * PARAMETERS :
+ *   @frameNumber : frame number for the pp job
+ *
+ * RETURN     : ptr to a pp job struct. NULL if not found.
+ *==========================================================================*/
+qcamera_hal3_pp_data_t *QCamera3PostProcessor::dequeuePPJob(uint32_t frameNumber)
+{
+    qcamera_hal3_pp_data_t *job =
+            (qcamera_hal3_pp_data_t *)m_ongoingPPQ.dequeue();
+
+    if (NULL == job) {
+        LOGE("Fatal: ongoing PP queue is empty");
+        return NULL;
+    }
+    // Log (but tolerate) a mismatch between the queue head and the
+    // requested frame number; the head is returned either way.
+    if ((job->fwk_src_frame != NULL) &&
+            (job->fwk_src_frame->frameNumber != frameNumber)) {
+        LOGE("head of pp queue doesn't match requested frame number");
+    }
+    return job;
+}
+
+/*===========================================================================
+ * FUNCTION   : findJpegJobByJobId
+ *
+ * DESCRIPTION: find a jpeg job from ongoing Jpeg queue by its job ID
+ *
+ * PARAMETERS :
+ *   @jobId   : job Id of the job
+ *
+ * RETURN     : ptr to a jpeg job struct. NULL if not found.
+ *
+ * NOTE       : Currently only one job is sending to mm-jpeg-interface for jpeg
+ *              encoding. Therefore simply dequeue from the ongoing Jpeg Queue
+ *              will serve the purpose to find the jpeg job.
+ *==========================================================================*/
+qcamera_hal3_jpeg_data_t *QCamera3PostProcessor::findJpegJobByJobId(uint32_t jobId)
+{
+    if (0 == jobId) {
+        LOGE("not a valid jpeg jobId");
+        return NULL;
+    }
+
+    // only one jpeg job is ever in flight, so the queue head is the match
+    return (qcamera_hal3_jpeg_data_t *)m_ongoingJpegQ.dequeue();
+}
+
+/*===========================================================================
+ * FUNCTION   : releasePPInputData
+ *
+ * DESCRIPTION: callback function to release post process input data node
+ *
+ * PARAMETERS :
+ *   @data      : ptr to post process input data
+ *   @user_data : user data ptr (QCamera3Reprocessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera3PostProcessor::releasePPInputData(void *data, void *user_data)
+{
+    QCamera3PostProcessor *pme = (QCamera3PostProcessor *)user_data;
+    qcamera_hal3_pp_buffer_t *buf = (qcamera_hal3_pp_buffer_t *)data;
+    // guard clauses replace the original nested-if structure
+    if ((NULL == pme) || (NULL == buf) || (NULL == buf->input)) {
+        return;
+    }
+    // return the input superbuf to the interface, then free its container
+    pme->releaseSuperBuf(buf->input);
+    free(buf->input);
+    buf->input = NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseMetaData
+ *
+ * DESCRIPTION: callback function to release metadata camera buffer
+ *
+ * PARAMETERS :
+ *   @data      : ptr to post process input data
+ *   @user_data : user data ptr (QCamera3Reprocessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera3PostProcessor::releaseMetadata(void *data, void *user_data)
+{
+    QCamera3PostProcessor *pme = (QCamera3PostProcessor *)user_data;
+    if (NULL == pme) {
+        return;
+    }
+    // hand the metadata buffer back to the owning channel
+    pme->m_parent->metadataBufDone((mm_camera_super_buf_t *)data);
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseJpegData
+ *
+ * DESCRIPTION: callback function to release jpeg job node
+ *
+ * PARAMETERS :
+ *   @data      : ptr to ongoing jpeg job data
+ *   @user_data : user data ptr (QCamera3Reprocessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera3PostProcessor::releaseJpegData(void *data, void *user_data)
+{
+    QCamera3PostProcessor *pme = (QCamera3PostProcessor *)user_data;
+    if (NULL == pme) {
+        return;
+    }
+    // Free all internal resources held by the jpeg job node.
+    pme->releaseJpegJobData((qcamera_hal3_jpeg_data_t *)data);
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseOngoingPPData
+ *
+ * DESCRIPTION: callback function to release ongoing postprocess job node
+ *
+ * PARAMETERS :
+ *   @data      : ptr to ongoing postprocess job
+ *   @user_data : user data ptr (QCamera3Reprocessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera3PostProcessor::releaseOngoingPPData(void *data, void *user_data)
+{
+    QCamera3PostProcessor *pme = (QCamera3PostProcessor *)user_data;
+    if (NULL == pme) {
+        return;
+    }
+
+    qcamera_hal3_pp_data_t *pp_data = (qcamera_hal3_pp_data_t *)data;
+
+    // Return the source superbuf to the kernel before freeing the job node.
+    if ((NULL != pp_data) && (NULL != pp_data->src_frame)) {
+        pme->releaseSuperBuf(pp_data->src_frame);
+    }
+
+    pme->releasePPJobData(pp_data);
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseSuperBuf
+ *
+ * DESCRIPTION: function to release a superbuf frame by returning back to kernel
+ *
+ * PARAMETERS :
+ *   @super_buf : ptr to the superbuf frame
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera3PostProcessor::releaseSuperBuf(mm_camera_super_buf_t *super_buf)
+{
+    // Hand the superbuf back to the parent channel, which queues the
+    // underlying buffers back to the kernel.
+    if ((NULL != super_buf) && (NULL != m_parent)) {
+        m_parent->bufDone(super_buf);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseOfflineBuffers
+ *
+ * DESCRIPTION: function to release/unmap offline buffers if any
+ *
+ * PARAMETERS :
+ * @allBuffers : flag that asks to release all buffers or only one
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::releaseOfflineBuffers(bool allBuffers)
+{
+    // Without a reprocess channel there is nothing mapped to release.
+    if (NULL == m_pReprocChannel) {
+        return NO_ERROR;
+    }
+    return m_pReprocChannel->unmapOfflineBuffers(allBuffers);
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseJpegJobData
+ *
+ * DESCRIPTION: function to release internal resources in jpeg job struct
+ *
+ * PARAMETERS :
+ *   @job     : ptr to jpeg job struct
+ *
+ * RETURN     : None
+ *
+ * NOTE       : original source frame need to be queued back to kernel for
+ *              future use. Output buf of jpeg job need to be released since
+ *              it's allocated for each job. Exif object need to be deleted.
+ *==========================================================================*/
+void QCamera3PostProcessor::releaseJpegJobData(qcamera_hal3_jpeg_data_t *job)
+{
+    ATRACE_CALL();
+    int32_t rc = NO_ERROR;
+    LOGD("E");
+    if (NULL != job) {
+        // NOTE(review): only the wrapper struct is freed here, without a
+        // bufDone — presumably the underlying reprocess-source buffers are
+        // returned to the kernel elsewhere; confirm against callers.
+        if (NULL != job->src_reproc_frame) {
+            free(job->src_reproc_frame);
+            job->src_reproc_frame = NULL;
+        }
+
+        // Queue the reprocess output frame back to the reprocess channel
+        // (if one exists) before freeing its wrapper.
+        if (NULL != job->src_frame) {
+            if (NULL != m_pReprocChannel) {
+                rc = m_pReprocChannel->bufDone(job->src_frame);
+                if (NO_ERROR != rc)
+                    LOGE("bufDone error: %d", rc);
+            }
+            free(job->src_frame);
+            job->src_frame = NULL;
+        }
+
+        // A framework-supplied source buffer takes precedence; otherwise the
+        // camera metadata buffer is returned to the parent channel and freed.
+        if (NULL != job->fwk_src_buffer) {
+            free(job->fwk_src_buffer);
+            job->fwk_src_buffer = NULL;
+        } else if (NULL != job->src_metadata) {
+            m_parent->metadataBufDone(job->src_metadata);
+            free(job->src_metadata);
+            job->src_metadata = NULL;
+        }
+
+        if (NULL != job->fwk_frame) {
+            free(job->fwk_frame);
+            job->fwk_frame = NULL;
+        }
+
+        // Exif object is allocated per job (see encode path); delete it here.
+        if (NULL != job->pJpegExifObj) {
+            delete job->pJpegExifObj;
+            job->pJpegExifObj = NULL;
+        }
+
+        if (NULL != job->jpeg_settings) {
+            free(job->jpeg_settings);
+            job->jpeg_settings = NULL;
+        }
+    }
+    /* Additional trigger to process any pending jobs in the input queue */
+    m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+    LOGD("X");
+}
+
+/*===========================================================================
+ * FUNCTION   : releasePPJobData
+ *
+ * DESCRIPTION: function to release internal resources in pp job struct
+ *
+ * PARAMETERS :
+ *   @job     : ptr to pp job struct
+ *
+ * RETURN     : None
+ *
+ * NOTE       : Original source metadata buffer needs to be released and
+ *              queued back to kernel for future use. src_frame, src_metadata,
+ *              and fwk_src_frame structures need to be freed.
+ *==========================================================================*/
+void QCamera3PostProcessor::releasePPJobData(qcamera_hal3_pp_data_t *pp_job)
+{
+    ATRACE_CALL();
+    LOGD("E");
+    if (NULL != pp_job) {
+        if (NULL != pp_job->src_frame) {
+            free(pp_job->src_frame);
+            // Return the metadata buffer to the kernel via the parent
+            // channel, then free the wrapper struct.
+            if (NULL != pp_job->src_metadata) {
+                m_parent->metadataBufDone(pp_job->src_metadata);
+                free(pp_job->src_metadata);
+            }
+            // NOTE(review): src_metadata is freed above but not set to NULL
+            // here (only src_frame and metadata are) — confirm no later reads
+            // of pp_job->src_metadata can occur.
+            pp_job->src_frame = NULL;
+            pp_job->metadata = NULL;
+        }
+
+        if (NULL != pp_job->fwk_src_frame) {
+            free(pp_job->fwk_src_frame);
+            pp_job->fwk_src_frame = NULL;
+        }
+    }
+
+    /* Additional trigger to process any pending jobs in the input queue */
+    m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+    LOGD("X");
+}
+
+/*===========================================================================
+ * FUNCTION   : getColorfmtFromImgFmt
+ *
+ * DESCRIPTION: function to return jpeg color format based on its image format
+ *
+ * PARAMETERS :
+ *   @img_fmt : image format
+ *
+ * RETURN     : jpeg color format that can be understandable by omx lib
+ *==========================================================================*/
+mm_jpeg_color_format QCamera3PostProcessor::getColorfmtFromImgFmt(cam_format_t img_fmt)
+{
+    // Cases with identical results are grouped; anything unrecognized
+    // falls back to CrCb 4:2:0, same as the NV21 family.
+    switch (img_fmt) {
+    case CAM_FORMAT_YUV_420_NV12:
+    case CAM_FORMAT_YUV_420_NV12_VENUS:
+    case CAM_FORMAT_YUV_420_YV12:
+        return MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2;
+    case CAM_FORMAT_YUV_422_NV61:
+        return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1;
+    case CAM_FORMAT_YUV_422_NV16:
+        return MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1;
+    case CAM_FORMAT_YUV_420_NV21:
+    case CAM_FORMAT_YUV_420_NV21_VENUS:
+    case CAM_FORMAT_YUV_420_NV21_ADRENO:
+    default:
+        return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegImgTypeFromImgFmt
+ *
+ * DESCRIPTION: function to return jpeg encode image type based on its image format
+ *
+ * PARAMETERS :
+ *   @img_fmt : image format
+ *
+ * RETURN     : return jpeg source image format (YUV or Bitstream)
+ *==========================================================================*/
+mm_jpeg_format_t QCamera3PostProcessor::getJpegImgTypeFromImgFmt(cam_format_t img_fmt)
+{
+    // Every supported camera format in the original mapping — and the
+    // default for unrecognized formats — resolves to MM_JPEG_FMT_YUV,
+    // so the switch collapses to a single return.
+    (void)img_fmt;
+    return MM_JPEG_FMT_YUV;
+}
+
+/*===========================================================================
+ * FUNCTION   : encodeFWKData
+ *
+ * DESCRIPTION: function to prepare encoding job information and send to
+ *              mm-jpeg-interface to do the encoding job
+ *
+ * PARAMETERS :
+ *   @jpeg_job_data : ptr to a struct saving job related information
+ *   @needNewSess   : flag to indicate if a new jpeg encoding session need
+ *                    to be created. After creation, this flag will be toggled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::encodeFWKData(qcamera_hal3_jpeg_data_t *jpeg_job_data,
+        uint8_t &needNewSess)
+{
+    LOGD("E");
+    int32_t ret = NO_ERROR;
+    mm_jpeg_job_t jpg_job;
+    uint32_t jobId = 0;
+    qcamera_fwk_input_pp_data_t *recvd_frame = NULL;
+    metadata_buffer_t *metadata = NULL;
+    jpeg_settings_t *jpeg_settings = NULL;
+    QCamera3HardwareInterface* hal_obj = NULL;
+    mm_jpeg_debug_exif_params_t *exif_debug_params = NULL;
+    bool needJpegExifRotation = false;
+
+    if (NULL == jpeg_job_data) {
+        LOGE("Invalid jpeg job");
+        return BAD_VALUE;
+    }
+
+    recvd_frame = jpeg_job_data->fwk_frame;
+    if (NULL == recvd_frame) {
+        LOGE("Invalid input buffer");
+        return BAD_VALUE;
+    }
+
+    metadata = jpeg_job_data->metadata;
+    if (NULL == metadata) {
+        LOGE("Invalid metadata buffer");
+        return BAD_VALUE;
+    }
+
+    jpeg_settings = jpeg_job_data->jpeg_settings;
+    if (NULL == jpeg_settings) {
+        LOGE("Invalid jpeg settings buffer");
+        return BAD_VALUE;
+    }
+
+    // Fix: the original condition tested src_frame twice; the intent (per the
+    // error message) is to reject jobs carrying BOTH a framework source
+    // buffer and a camera source buffer, which this path cannot handle.
+    if ((NULL != jpeg_job_data->fwk_frame) && (NULL != jpeg_job_data->src_frame)) {
+        LOGE("Unsupported case both framework and camera source buffers are valid!");
+        return BAD_VALUE;
+    }
+
+    hal_obj = (QCamera3HardwareInterface*)m_parent->mUserData;
+    // hal_obj is dereferenced unconditionally below (needJpegExifRotation,
+    // get3AExifParams, getMobicatMask); fail early instead of crashing.
+    if (NULL == hal_obj) {
+        LOGE("hal_obj is NULL, Error");
+        return BAD_VALUE;
+    }
+
+    if (mJpegClientHandle <= 0) {
+        LOGE("Error: bug here, mJpegClientHandle is 0");
+        return UNKNOWN_ERROR;
+    }
+
+    cam_dimension_t src_dim;
+    memset(&src_dim, 0, sizeof(cam_dimension_t));
+    src_dim.width = recvd_frame->reproc_config.input_stream_dim.width;
+    src_dim.height = recvd_frame->reproc_config.input_stream_dim.height;
+
+    cam_dimension_t dst_dim;
+    memset(&dst_dim, 0, sizeof(cam_dimension_t));
+    dst_dim.width = recvd_frame->reproc_config.output_stream_dim.width;
+    dst_dim.height = recvd_frame->reproc_config.output_stream_dim.height;
+
+    cam_rect_t crop;
+    memset(&crop, 0, sizeof(cam_rect_t));
+    //TBD_later - Zoom event removed in stream
+    //main_stream->getCropInfo(crop);
+
+    // Set JPEG encode crop in reprocess frame metadata
+    // If this JPEG crop info exist, encoder should
+    // crop and scale (if roi width and height is not 0)
+    IF_META_AVAILABLE(cam_stream_crop_info_t, jpeg_crop,
+            CAM_INTF_PARM_JPEG_ENCODE_CROP, metadata) {
+        memcpy(&crop, &(jpeg_crop->crop), sizeof(cam_rect_t));
+        // change the JPEG dst_dim if roi_map width and height is not 0
+        if (jpeg_crop->roi_map.width != 0 &&
+                jpeg_crop->roi_map.height != 0) {
+            dst_dim.width = jpeg_crop->roi_map.width;
+            dst_dim.height = jpeg_crop->roi_map.height;
+        }
+    }
+
+    needJpegExifRotation = (hal_obj->needJpegExifRotation() || !needsReprocess(recvd_frame));
+
+    LOGH("Need new session?:%d", needNewSess);
+    if (needNewSess) {
+        //creating a new session, so we must destroy the old one
+        if ( 0 < mJpegSessionId ) {
+            ret = mJpegHandle.destroy_session(mJpegSessionId);
+            if (ret != NO_ERROR) {
+                LOGE("Error destroying an old jpeg encoding session, id = %d",
+                       mJpegSessionId);
+                return ret;
+            }
+            mJpegSessionId = 0;
+        }
+        // create jpeg encoding session
+        mm_jpeg_encode_params_t encodeParam;
+        memset(&encodeParam, 0, sizeof(mm_jpeg_encode_params_t));
+        getFWKJpegEncodeConfig(encodeParam, recvd_frame, jpeg_settings);
+        LOGH("#src bufs:%d # tmb bufs:%d #dst_bufs:%d",
+                     encodeParam.num_src_bufs,encodeParam.num_tmb_bufs,encodeParam.num_dst_bufs);
+        if (!needJpegExifRotation &&
+            (jpeg_settings->jpeg_orientation == 90 ||
+            jpeg_settings->jpeg_orientation == 270)) {
+            // swap src width and height, stride and scanline due to rotation
+            encodeParam.main_dim.src_dim.width = src_dim.height;
+            encodeParam.main_dim.src_dim.height = src_dim.width;
+            encodeParam.thumb_dim.src_dim.width = src_dim.height;
+            encodeParam.thumb_dim.src_dim.height = src_dim.width;
+
+            int32_t temp = encodeParam.src_main_buf[0].offset.mp[0].stride;
+            encodeParam.src_main_buf[0].offset.mp[0].stride =
+                encodeParam.src_main_buf[0].offset.mp[0].scanline;
+            encodeParam.src_main_buf[0].offset.mp[0].scanline = temp;
+
+            temp = encodeParam.src_thumb_buf[0].offset.mp[0].stride;
+            encodeParam.src_thumb_buf[0].offset.mp[0].stride =
+                encodeParam.src_thumb_buf[0].offset.mp[0].scanline;
+            encodeParam.src_thumb_buf[0].offset.mp[0].scanline = temp;
+        } else {
+            encodeParam.main_dim.src_dim = src_dim;
+            encodeParam.thumb_dim.src_dim = src_dim;
+        }
+        encodeParam.main_dim.dst_dim = dst_dim;
+        encodeParam.thumb_dim.dst_dim = jpeg_settings->thumbnail_size;
+
+        if (needJpegExifRotation) {
+            encodeParam.thumb_rotation = (uint32_t)jpeg_settings->jpeg_orientation;
+        }
+
+        LOGI("Src Buffer cnt = %d, res = %dX%d len = %d rot = %d "
+            "src_dim = %dX%d dst_dim = %dX%d",
+            encodeParam.num_src_bufs,
+            encodeParam.src_main_buf[0].offset.mp[0].stride,
+            encodeParam.src_main_buf[0].offset.mp[0].scanline,
+            encodeParam.src_main_buf[0].offset.frame_len,
+            encodeParam.rotation,
+            src_dim.width, src_dim.height,
+            dst_dim.width, dst_dim.height);
+        LOGI("Src THUMB buf_cnt = %d, res = %dX%d len = %d rot = %d "
+            "src_dim = %dX%d, dst_dim = %dX%d",
+            encodeParam.num_tmb_bufs,
+            encodeParam.src_thumb_buf[0].offset.mp[0].stride,
+            encodeParam.src_thumb_buf[0].offset.mp[0].scanline,
+            encodeParam.src_thumb_buf[0].offset.frame_len,
+            encodeParam.thumb_rotation,
+            encodeParam.thumb_dim.src_dim.width,
+            encodeParam.thumb_dim.src_dim.height,
+            encodeParam.thumb_dim.dst_dim.width,
+            encodeParam.thumb_dim.dst_dim.height);
+
+        LOGH("#src bufs:%d # tmb bufs:%d #dst_bufs:%d",
+                     encodeParam.num_src_bufs,encodeParam.num_tmb_bufs,encodeParam.num_dst_bufs);
+
+        ret = mJpegHandle.create_session(mJpegClientHandle, &encodeParam, &mJpegSessionId);
+        if (ret != NO_ERROR) {
+            LOGE("Error creating a new jpeg encoding session, ret = %d", ret);
+            return ret;
+        }
+        needNewSess = FALSE;
+    }
+
+    // Fill in new job
+    memset(&jpg_job, 0, sizeof(mm_jpeg_job_t));
+    jpg_job.job_type = JPEG_JOB_TYPE_ENCODE;
+    jpg_job.encode_job.session_id = mJpegSessionId;
+    jpg_job.encode_job.src_index = 0;
+    jpg_job.encode_job.dst_index = 0;
+
+    // Set main dim job parameters and handle rotation
+    if (!needJpegExifRotation && (jpeg_settings->jpeg_orientation == 90 ||
+            jpeg_settings->jpeg_orientation == 270)) {
+
+        jpg_job.encode_job.main_dim.src_dim.width = src_dim.height;
+        jpg_job.encode_job.main_dim.src_dim.height = src_dim.width;
+
+        jpg_job.encode_job.main_dim.dst_dim.width = dst_dim.height;
+        jpg_job.encode_job.main_dim.dst_dim.height = dst_dim.width;
+
+        jpg_job.encode_job.main_dim.crop.width = crop.height;
+        jpg_job.encode_job.main_dim.crop.height = crop.width;
+        jpg_job.encode_job.main_dim.crop.left = crop.top;
+        jpg_job.encode_job.main_dim.crop.top = crop.left;
+    } else {
+        jpg_job.encode_job.main_dim.src_dim = src_dim;
+        jpg_job.encode_job.main_dim.dst_dim = dst_dim;
+        jpg_job.encode_job.main_dim.crop = crop;
+    }
+
+    // get 3a sw version info
+    cam_q3a_version_t sw_version;
+    memset(&sw_version, 0, sizeof(sw_version));
+    hal_obj->get3AVersion(sw_version);
+
+    // get exif data
+    QCamera3Exif *pJpegExifObj = getExifData(metadata, jpeg_settings, needJpegExifRotation);
+    // Ownership of the exif object moves to the job; it is deleted in
+    // releaseJpegJobData().
+    jpeg_job_data->pJpegExifObj = pJpegExifObj;
+    if (pJpegExifObj != NULL) {
+        jpg_job.encode_job.exif_info.exif_data = pJpegExifObj->getEntries();
+        jpg_job.encode_job.exif_info.numOfEntries =
+            pJpegExifObj->getNumOfEntries();
+        jpg_job.encode_job.exif_info.debug_data.sw_3a_version[0] =
+            sw_version.major_version;
+        jpg_job.encode_job.exif_info.debug_data.sw_3a_version[1] =
+            sw_version.minor_version;
+        jpg_job.encode_job.exif_info.debug_data.sw_3a_version[2] =
+            sw_version.patch_version;
+        jpg_job.encode_job.exif_info.debug_data.sw_3a_version[3] =
+            sw_version.new_feature_des;
+    }
+
+    // thumbnail dim
+    LOGH("Thumbnail needed:%d", m_bThumbnailNeeded);
+    if (m_bThumbnailNeeded == TRUE) {
+        jpg_job.encode_job.thumb_dim.dst_dim =
+                jpeg_settings->thumbnail_size;
+
+        if (!needJpegExifRotation && (jpeg_settings->jpeg_orientation == 90 ||
+                jpeg_settings->jpeg_orientation == 270)) {
+            //swap the thumbnail destination width and height if it has
+            //already been rotated
+            int temp = jpg_job.encode_job.thumb_dim.dst_dim.width;
+            jpg_job.encode_job.thumb_dim.dst_dim.width =
+                    jpg_job.encode_job.thumb_dim.dst_dim.height;
+            jpg_job.encode_job.thumb_dim.dst_dim.height = temp;
+
+            jpg_job.encode_job.thumb_dim.src_dim.width = src_dim.height;
+            jpg_job.encode_job.thumb_dim.src_dim.height = src_dim.width;
+
+            jpg_job.encode_job.thumb_dim.crop.width = crop.height;
+            jpg_job.encode_job.thumb_dim.crop.height = crop.width;
+            jpg_job.encode_job.thumb_dim.crop.left = crop.top;
+            jpg_job.encode_job.thumb_dim.crop.top = crop.left;
+        } else {
+            jpg_job.encode_job.thumb_dim.src_dim = src_dim;
+            jpg_job.encode_job.thumb_dim.crop = crop;
+        }
+        jpg_job.encode_job.thumb_index = 0;
+    }
+
+    jpg_job.encode_job.cam_exif_params = hal_obj->get3AExifParams();
+    // Keep the original debug-params pointer so it can be copied into the
+    // locally allocated buffer below.
+    exif_debug_params = jpg_job.encode_job.cam_exif_params.debug_params;
+    // Fill in exif debug data
+    // Allocate for a local copy of debug parameters
+    jpg_job.encode_job.cam_exif_params.debug_params =
+            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
+    if (!jpg_job.encode_job.cam_exif_params.debug_params) {
+        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
+        return NO_MEMORY;
+    }
+
+    jpg_job.encode_job.mobicat_mask = hal_obj->getMobicatMask();
+
+    if (metadata != NULL) {
+        // Fill in the metadata passed as parameter
+        jpg_job.encode_job.p_metadata = metadata;
+
+       jpg_job.encode_job.p_metadata->is_mobicat_aec_params_valid =
+                jpg_job.encode_job.cam_exif_params.cam_3a_params_valid;
+
+       if (jpg_job.encode_job.cam_exif_params.cam_3a_params_valid) {
+            jpg_job.encode_job.p_metadata->mobicat_aec_params =
+                jpg_job.encode_job.cam_exif_params.cam_3a_params;
+       }
+
+        if (exif_debug_params) {
+            // Copy debug parameters locally.
+           memcpy(jpg_job.encode_job.cam_exif_params.debug_params,
+                   exif_debug_params, (sizeof(mm_jpeg_debug_exif_params_t)));
+           /* Save a copy of 3A debug params */
+            jpg_job.encode_job.p_metadata->is_statsdebug_ae_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->ae_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_awb_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->awb_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_af_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->af_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_asd_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->asd_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_stats_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_bestats_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->bestats_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_bhist_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->bhist_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_3a_tuning_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->q3a_tuning_debug_params_valid;
+
+            if (jpg_job.encode_job.cam_exif_params.debug_params->ae_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_ae_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->ae_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->awb_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_awb_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->awb_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->af_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_af_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->af_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->asd_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_asd_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->asd_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_stats_buffer_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->bestats_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_bestats_buffer_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->bestats_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->bhist_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_bhist_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->bhist_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->q3a_tuning_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_3a_tuning_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->q3a_tuning_debug_params;
+            }
+        }
+    } else {
+       LOGW("Metadata is null");
+    }
+
+    // Multi image info
+    if (hal_obj->isDeviceLinked() == TRUE) {
+        jpg_job.encode_job.multi_image_info.type = MM_JPEG_TYPE_JPEG;
+        jpg_job.encode_job.multi_image_info.num_of_images = 1;
+        jpg_job.encode_job.multi_image_info.enable_metadata = 1;
+        if (hal_obj->isMainCamera() == TRUE) {
+            jpg_job.encode_job.multi_image_info.is_primary = 1;
+        } else {
+            jpg_job.encode_job.multi_image_info.is_primary = 0;
+        }
+    }
+
+    jpg_job.encode_job.hal_version = CAM_HAL_V3;
+
+    //Start jpeg encoding
+    ret = mJpegHandle.start_job(&jpg_job, &jobId);
+    // The local debug-params copy is consumed by start_job; free it either way.
+    if (jpg_job.encode_job.cam_exif_params.debug_params) {
+        free(jpg_job.encode_job.cam_exif_params.debug_params);
+    }
+    if (ret == NO_ERROR) {
+        // remember job info
+        jpeg_job_data->jobId = jobId;
+    }
+
+    LOGD("X");
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : encodeData
+ *
+ * DESCRIPTION: function to prepare encoding job information and send to
+ *              mm-jpeg-interface to do the encoding job
+ *
+ * PARAMETERS :
+ *   @jpeg_job_data : ptr to a struct saving job related information
+ *   @needNewSess   : flag to indicate if a new jpeg encoding session need
+ *                    to be created. After creation, this flag will be toggled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::encodeData(qcamera_hal3_jpeg_data_t *jpeg_job_data,
+                          uint8_t &needNewSess)
+{
+    ATRACE_CALL();
+    LOGD("E");
+    int32_t ret = NO_ERROR;
+    mm_jpeg_job_t jpg_job;
+    uint32_t jobId = 0;
+    QCamera3Stream *main_stream = NULL;
+    mm_camera_buf_def_t *main_frame = NULL;
+    QCamera3Channel *srcChannel = NULL;
+    mm_camera_super_buf_t *recvd_frame = NULL;
+    metadata_buffer_t *metadata = NULL;
+    jpeg_settings_t *jpeg_settings = NULL;
+    QCamera3HardwareInterface* hal_obj = NULL;
+    mm_jpeg_debug_exif_params_t *exif_debug_params = NULL;
+    if (m_parent != NULL) {
+       hal_obj = (QCamera3HardwareInterface*)m_parent->mUserData;
+    } else {
+       LOGE("m_parent is NULL, Error");
+       return BAD_VALUE;
+    }
+    bool needJpegExifRotation = false;
+
+    recvd_frame = jpeg_job_data->src_frame;
+    metadata = jpeg_job_data->metadata;
+    jpeg_settings = jpeg_job_data->jpeg_settings;
+
+    LOGD("encoding bufIndex: %u",
+        jpeg_job_data->src_frame->bufs[0]->buf_idx);
+
+    QCamera3Channel *pChannel = NULL;
+    // first check picture channel
+    if (m_parent->getMyHandle() == recvd_frame->ch_id) {
+        pChannel = m_parent;
+    }
+    // check reprocess channel if not found
+    if (pChannel == NULL) {
+        if (m_pReprocChannel != NULL &&
+            m_pReprocChannel->getMyHandle() == recvd_frame->ch_id) {
+            pChannel = m_pReprocChannel;
+        }
+    }
+
+    srcChannel = pChannel;
+
+    if (srcChannel == NULL) {
+        LOGE("No corresponding channel (ch_id = %d) exist, return here",
+               recvd_frame->ch_id);
+        return BAD_VALUE;
+    }
+
+    // find snapshot frame and thumnail frame
+    //Note: In this version we will receive only snapshot frame.
+    for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
+        QCamera3Stream *srcStream =
+            srcChannel->getStreamByHandle(recvd_frame->bufs[i]->stream_id);
+        if (srcStream != NULL) {
+            switch (srcStream->getMyType()) {
+            case CAM_STREAM_TYPE_SNAPSHOT:
+            case CAM_STREAM_TYPE_OFFLINE_PROC:
+                main_stream = srcStream;
+                main_frame = recvd_frame->bufs[i];
+                break;
+            default:
+                break;
+            }
+        }
+    }
+
+    if(NULL == main_frame){
+       LOGE("Main frame is NULL");
+       return BAD_VALUE;
+    }
+
+    QCamera3StreamMem *memObj = (QCamera3StreamMem *)main_frame->mem_info;
+    if (NULL == memObj) {
+        LOGE("Memeory Obj of main frame is NULL");
+        return NO_MEMORY;
+    }
+
+    // clean and invalidate cache ops through mem obj of the frame
+    memObj->cleanInvalidateCache(main_frame->buf_idx);
+
+    if (mJpegClientHandle <= 0) {
+        LOGE("Error: bug here, mJpegClientHandle is 0");
+        return UNKNOWN_ERROR;
+    }
+    cam_dimension_t src_dim;
+    memset(&src_dim, 0, sizeof(cam_dimension_t));
+    main_stream->getFrameDimension(src_dim);
+
+    cam_dimension_t dst_dim;
+    memset(&dst_dim, 0, sizeof(cam_dimension_t));
+    if (NO_ERROR != m_parent->getStreamSize(dst_dim)) {
+        LOGE("Failed to get size of the JPEG stream");
+        return UNKNOWN_ERROR;
+    }
+
+    needJpegExifRotation = hal_obj->needJpegExifRotation();
+    IF_META_AVAILABLE(cam_rotation_info_t, rotation_info, CAM_INTF_PARM_ROTATION, metadata) {
+        if (jpeg_settings->jpeg_orientation != 0 && rotation_info->rotation == ROTATE_0) {
+            needJpegExifRotation = TRUE;
+            LOGH("Need EXIF JPEG ROTATION");
+        }
+    }
+    LOGH("Need new session?:%d", needNewSess);
+    if (needNewSess) {
+        //creating a new session, so we must destroy the old one
+        if ( 0 < mJpegSessionId ) {
+            ret = mJpegHandle.destroy_session(mJpegSessionId);
+            if (ret != NO_ERROR) {
+                LOGE("Error destroying an old jpeg encoding session, id = %d",
+                       mJpegSessionId);
+                return ret;
+            }
+            mJpegSessionId = 0;
+        }
+        // create jpeg encoding session
+        mm_jpeg_encode_params_t encodeParam;
+        memset(&encodeParam, 0, sizeof(mm_jpeg_encode_params_t));
+        getJpegEncodeConfig(encodeParam, main_stream, jpeg_settings);
+        LOGH("#src bufs:%d # tmb bufs:%d #dst_bufs:%d",
+                     encodeParam.num_src_bufs,encodeParam.num_tmb_bufs,encodeParam.num_dst_bufs);
+        if (!needJpegExifRotation &&
+            (jpeg_settings->jpeg_orientation == 90 ||
+            jpeg_settings->jpeg_orientation == 270)) {
+           //swap src width and height, stride and scanline due to rotation
+           encodeParam.main_dim.src_dim.width = src_dim.height;
+           encodeParam.main_dim.src_dim.height = src_dim.width;
+           encodeParam.thumb_dim.src_dim.width = src_dim.height;
+           encodeParam.thumb_dim.src_dim.height = src_dim.width;
+
+           int32_t temp = encodeParam.src_main_buf[0].offset.mp[0].stride;
+           encodeParam.src_main_buf[0].offset.mp[0].stride =
+              encodeParam.src_main_buf[0].offset.mp[0].scanline;
+           encodeParam.src_main_buf[0].offset.mp[0].scanline = temp;
+
+           temp = encodeParam.src_thumb_buf[0].offset.mp[0].stride;
+           encodeParam.src_thumb_buf[0].offset.mp[0].stride =
+              encodeParam.src_thumb_buf[0].offset.mp[0].scanline;
+           encodeParam.src_thumb_buf[0].offset.mp[0].scanline = temp;
+        } else {
+           encodeParam.main_dim.src_dim  = src_dim;
+           encodeParam.thumb_dim.src_dim = src_dim;
+        }
+        encodeParam.main_dim.dst_dim = dst_dim;
+        encodeParam.thumb_dim.dst_dim = jpeg_settings->thumbnail_size;
+
+        if (needJpegExifRotation) {
+            encodeParam.thumb_rotation = (uint32_t)jpeg_settings->jpeg_orientation;
+        }
+
+        LOGI("Src Buffer cnt = %d, res = %dX%d len = %d rot = %d "
+            "src_dim = %dX%d dst_dim = %dX%d",
+            encodeParam.num_src_bufs,
+            encodeParam.src_main_buf[0].offset.mp[0].stride,
+            encodeParam.src_main_buf[0].offset.mp[0].scanline,
+            encodeParam.src_main_buf[0].offset.frame_len,
+            encodeParam.rotation,
+            src_dim.width, src_dim.height,
+            dst_dim.width, dst_dim.height);
+        LOGI("Src THUMB buf_cnt = %d, res = %dX%d len = %d rot = %d "
+            "src_dim = %dX%d, dst_dim = %dX%d",
+            encodeParam.num_tmb_bufs,
+            encodeParam.src_thumb_buf[0].offset.mp[0].stride,
+            encodeParam.src_thumb_buf[0].offset.mp[0].scanline,
+            encodeParam.src_thumb_buf[0].offset.frame_len,
+            encodeParam.thumb_rotation,
+            encodeParam.thumb_dim.src_dim.width,
+            encodeParam.thumb_dim.src_dim.height,
+            encodeParam.thumb_dim.dst_dim.width,
+            encodeParam.thumb_dim.dst_dim.height);
+        ret = mJpegHandle.create_session(mJpegClientHandle, &encodeParam, &mJpegSessionId);
+        if (ret != NO_ERROR) {
+            LOGE("Error creating a new jpeg encoding session, ret = %d", ret);
+            return ret;
+        }
+        needNewSess = FALSE;
+    }
+
+    // Fill in new job
+    memset(&jpg_job, 0, sizeof(mm_jpeg_job_t));
+    jpg_job.job_type = JPEG_JOB_TYPE_ENCODE;
+    jpg_job.encode_job.session_id = mJpegSessionId;
+    jpg_job.encode_job.src_index = (int32_t)main_frame->buf_idx;
+    jpg_job.encode_job.dst_index = 0;
+
+    cam_rect_t crop;
+    memset(&crop, 0, sizeof(cam_rect_t));
+    //TBD_later - Zoom event removed in stream
+    //main_stream->getCropInfo(crop);
+
+    // Set main dim job parameters and handle rotation
+    if (!needJpegExifRotation && (jpeg_settings->jpeg_orientation == 90 ||
+            jpeg_settings->jpeg_orientation == 270)) {
+
+        jpg_job.encode_job.main_dim.src_dim.width = src_dim.height;
+        jpg_job.encode_job.main_dim.src_dim.height = src_dim.width;
+
+        jpg_job.encode_job.main_dim.dst_dim.width = dst_dim.height;
+        jpg_job.encode_job.main_dim.dst_dim.height = dst_dim.width;
+
+        jpg_job.encode_job.main_dim.crop.width = crop.height;
+        jpg_job.encode_job.main_dim.crop.height = crop.width;
+        jpg_job.encode_job.main_dim.crop.left = crop.top;
+        jpg_job.encode_job.main_dim.crop.top = crop.left;
+    } else {
+        jpg_job.encode_job.main_dim.src_dim = src_dim;
+        jpg_job.encode_job.main_dim.dst_dim = dst_dim;
+        jpg_job.encode_job.main_dim.crop = crop;
+    }
+
+    // get 3a sw version info
+    cam_q3a_version_t sw_version;
+    memset(&sw_version, 0, sizeof(sw_version));
+
+    if (hal_obj)
+        hal_obj->get3AVersion(sw_version);
+
+    // get exif data
+    QCamera3Exif *pJpegExifObj = getExifData(metadata, jpeg_settings, needJpegExifRotation);
+    jpeg_job_data->pJpegExifObj = pJpegExifObj;
+    if (pJpegExifObj != NULL) {
+        jpg_job.encode_job.exif_info.exif_data = pJpegExifObj->getEntries();
+        jpg_job.encode_job.exif_info.numOfEntries =
+            pJpegExifObj->getNumOfEntries();
+        jpg_job.encode_job.exif_info.debug_data.sw_3a_version[0] =
+            sw_version.major_version;
+        jpg_job.encode_job.exif_info.debug_data.sw_3a_version[1] =
+            sw_version.minor_version;
+        jpg_job.encode_job.exif_info.debug_data.sw_3a_version[2] =
+            sw_version.patch_version;
+        jpg_job.encode_job.exif_info.debug_data.sw_3a_version[3] =
+            sw_version.new_feature_des;
+    }
+
+    // thumbnail dim
+    LOGH("Thumbnail needed:%d", m_bThumbnailNeeded);
+    if (m_bThumbnailNeeded == TRUE) {
+        jpg_job.encode_job.thumb_dim.dst_dim =
+                jpeg_settings->thumbnail_size;
+
+      if (!needJpegExifRotation &&
+          (jpeg_settings->jpeg_orientation  == 90 ||
+           jpeg_settings->jpeg_orientation == 270)) {
+            //swap the thumbnail destination width and height if it has
+            //already been rotated
+            int temp = jpg_job.encode_job.thumb_dim.dst_dim.width;
+            jpg_job.encode_job.thumb_dim.dst_dim.width =
+                    jpg_job.encode_job.thumb_dim.dst_dim.height;
+            jpg_job.encode_job.thumb_dim.dst_dim.height = temp;
+
+            jpg_job.encode_job.thumb_dim.src_dim.width = src_dim.height;
+            jpg_job.encode_job.thumb_dim.src_dim.height = src_dim.width;
+
+            jpg_job.encode_job.thumb_dim.crop.width = crop.height;
+            jpg_job.encode_job.thumb_dim.crop.height = crop.width;
+            jpg_job.encode_job.thumb_dim.crop.left = crop.top;
+            jpg_job.encode_job.thumb_dim.crop.top = crop.left;
+        } else {
+           jpg_job.encode_job.thumb_dim.src_dim = src_dim;
+           jpg_job.encode_job.thumb_dim.crop = crop;
+        }
+        jpg_job.encode_job.thumb_index = main_frame->buf_idx;
+        LOGI("Thumbnail idx = %d src w/h (%dx%d), dst w/h (%dx%d)",
+                jpg_job.encode_job.thumb_index,
+                jpg_job.encode_job.thumb_dim.src_dim.width,
+                jpg_job.encode_job.thumb_dim.src_dim.height,
+                jpg_job.encode_job.thumb_dim.dst_dim.width,
+                jpg_job.encode_job.thumb_dim.dst_dim.height);
+    }
+    LOGI("Main image idx = %d src w/h (%dx%d), dst w/h (%dx%d)",
+            jpg_job.encode_job.src_index,
+            jpg_job.encode_job.main_dim.src_dim.width,
+            jpg_job.encode_job.main_dim.src_dim.height,
+            jpg_job.encode_job.main_dim.dst_dim.width,
+            jpg_job.encode_job.main_dim.dst_dim.height);
+
+    jpg_job.encode_job.cam_exif_params = hal_obj->get3AExifParams();
+    exif_debug_params = jpg_job.encode_job.cam_exif_params.debug_params;
+
+    // Allocate for a local copy of debug parameters
+    jpg_job.encode_job.cam_exif_params.debug_params =
+            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
+    if (!jpg_job.encode_job.cam_exif_params.debug_params) {
+        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
+        return NO_MEMORY;
+    }
+
+    jpg_job.encode_job.mobicat_mask = hal_obj->getMobicatMask();
+
+    if (metadata != NULL) {
+       //Fill in the metadata passed as parameter
+       jpg_job.encode_job.p_metadata = metadata;
+
+       jpg_job.encode_job.p_metadata->is_mobicat_aec_params_valid =
+                jpg_job.encode_job.cam_exif_params.cam_3a_params_valid;
+
+       if (jpg_job.encode_job.cam_exif_params.cam_3a_params_valid) {
+            jpg_job.encode_job.p_metadata->mobicat_aec_params =
+                jpg_job.encode_job.cam_exif_params.cam_3a_params;
+       }
+
+       if (exif_debug_params) {
+            // Copy debug parameters locally.
+           memcpy(jpg_job.encode_job.cam_exif_params.debug_params,
+                   exif_debug_params, (sizeof(mm_jpeg_debug_exif_params_t)));
+           /* Save a copy of 3A debug params */
+            jpg_job.encode_job.p_metadata->is_statsdebug_ae_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->ae_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_awb_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->awb_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_af_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->af_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_asd_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->asd_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_stats_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_bestats_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->bestats_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_bhist_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->bhist_debug_params_valid;
+            jpg_job.encode_job.p_metadata->is_statsdebug_3a_tuning_params_valid =
+                    jpg_job.encode_job.cam_exif_params.debug_params->q3a_tuning_debug_params_valid;
+
+            if (jpg_job.encode_job.cam_exif_params.debug_params->ae_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_ae_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->ae_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->awb_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_awb_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->awb_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->af_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_af_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->af_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->asd_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_asd_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->asd_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_stats_buffer_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->stats_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->bestats_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_bestats_buffer_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->bestats_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->bhist_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_bhist_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->bhist_debug_params;
+            }
+            if (jpg_job.encode_job.cam_exif_params.debug_params->q3a_tuning_debug_params_valid) {
+                jpg_job.encode_job.p_metadata->statsdebug_3a_tuning_data =
+                        jpg_job.encode_job.cam_exif_params.debug_params->q3a_tuning_debug_params;
+            }
+        }
+    } else {
+       LOGW("Metadata is null");
+    }
+
+    // Multi image info
+    if (hal_obj->isDeviceLinked() == TRUE) {
+        jpg_job.encode_job.multi_image_info.type = MM_JPEG_TYPE_JPEG;
+        jpg_job.encode_job.multi_image_info.num_of_images = 1;
+        jpg_job.encode_job.multi_image_info.enable_metadata = 1;
+        if (hal_obj->isMainCamera() == TRUE) {
+            jpg_job.encode_job.multi_image_info.is_primary = 1;
+        } else {
+            jpg_job.encode_job.multi_image_info.is_primary = 0;
+        }
+    }
+
+    jpg_job.encode_job.hal_version = CAM_HAL_V3;
+
+    //Start jpeg encoding
+    ret = mJpegHandle.start_job(&jpg_job, &jobId);
+    if (jpg_job.encode_job.cam_exif_params.debug_params) {
+        free(jpg_job.encode_job.cam_exif_params.debug_params);
+    }
+    if (ret == NO_ERROR) {
+        // remember job info
+        jpeg_job_data->jobId = jobId;
+    }
+
+    LOGD("X");
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : dataProcessRoutine
+ *
+ * DESCRIPTION: data process routine that handles input data either from input
+ *              Jpeg Queue to do jpeg encoding, or from input PP Queue to do
+ *              reprocess.
+ *              Runs as a dedicated command thread: it blocks on the command
+ *              semaphore, then reacts to START / STOP / DO_NEXT_JOB / EXIT
+ *              commands until EXIT clears the running flag.
+ *
+ * PARAMETERS :
+ *   @data    : user data ptr (QCamera3PostProcessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void *QCamera3PostProcessor::dataProcessRoutine(void *data)
+{
+    int running = 1;
+    int ret;
+    // is_active mirrors START/STOP commands; jobs are only consumed while TRUE
+    uint8_t is_active = FALSE;
+    uint8_t needNewSess = TRUE;
+    mm_camera_super_buf_t *meta_buffer = NULL;
+    LOGD("E");
+    QCamera3PostProcessor *pme = (QCamera3PostProcessor *)data;
+    QCameraCmdThread *cmdThread = &pme->m_dataProcTh;
+    cmdThread->setName("cam_data_proc");
+
+    do {
+        do {
+            // block until a command is posted; retry on signal interruption
+            ret = cam_sem_wait(&cmdThread->cmd_sem);
+            if (ret != 0 && errno != EINVAL) {
+                LOGE("cam_sem_wait error (%s)",
+                            strerror(errno));
+                return NULL;
+            }
+        } while (ret != 0);
+
+        // we got notified about new cmd avail in cmd queue
+        camera_cmd_type_t cmd = cmdThread->getCmd();
+        switch (cmd) {
+        case CAMERA_CMD_TYPE_START_DATA_PROC:
+            LOGH("start data proc");
+            is_active = TRUE;
+            needNewSess = TRUE;
+
+            // (re)initialize all work queues before accepting jobs
+            pme->m_ongoingPPQ.init();
+            pme->m_inputJpegQ.init();
+            pme->m_inputPPQ.init();
+            pme->m_inputFWKPPQ.init();
+            pme->m_inputMetaQ.init();
+            // signal the caller that START has completed
+            cam_sem_post(&cmdThread->sync_sem);
+
+            break;
+        case CAMERA_CMD_TYPE_STOP_DATA_PROC:
+            {
+                LOGH("stop data proc");
+                is_active = FALSE;
+
+                // cancel all ongoing jpeg jobs
+                qcamera_hal3_jpeg_data_t *jpeg_job =
+                    (qcamera_hal3_jpeg_data_t *)pme->m_ongoingJpegQ.dequeue();
+                while (jpeg_job != NULL) {
+                    pme->mJpegHandle.abort_job(jpeg_job->jobId);
+
+                    // release buffers held by the job, then the job struct itself
+                    pme->releaseJpegJobData(jpeg_job);
+                    free(jpeg_job);
+
+                    jpeg_job = (qcamera_hal3_jpeg_data_t *)pme->m_ongoingJpegQ.dequeue();
+                }
+
+                // destroy jpeg encoding session
+                if ( 0 < pme->mJpegSessionId ) {
+                    pme->mJpegHandle.destroy_session(pme->mJpegSessionId);
+                    pme->mJpegSessionId = 0;
+                }
+
+                needNewSess = TRUE;
+
+                // flush ongoing postproc Queue
+                pme->m_ongoingPPQ.flush();
+
+                // flush input jpeg Queue
+                pme->m_inputJpegQ.flush();
+
+                // flush input Postproc Queue
+                pme->m_inputPPQ.flush();
+
+                // flush framework input Postproc Queue
+                pme->m_inputFWKPPQ.flush();
+
+                pme->m_inputMetaQ.flush();
+
+                // signal cmd is completed
+                cam_sem_post(&cmdThread->sync_sem);
+            }
+            break;
+        case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+            {
+                LOGH("Do next job, active is %d", is_active);
+                /* needNewSess is set to TRUE as postproc is not re-STARTed
+                 * anymore for every captureRequest */
+                needNewSess = TRUE;
+                if (is_active == TRUE) {
+                    // check if there is any ongoing jpeg jobs
+                    if (pme->m_ongoingJpegQ.isEmpty()) {
+                        LOGD("ongoing jpeg queue is empty so doing the jpeg job");
+                        // no ongoing jpeg job, we are fine to send jpeg encoding job
+                        qcamera_hal3_jpeg_data_t *jpeg_job =
+                            (qcamera_hal3_jpeg_data_t *)pme->m_inputJpegQ.dequeue();
+
+                        if (NULL != jpeg_job) {
+                            // add into ongoing jpeg job Q
+                            pme->m_ongoingJpegQ.enqueue((void *)jpeg_job);
+
+                            // framework-sourced frames use the offline encode path
+                            if (jpeg_job->fwk_frame) {
+                                ret = pme->encodeFWKData(jpeg_job, needNewSess);
+                            } else {
+                                ret = pme->encodeData(jpeg_job, needNewSess);
+                            }
+                            if (NO_ERROR != ret) {
+                                // dequeue the last one
+                                pme->m_ongoingJpegQ.dequeue(false);
+
+                                // encode failed: release job data and struct here
+                                pme->releaseJpegJobData(jpeg_job);
+                                free(jpeg_job);
+                            }
+                        }
+                    }
+
+                    // check if there are any framework pp jobs
+                    if (!pme->m_inputFWKPPQ.isEmpty()) {
+                        qcamera_fwk_input_pp_data_t *fwk_frame =
+                                (qcamera_fwk_input_pp_data_t *) pme->m_inputFWKPPQ.dequeue();
+                        if (NULL != fwk_frame) {
+                            qcamera_hal3_pp_data_t *pp_job =
+                                    (qcamera_hal3_pp_data_t *)malloc(sizeof(qcamera_hal3_pp_data_t));
+                            jpeg_settings_t *jpeg_settings =
+                                    (jpeg_settings_t *)pme->m_jpegSettingsQ.dequeue();
+                            // NOTE(review): if pp_job allocation fails below, the
+                            // dequeued jpeg_settings is not freed on this path —
+                            // confirm ownership/leak with the queue's release policy
+                            if (pp_job != NULL) {
+                                memset(pp_job, 0, sizeof(qcamera_hal3_pp_data_t));
+                                pp_job->jpeg_settings = jpeg_settings;
+                                if (pme->m_pReprocChannel != NULL) {
+                                    if (NO_ERROR != pme->m_pReprocChannel->overrideFwkMetadata(fwk_frame)) {
+                                        LOGE("Failed to extract output crop");
+                                    }
+                                    // add into ongoing PP job Q
+                                    pp_job->fwk_src_frame = fwk_frame;
+                                    pme->m_ongoingPPQ.enqueue((void *)pp_job);
+                                    ret = pme->m_pReprocChannel->doReprocessOffline(fwk_frame);
+                                    if (NO_ERROR != ret) {
+                                        // remove from ongoing PP job Q
+                                        pme->m_ongoingPPQ.dequeue(false);
+                                    }
+                                } else {
+                                    LOGE("Reprocess channel is NULL");
+                                    ret = -1;
+                                }
+                            } else {
+                                LOGE("no mem for qcamera_hal3_pp_data_t");
+                                ret = -1;
+                            }
+
+                            if (0 != ret) {
+                                // free pp_job
+                                if (pp_job != NULL) {
+                                    free(pp_job);
+                                }
+                                // free frame
+                                if (fwk_frame != NULL) {
+                                    free(fwk_frame);
+                                }
+                            }
+                        }
+                    }
+
+                    LOGH("dequeuing pp frame");
+                    // mReprocJobLock guards the paired dequeue of the PP input,
+                    // metadata and jpeg-settings queues
+                    pthread_mutex_lock(&pme->mReprocJobLock);
+                    if(!pme->m_inputPPQ.isEmpty() && !pme->m_inputMetaQ.isEmpty()) {
+                        qcamera_hal3_pp_buffer_t *pp_buffer =
+                            (qcamera_hal3_pp_buffer_t *)pme->m_inputPPQ.dequeue();
+                        meta_buffer =
+                            (mm_camera_super_buf_t *)pme->m_inputMetaQ.dequeue();
+                        jpeg_settings_t *jpeg_settings =
+                           (jpeg_settings_t *)pme->m_jpegSettingsQ.dequeue();
+                        pthread_mutex_unlock(&pme->mReprocJobLock);
+                        qcamera_hal3_pp_data_t *pp_job =
+                            (qcamera_hal3_pp_data_t *)malloc(sizeof(qcamera_hal3_pp_data_t));
+                        if (pp_job == NULL) {
+                            LOGE("no mem for qcamera_hal3_pp_data_t");
+                            ret = -1;
+                        } else if (meta_buffer == NULL) {
+                            LOGE("failed to dequeue from m_inputMetaQ");
+                            ret = -1;
+                        } else if (pp_buffer == NULL) {
+                            LOGE("failed to dequeue from m_inputPPQ");
+                            ret = -1;
+                        } else if (pp_buffer != NULL){
+                            // (pp_buffer is non-NULL here; this guard is
+                            // redundant with the check above)
+                            memset(pp_job, 0, sizeof(qcamera_hal3_pp_data_t));
+                            pp_job->src_frame = pp_buffer->input;
+                            pp_job->src_metadata = meta_buffer;
+                            if (meta_buffer->bufs[0] != NULL) {
+                                pp_job->metadata = (metadata_buffer_t *)
+                                        meta_buffer->bufs[0]->buffer;
+                            }
+                            pp_job->jpeg_settings = jpeg_settings;
+                            // ownership of src frame/metadata moves to the PP job
+                            pme->m_ongoingPPQ.enqueue((void *)pp_job);
+                            if (pme->m_pReprocChannel != NULL) {
+                                mm_camera_buf_def_t *meta_buffer_arg = NULL;
+                                meta_buffer_arg = meta_buffer->bufs[0];
+                                qcamera_fwk_input_pp_data_t fwk_frame;
+                                memset(&fwk_frame, 0, sizeof(qcamera_fwk_input_pp_data_t));
+                                fwk_frame.frameNumber = pp_buffer->frameNumber;
+                                ret = pme->m_pReprocChannel->overrideMetadata(
+                                        pp_buffer, meta_buffer_arg,
+                                        pp_job->jpeg_settings,
+                                        fwk_frame);
+                                if (NO_ERROR == ret) {
+                                    // add into ongoing PP job Q
+                                    ret = pme->m_pReprocChannel->doReprocessOffline(
+                                            &fwk_frame, true);
+                                    if (NO_ERROR != ret) {
+                                        // remove from ongoing PP job Q
+                                        pme->m_ongoingPPQ.dequeue(false);
+                                    }
+                                }
+                            } else {
+                                LOGE("No reprocess. Calling processPPData directly");
+                                ret = pme->processPPData(pp_buffer->input);
+                            }
+                        }
+
+                        if (0 != ret) {
+                            // failure: unwind everything dequeued above
+                            // free pp_job
+                            if (pp_job != NULL) {
+                                free(pp_job);
+                            }
+                            // free frame
+                            if (pp_buffer != NULL) {
+                                if (pp_buffer->input) {
+                                    pme->releaseSuperBuf(pp_buffer->input);
+                                    free(pp_buffer->input);
+                                }
+                                free(pp_buffer);
+                            }
+                            //free metadata
+                            if (NULL != meta_buffer) {
+                                pme->m_parent->metadataBufDone(meta_buffer);
+                                free(meta_buffer);
+                            }
+                        } else {
+                            // success: only the wrapper is freed; its contents
+                            // are now owned by the ongoing PP job
+                            if (pp_buffer != NULL) {
+                                free(pp_buffer);
+                            }
+                        }
+                    } else {
+                        pthread_mutex_unlock(&pme->mReprocJobLock);
+                    }
+                } else {
+                    // not active, simply return buf and do no op
+                    qcamera_hal3_jpeg_data_t *jpeg_job =
+                        (qcamera_hal3_jpeg_data_t *)pme->m_inputJpegQ.dequeue();
+                    if (NULL != jpeg_job) {
+                        // NOTE(review): job struct freed without
+                        // releaseJpegJobData() — confirm the held buffers are
+                        // returned elsewhere on the STOP path
+                        free(jpeg_job);
+                    }
+
+                    qcamera_hal3_pp_buffer_t* pp_buf =
+                            (qcamera_hal3_pp_buffer_t *)pme->m_inputPPQ.dequeue();
+                    if (NULL != pp_buf) {
+                        if (pp_buf->input) {
+                            pme->releaseSuperBuf(pp_buf->input);
+                            free(pp_buf->input);
+                            pp_buf->input = NULL;
+                        }
+                        free(pp_buf);
+                    }
+                    mm_camera_super_buf_t *metadata = (mm_camera_super_buf_t *)pme->m_inputMetaQ.dequeue();
+                    if (metadata != NULL) {
+                        pme->m_parent->metadataBufDone(metadata);
+                        free(metadata);
+                    }
+                    qcamera_fwk_input_pp_data_t *fwk_frame =
+                            (qcamera_fwk_input_pp_data_t *) pme->m_inputFWKPPQ.dequeue();
+                    if (NULL != fwk_frame) {
+                        free(fwk_frame);
+                    }
+                }
+            }
+            break;
+        case CAMERA_CMD_TYPE_EXIT:
+            running = 0;
+            break;
+        default:
+            break;
+        }
+    } while (running);
+    LOGD("X");
+    return NULL;
+}
+
+/* EXIF related helper methods */
+
+/*===========================================================================
+ * FUNCTION   : getRational
+ *
+ * DESCRIPTION: compose rational struct
+ *
+ * PARAMETERS :
+ *   @rat     : ptr to struct to store rational info
+ *   @num     : numerator of the rational (must be non-negative)
+ *   @denom   : denominator of the rational (must be positive)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t getRational(rat_t *rat, int num, int denom)
+{
+    // Validate the output pointer before anything else.
+    if (NULL == rat) {
+        LOGE("NULL rat input");
+        return BAD_VALUE;
+    }
+    // Reject negative numerators and non-positive denominators: the values
+    // are stored as unsigned, and a zero denominator is not a valid rational.
+    // (The old "Negative values" message also fired for denom == 0 and did
+    // not log the offending values.)
+    if ((0 > num) || (0 >= denom)) {
+        LOGE("Invalid rational: num=%d denom=%d", num, denom);
+        return BAD_VALUE;
+    }
+    rat->num = (uint32_t)num;
+    rat->denom = (uint32_t)denom;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : parseGPSCoordinate
+ *
+ * DESCRIPTION: parse GPS coordinate string into degree/minute/second
+ *              rationals (coord[0..2])
+ *
+ * PARAMETERS :
+ *   @coord_str : [input] coordinate string (decimal degrees)
+ *   @coord     : [output] ptr to array of 3 rat_t (deg, min, sec*10000/10000)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int parseGPSCoordinate(const char *coord_str, rat_t* coord)
+{
+    if(coord == NULL) {
+        LOGE("error, invalid argument coord == NULL");
+        return BAD_VALUE;
+    }
+    // atof(NULL) is undefined behavior; guard the input string as well.
+    if(coord_str == NULL) {
+        LOGE("error, invalid argument coord_str == NULL");
+        return BAD_VALUE;
+    }
+    double degF = atof(coord_str);
+    if (degF < 0) {
+        // sign is carried separately by the N/S or E/W reference character
+        degF = -degF;
+    }
+    double minF = (degF - (int) degF) * 60;
+    double secF = (minF - (int) minF) * 60;
+
+    // Propagate conversion failures instead of silently ignoring them;
+    // seconds are stored with 4 decimal digits of precision.
+    int32_t rc = getRational(&coord[0], (int)degF, 1);
+    if (NO_ERROR == rc) {
+        rc = getRational(&coord[1], (int)minF, 1);
+    }
+    if (NO_ERROR == rc) {
+        rc = getRational(&coord[2], (int)(secF * 10000), 10000);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifDateTime
+ *
+ * DESCRIPTION: build the EXIF date/time and sub-second strings from the
+ *              current system time
+ *
+ * PARAMETERS :
+ *   @dateTime   : string to store exif date time ("YYYY:MM:DD HH:MM:SS")
+ *   @subsecTime : string to store exif subsec time (microseconds, 6 digits)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t getExifDateTime(String8 &dateTime, String8 &subsecTime)
+{
+    struct timeval tv;
+    struct tm tmBuf;
+
+    // Query wall-clock time; handle the documented -1 failure and any
+    // unexpected return code separately.
+    int res = gettimeofday(&tv, NULL);
+    if (-1 == res) {
+        LOGE("gettimeofday() error: %s", strerror(errno));
+        return UNKNOWN_ERROR;
+    }
+    if (0 != res) {
+        LOGE("gettimeofday() unexpected return code: %d", res);
+        return UNKNOWN_ERROR;
+    }
+
+    // Convert to local time using the reentrant variant.
+    struct tm *localTime = localtime_r(&tv.tv_sec, &tmBuf);
+    if (NULL == localTime) {
+        LOGE("localtime_r() error");
+        return UNKNOWN_ERROR;
+    }
+
+    // EXIF datetime format: "YYYY:MM:DD HH:MM:SS" (20 chars incl. \0)
+    dateTime = String8::format("%04d:%02d:%02d %02d:%02d:%02d",
+            localTime->tm_year + 1900, localTime->tm_mon + 1,
+            localTime->tm_mday, localTime->tm_hour,
+            localTime->tm_min, localTime->tm_sec);
+    // EXIF sub-second time: microsecond component, zero-padded to 6 digits
+    subsecTime = String8::format("%06ld", tv.tv_usec);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifFocalLength
+ *
+ * DESCRIPTION: convert a floating-point focal length into an EXIF rational
+ *
+ * PARAMETERS :
+ *   @focalLength : ptr to rational struct to store focal length
+ *   @value       : focal length value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t getExifFocalLength(rat_t *focalLength, float value)
+{
+    // Scale into fixed-point so the fractional part survives the rational
+    // encoding, then store as scaled/precision.
+    int scaled = (int)(value * FOCAL_LENGTH_DECIMAL_PRECISION);
+    return getRational(focalLength, scaled, FOCAL_LENGTH_DECIMAL_PRECISION);
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifExpTimeInfo
+ *
+ * DESCRIPTION: get exif exposure time information
+ *
+ * PARAMETERS :
+ *   @expoTimeInfo : rational exposure time value (stored as 1/value)
+ *   @value        : exposure time value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t getExifExpTimeInfo(rat_t *expoTimeInfo, int64_t value)
+{
+    // Fall back to 1/60 when no exposure value was reported.
+    int64_t expTime = (0 == value) ? 60 : value;
+
+    // NOTE(review): narrowing int64 -> int here; assumes value fits in an
+    // int for real exposure settings — confirm the units used by callers.
+    return getRational(expoTimeInfo, 1, (int)expTime);
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifGpsProcessingMethod
+ *
+ * DESCRIPTION: get GPS processing method (ASCII-prefixed EXIF string)
+ *
+ * PARAMETERS :
+ *   @gpsProcessingMethod : buffer to store GPS process method
+ *                          (must hold EXIF_ASCII_PREFIX_SIZE +
+ *                           GPS_PROCESSING_METHOD_SIZE bytes)
+ *   @count               : [output] total bytes written, incl. trailing NUL
+ *   @value               : the value of the processing method
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t getExifGpsProcessingMethod(char *gpsProcessingMethod,
+        uint32_t &count, char* value)
+{
+    if(value != NULL) {
+        memcpy(gpsProcessingMethod, ExifAsciiPrefix, EXIF_ASCII_PREFIX_SIZE);
+        count = EXIF_ASCII_PREFIX_SIZE;
+        // strlcpy returns the length of the SOURCE string, not the number of
+        // bytes copied. The previous code advanced count by strlen(value),
+        // so a value longer than GPS_PROCESSING_METHOD_SIZE pushed count past
+        // the destination and the '\0' store below became an out-of-bounds
+        // write. Clamp to the number of characters actually copied.
+        size_t len = strlcpy(gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE,
+                value,
+                GPS_PROCESSING_METHOD_SIZE);
+        if (len >= GPS_PROCESSING_METHOD_SIZE) {
+            len = GPS_PROCESSING_METHOD_SIZE - 1; // truncated by strlcpy
+        }
+        count += (uint32_t)len;
+        gpsProcessingMethod[count++] = '\0'; // increase 1 for the last NULL char
+        return NO_ERROR;
+    } else {
+        return BAD_VALUE;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifLatitude
+ *
+ * DESCRIPTION: get exif latitude
+ *
+ * PARAMETERS :
+ *   @latitude : ptr to rational struct to store latitude info
+ *   @latRef   : character to indicate latitude reference
+ *   @value    : value of the latitude
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t getExifLatitude(rat_t *latitude, char *latRef, double value)
+{
+    // Render the coordinate as text; parseGPSCoordinate consumes the
+    // string form.
+    char coord[30];
+    snprintf(coord, sizeof(coord), "%f", value);
+    if (coord[0] == '\0') {
+        return BAD_VALUE;
+    }
+
+    parseGPSCoordinate(coord, latitude);
+
+    // Hemisphere reference: 'S' for negative latitudes, 'N' otherwise.
+    latRef[0] = (strtof(coord, 0) < 0.0f) ? 'S' : 'N';
+    latRef[1] = '\0';
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifLongitude
+ *
+ * DESCRIPTION: get exif longitude
+ *
+ * PARAMETERS :
+ *   @longitude : ptr to rational struct to store longitude info
+ *   @lonRef    : character to indicate longitude reference
+ *   @value     : value of the longitude
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t getExifLongitude(rat_t *longitude, char *lonRef, double value)
+{
+    // Render the coordinate as text; parseGPSCoordinate consumes the
+    // string form.
+    char coord[30];
+    snprintf(coord, sizeof(coord), "%f", value);
+    if (coord[0] == '\0') {
+        return BAD_VALUE;
+    }
+
+    parseGPSCoordinate(coord, longitude);
+
+    // Hemisphere reference: 'W' for negative longitudes, 'E' otherwise.
+    lonRef[0] = (strtof(coord, 0) < 0.0f) ? 'W' : 'E';
+    lonRef[1] = '\0';
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifAltitude
+ *
+ * DESCRIPTION: get exif altitude
+ *
+ * PARAMETERS :
+ *   @altitude : ptr to rational struct to store altitude info
+ *   @altRef   : character to indicate altitude reference
+ *   @argValue : altitude value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t getExifAltitude(rat_t *altitude, char *altRef, double argValue)
+{
+    char buf[30];
+    snprintf(buf, sizeof(buf), "%f", argValue);
+    if (buf[0] == '\0') {
+        return BAD_VALUE;
+    }
+
+    double meters = atof(buf);
+    // EXIF altitude reference flag: 0 for non-negative altitude, 1 for
+    // below reference (the magnitude is stored positive).
+    if (meters < 0) {
+        *altRef = 1;
+        meters = -meters;
+    } else {
+        *altRef = 0;
+    }
+    // Express the value as a rational with 1/1000 resolution.
+    return getRational(altitude, (int)(meters * 1000), 1000);
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifGpsDateTimeStamp
+ *
+ * DESCRIPTION: get exif GPS date time stamp
+ *
+ * PARAMETERS :
+ *   @gpsDateStamp : GPS date time stamp string
+ *   @bufLen       : length of the string
+ *   @gpsTimeStamp : ptr to rational struct to store time stamp info
+ *   @value        : timestamp value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t getExifGpsDateTimeStamp(char *gpsDateStamp, uint32_t bufLen,
+        rat_t *gpsTimeStamp, int64_t value)
+{
+    char str[30];
+    snprintf(str, sizeof(str), "%lld", (long long int)value);
+    if(str[0] != '\0') {
+        time_t unixTime = (time_t)atol(str);
+        // Use the reentrant gmtime_r(): gmtime() returns a shared static
+        // buffer and is not thread-safe, which matters in a HAL where
+        // several capture threads can stamp EXIF concurrently.
+        struct tm result;
+        struct tm *UTCTimestamp = gmtime_r(&unixTime, &result);
+        if (UTCTimestamp != NULL && gpsDateStamp != NULL
+                && gpsTimeStamp != NULL) {
+            strftime(gpsDateStamp, bufLen, "%Y:%m:%d", UTCTimestamp);
+
+            // GPS timestamp is three rationals: hours, minutes, seconds.
+            getRational(&gpsTimeStamp[0], UTCTimestamp->tm_hour, 1);
+            getRational(&gpsTimeStamp[1], UTCTimestamp->tm_min, 1);
+            getRational(&gpsTimeStamp[2], UTCTimestamp->tm_sec, 1);
+            return NO_ERROR;
+        } else {
+            LOGE("Could not get the timestamp");
+            return BAD_VALUE;
+        }
+    } else {
+        return BAD_VALUE;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifExposureValue
+ *
+ * DESCRIPTION: get exif GPS date time stamp
+ *
+ * PARAMETERS :
+ *   @exposure_val        : rational exposure value
+ *   @exposure_comp       : exposure compensation
+ *   @step                : exposure step
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t getExifExposureValue(srat_t* exposure_val, int32_t exposure_comp,
+        cam_rational_type_t step)
+{
+    // Exposure bias = compensation steps * EV step size, kept as a
+    // signed rational (EXIF ExposureBiasValue).
+    exposure_val->num = exposure_comp * step.numerator;
+    exposure_val->denom = step.denominator;
+    // Return NO_ERROR for consistency with the other getExif* helpers
+    // (numerically identical to the literal 0 returned previously).
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifData
+ *
+ * DESCRIPTION: get exif data to be passed into jpeg encoding
+ *
+ * PARAMETERS :
+ * @metadata      : metadata of the encoding request
+ * @jpeg_settings : jpeg_settings for encoding
+ * @needJpegExifRotation: check if rotation need to added in EXIF
+ *
+ * RETURN     : exif data from user setting and GPS
+ *==========================================================================*/
+QCamera3Exif *QCamera3PostProcessor::getExifData(metadata_buffer_t *metadata,
+        jpeg_settings_t *jpeg_settings, bool needJpegExifRotation)
+{
+    QCamera3Exif *exif = new QCamera3Exif();
+    if (exif == NULL) {
+        LOGE("No memory for QCamera3Exif");
+        return NULL;
+    }
+    if (m_parent == NULL) {
+        LOGE("m_parent is NULL, Error");
+        // Fix: release the freshly allocated exif object instead of
+        // leaking it on this error path.  (The previous hal_obj local
+        // derived from m_parent was never used and has been removed.)
+        delete exif;
+        return NULL;
+    }
+
+    int32_t rc = NO_ERROR;
+    uint32_t count = 0;
+
+    // add exif entries
+    String8 dateTime;
+    String8 subsecTime;
+    rc = getExifDateTime(dateTime, subsecTime);
+    if (rc == NO_ERROR) {
+        exif->addEntry(EXIFTAGID_DATE_TIME, EXIF_ASCII,
+                (uint32_t)(dateTime.length() + 1), (void *)dateTime.string());
+        exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_ORIGINAL, EXIF_ASCII,
+                (uint32_t)(dateTime.length() + 1), (void *)dateTime.string());
+        exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_DIGITIZED, EXIF_ASCII,
+                (uint32_t)(dateTime.length() + 1), (void *)dateTime.string());
+        exif->addEntry(EXIFTAGID_SUBSEC_TIME, EXIF_ASCII,
+                (uint32_t)(subsecTime.length() + 1), (void *)subsecTime.string());
+        exif->addEntry(EXIFTAGID_SUBSEC_TIME_ORIGINAL, EXIF_ASCII,
+                (uint32_t)(subsecTime.length() + 1), (void *)subsecTime.string());
+        exif->addEntry(EXIFTAGID_SUBSEC_TIME_DIGITIZED, EXIF_ASCII,
+                (uint32_t)(subsecTime.length() + 1), (void *)subsecTime.string());
+    } else {
+        LOGW("getExifDateTime failed");
+    }
+
+
+    if (metadata != NULL) {
+        IF_META_AVAILABLE(float, focal_length, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
+            rat_t focalLength;
+            rc = getExifFocalLength(&focalLength, *focal_length);
+            if (rc == NO_ERROR) {
+                exif->addEntry(EXIFTAGID_FOCAL_LENGTH,
+                        EXIF_RATIONAL,
+                        1,
+                        (void *)&(focalLength));
+            } else {
+                LOGW("getExifFocalLength failed");
+            }
+        }
+
+        IF_META_AVAILABLE(int32_t, isoSpeed, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
+            int16_t fwk_isoSpeed = (int16_t) *isoSpeed;
+            exif->addEntry(EXIFTAGID_ISO_SPEED_RATING, EXIF_SHORT, 1, (void *) &(fwk_isoSpeed));
+        }
+
+
+        IF_META_AVAILABLE(int64_t, sensor_exposure_time,
+                CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
+            rat_t sensorExpTime;
+            rc = getExifExpTimeInfo(&sensorExpTime, *sensor_exposure_time);
+            if (rc == NO_ERROR){
+                exif->addEntry(EXIFTAGID_EXPOSURE_TIME,
+                        EXIF_RATIONAL,
+                        1,
+                        (void *)&(sensorExpTime));
+            } else {
+                LOGW("getExifExpTimeInfo failed");
+            }
+        }
+
+        // NOTE(review): jpeg_settings is dereferenced below without a NULL
+        // check; callers appear to always pass a valid pointer -- confirm.
+        char* jpeg_gps_processing_method = jpeg_settings->gps_processing_method;
+        if (strlen(jpeg_gps_processing_method) > 0) {
+            char gpsProcessingMethod[EXIF_ASCII_PREFIX_SIZE +
+                    GPS_PROCESSING_METHOD_SIZE];
+            count = 0;
+            rc = getExifGpsProcessingMethod(gpsProcessingMethod,
+                    count,
+                    jpeg_gps_processing_method);
+            if(rc == NO_ERROR) {
+                exif->addEntry(EXIFTAGID_GPS_PROCESSINGMETHOD,
+                        EXIF_ASCII,
+                        count,
+                        (void *)gpsProcessingMethod);
+            } else {
+                LOGW("getExifGpsProcessingMethod failed");
+            }
+        }
+
+        if (jpeg_settings->gps_coordinates_valid) {
+
+            //latitude
+            rat_t latitude[3];
+            char latRef[2];
+            rc = getExifLatitude(latitude, latRef,
+                    jpeg_settings->gps_coordinates[0]);
+            if(rc == NO_ERROR) {
+                exif->addEntry(EXIFTAGID_GPS_LATITUDE,
+                        EXIF_RATIONAL,
+                        3,
+                        (void *)latitude);
+                exif->addEntry(EXIFTAGID_GPS_LATITUDE_REF,
+                        EXIF_ASCII,
+                        2,
+                        (void *)latRef);
+            } else {
+                LOGW("getExifLatitude failed");
+            }
+
+            //longitude
+            rat_t longitude[3];
+            char lonRef[2];
+            rc = getExifLongitude(longitude, lonRef,
+                    jpeg_settings->gps_coordinates[1]);
+            if(rc == NO_ERROR) {
+                exif->addEntry(EXIFTAGID_GPS_LONGITUDE,
+                        EXIF_RATIONAL,
+                        3,
+                        (void *)longitude);
+
+                exif->addEntry(EXIFTAGID_GPS_LONGITUDE_REF,
+                        EXIF_ASCII,
+                        2,
+                        (void *)lonRef);
+            } else {
+                LOGW("getExifLongitude failed");
+            }
+
+            //altitude
+            rat_t altitude;
+            char altRef;
+            rc = getExifAltitude(&altitude, &altRef,
+                    jpeg_settings->gps_coordinates[2]);
+            if(rc == NO_ERROR) {
+                exif->addEntry(EXIFTAGID_GPS_ALTITUDE,
+                        EXIF_RATIONAL,
+                        1,
+                        (void *)&(altitude));
+
+                exif->addEntry(EXIFTAGID_GPS_ALTITUDE_REF,
+                        EXIF_BYTE,
+                        1,
+                        (void *)&altRef);
+            } else {
+                LOGW("getExifAltitude failed");
+            }
+        }
+
+        if (jpeg_settings->gps_timestamp_valid) {
+
+            char gpsDateStamp[20];
+            rat_t gpsTimeStamp[3];
+            rc = getExifGpsDateTimeStamp(gpsDateStamp, 20, gpsTimeStamp,
+                    jpeg_settings->gps_timestamp);
+            if(rc == NO_ERROR) {
+                exif->addEntry(EXIFTAGID_GPS_DATESTAMP, EXIF_ASCII,
+                        (uint32_t)(strlen(gpsDateStamp) + 1),
+                        (void *)gpsDateStamp);
+
+                exif->addEntry(EXIFTAGID_GPS_TIMESTAMP,
+                        EXIF_RATIONAL,
+                        3,
+                        (void *)gpsTimeStamp);
+            } else {
+                LOGW("getExifGpsDataTimeStamp failed");
+            }
+        }
+
+        IF_META_AVAILABLE(int32_t, exposure_comp, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
+            IF_META_AVAILABLE(cam_rational_type_t, comp_step, CAM_INTF_PARM_EV_STEP, metadata) {
+                srat_t exposure_val;
+                rc = getExifExposureValue(&exposure_val, *exposure_comp, *comp_step);
+                if(rc == NO_ERROR) {
+                    exif->addEntry(EXIFTAGID_EXPOSURE_BIAS_VALUE,
+                            EXIF_SRATIONAL,
+                            1,
+                            (void *)(&exposure_val));
+                } else {
+                    LOGW("getExifExposureValue failed ");
+                }
+            }
+        }
+    } else {
+        LOGW("no metadata provided ");
+    }
+
+#ifdef ENABLE_MODEL_INFO_EXIF
+
+    char value[PROPERTY_VALUE_MAX];
+    if (property_get("ro.product.manufacturer", value, "QCOM-AA") > 0) {
+        exif->addEntry(EXIFTAGID_MAKE, EXIF_ASCII,
+                (uint32_t)(strlen(value) + 1), (void *)value);
+    } else {
+        LOGW("getExifMaker failed");
+    }
+
+    if (property_get("ro.product.model", value, "QCAM-AA") > 0) {
+        exif->addEntry(EXIFTAGID_MODEL, EXIF_ASCII,
+                (uint32_t)(strlen(value) + 1), (void *)value);
+    } else {
+        LOGW("getExifModel failed");
+    }
+
+    if (property_get("ro.build.description", value, "QCAM-AA") > 0) {
+        exif->addEntry(EXIFTAGID_SOFTWARE, EXIF_ASCII,
+                (uint32_t)(strlen(value) + 1), (void *)value);
+    } else {
+        LOGW("getExifSoftware failed");
+    }
+
+#endif
+
+    if (jpeg_settings->image_desc_valid) {
+        if (exif->addEntry(EXIFTAGID_IMAGE_DESCRIPTION, EXIF_ASCII,
+                strlen(jpeg_settings->image_desc)+1,
+                (void *)jpeg_settings->image_desc)) {
+            LOGW("Adding IMAGE_DESCRIPTION tag failed");
+        }
+    }
+
+    if (needJpegExifRotation) {
+        // Map degrees to the EXIF Orientation tag values.
+        int16_t orientation;
+        switch (jpeg_settings->jpeg_orientation) {
+            case 0:
+                orientation = 1;
+                break;
+            case 90:
+                orientation = 6;
+                break;
+            case 180:
+                orientation = 3;
+                break;
+            case 270:
+                orientation = 8;
+                break;
+            default:
+                orientation = 1;
+                break;
+        }
+        exif->addEntry(EXIFTAGID_ORIENTATION,
+                       EXIF_SHORT,
+                       1,
+                       (void *)&orientation);
+        exif->addEntry(EXIFTAGID_TN_ORIENTATION,
+                       EXIF_SHORT,
+                       1,
+                       (void *)&orientation);
+
+    }
+
+    return exif;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCamera3Exif
+ *
+ * DESCRIPTION: constructor of QCamera3Exif
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera3Exif::QCamera3Exif()
+    : m_nNumEntries(0)
+{
+    // Zero the entry table so every union data pointer starts out NULL;
+    // the destructor relies on this when deciding whether to free.
+    memset(m_Entries, 0, sizeof(m_Entries));
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera3Exif
+ *
+ * DESCRIPTION: deconstructor of QCamera3Exif. Will release internal memory ptr.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera3Exif::~QCamera3Exif()
+{
+    // Release heap-allocated tag payloads.  Mirrors addEntry(): numeric
+    // types only allocate when count > 1 (single values are stored inline
+    // in the union), so those cases guard on count; ASCII and UNDEFINED
+    // payloads are always heap-allocated and are freed unconditionally
+    // when non-NULL.
+    for (uint32_t i = 0; i < m_nNumEntries; i++) {
+        switch (m_Entries[i].tag_entry.type) {
+            case EXIF_BYTE:
+                {
+                    if (m_Entries[i].tag_entry.count > 1 &&
+                            m_Entries[i].tag_entry.data._bytes != NULL) {
+                        free(m_Entries[i].tag_entry.data._bytes);
+                        m_Entries[i].tag_entry.data._bytes = NULL;
+                    }
+                }
+                break;
+            case EXIF_ASCII:
+                {
+                    // ASCII strings are always malloc'd by addEntry().
+                    if (m_Entries[i].tag_entry.data._ascii != NULL) {
+                        free(m_Entries[i].tag_entry.data._ascii);
+                        m_Entries[i].tag_entry.data._ascii = NULL;
+                    }
+                }
+                break;
+            case EXIF_SHORT:
+                {
+                    if (m_Entries[i].tag_entry.count > 1 &&
+                            m_Entries[i].tag_entry.data._shorts != NULL) {
+                        free(m_Entries[i].tag_entry.data._shorts);
+                        m_Entries[i].tag_entry.data._shorts = NULL;
+                    }
+                }
+                break;
+            case EXIF_LONG:
+                {
+                    if (m_Entries[i].tag_entry.count > 1 &&
+                            m_Entries[i].tag_entry.data._longs != NULL) {
+                        free(m_Entries[i].tag_entry.data._longs);
+                        m_Entries[i].tag_entry.data._longs = NULL;
+                    }
+                }
+                break;
+            case EXIF_RATIONAL:
+                {
+                    if (m_Entries[i].tag_entry.count > 1 &&
+                            m_Entries[i].tag_entry.data._rats != NULL) {
+                        free(m_Entries[i].tag_entry.data._rats);
+                        m_Entries[i].tag_entry.data._rats = NULL;
+                    }
+                }
+                break;
+            case EXIF_UNDEFINED:
+                {
+                    // UNDEFINED blobs are always malloc'd by addEntry().
+                    if (m_Entries[i].tag_entry.data._undefined != NULL) {
+                        free(m_Entries[i].tag_entry.data._undefined);
+                        m_Entries[i].tag_entry.data._undefined = NULL;
+                    }
+                }
+                break;
+            case EXIF_SLONG:
+                {
+                    if (m_Entries[i].tag_entry.count > 1 &&
+                            m_Entries[i].tag_entry.data._slongs != NULL) {
+                        free(m_Entries[i].tag_entry.data._slongs);
+                        m_Entries[i].tag_entry.data._slongs = NULL;
+                    }
+                }
+                break;
+            case EXIF_SRATIONAL:
+                {
+                    if (m_Entries[i].tag_entry.count > 1 &&
+                            m_Entries[i].tag_entry.data._srats != NULL) {
+                        free(m_Entries[i].tag_entry.data._srats);
+                        m_Entries[i].tag_entry.data._srats = NULL;
+                    }
+                }
+                break;
+            default:
+                LOGW("Error, Unknown type");
+                break;
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : addEntry
+ *
+ * DESCRIPTION: function to add an entry to exif data
+ *
+ * PARAMETERS :
+ *   @tagid   : exif tag ID
+ *   @type    : data type
+ *   @count   : number of data in uint of its type
+ *   @data    : input data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Exif::addEntry(exif_tag_id_t tagid,
+                              exif_tag_type_t type,
+                              uint32_t count,
+                              void *data)
+{
+    int32_t rc = NO_ERROR;
+    if(m_nNumEntries >= MAX_HAL3_EXIF_TABLE_ENTRIES) {
+        LOGE("Number of entries exceeded limit");
+        return NO_MEMORY;
+    }
+
+    m_Entries[m_nNumEntries].tag_id = tagid;
+    m_Entries[m_nNumEntries].tag_entry.type = type;
+    m_Entries[m_nNumEntries].tag_entry.count = count;
+    m_Entries[m_nNumEntries].tag_entry.copy = 1;
+    switch (type) {
+        case EXIF_BYTE:
+            {
+                if (count > 1) {
+                    uint8_t *values = (uint8_t *)malloc(count);
+                    if (values == NULL) {
+                        LOGE("No memory for byte array");
+                        rc = NO_MEMORY;
+                    } else {
+                        memcpy(values, data, count);
+                        m_Entries[m_nNumEntries].tag_entry.data._bytes = values;
+                    }
+                } else {
+                    m_Entries[m_nNumEntries].tag_entry.data._byte =
+                        *(uint8_t *)data;
+                }
+            }
+            break;
+        case EXIF_ASCII:
+            {
+                char *str = NULL;
+                str = (char *)malloc(count + 1);
+                if (str == NULL) {
+                    LOGE("No memory for ascii string");
+                    rc = NO_MEMORY;
+                } else {
+                    memset(str, 0, count + 1);
+                    memcpy(str, data, count);
+                    m_Entries[m_nNumEntries].tag_entry.data._ascii = str;
+                }
+            }
+            break;
+        case EXIF_SHORT:
+            {
+                uint16_t *exif_data = (uint16_t *)data;
+                if (count > 1) {
+                    uint16_t *values =
+                        (uint16_t *)malloc(count * sizeof(uint16_t));
+                    if (values == NULL) {
+                        LOGE("No memory for short array");
+                        rc = NO_MEMORY;
+                    } else {
+                        memcpy(values, exif_data, count * sizeof(uint16_t));
+                        m_Entries[m_nNumEntries].tag_entry.data._shorts = values;
+                    }
+                } else {
+                    m_Entries[m_nNumEntries].tag_entry.data._short =
+                        *(uint16_t *)data;
+                }
+            }
+            break;
+        case EXIF_LONG:
+            {
+                uint32_t *exif_data = (uint32_t *)data;
+                if (count > 1) {
+                    uint32_t *values =
+                        (uint32_t *)malloc(count * sizeof(uint32_t));
+                    if (values == NULL) {
+                        LOGE("No memory for long array");
+                        rc = NO_MEMORY;
+                    } else {
+                        memcpy(values, exif_data, count * sizeof(uint32_t));
+                        m_Entries[m_nNumEntries].tag_entry.data._longs = values;
+                    }
+                } else {
+                    m_Entries[m_nNumEntries].tag_entry.data._long =
+                        *(uint32_t *)data;
+                }
+            }
+            break;
+        case EXIF_RATIONAL:
+            {
+                rat_t *exif_data = (rat_t *)data;
+                if (count > 1) {
+                    rat_t *values = (rat_t *)malloc(count * sizeof(rat_t));
+                    if (values == NULL) {
+                        LOGE("No memory for rational array");
+                        rc = NO_MEMORY;
+                    } else {
+                        memcpy(values, exif_data, count * sizeof(rat_t));
+                        m_Entries[m_nNumEntries].tag_entry.data._rats = values;
+                    }
+                } else {
+                    m_Entries[m_nNumEntries].tag_entry.data._rat =
+                        *(rat_t *)data;
+                }
+            }
+            break;
+        case EXIF_UNDEFINED:
+            {
+                uint8_t *values = (uint8_t *)malloc(count);
+                if (values == NULL) {
+                    LOGE("No memory for undefined array");
+                    rc = NO_MEMORY;
+                } else {
+                    memcpy(values, data, count);
+                    m_Entries[m_nNumEntries].tag_entry.data._undefined = values;
+                }
+            }
+            break;
+        case EXIF_SLONG:
+            {
+                int32_t *exif_data = (int32_t *)data;
+                if (count > 1) {
+                    int32_t *values =
+                        (int32_t *)malloc(count * sizeof(int32_t));
+                    if (values == NULL) {
+                        LOGE("No memory for signed long array");
+                        rc = NO_MEMORY;
+                    } else {
+                        memcpy(values, exif_data, count * sizeof(int32_t));
+                        m_Entries[m_nNumEntries].tag_entry.data._slongs = values;
+                    }
+                } else {
+                    m_Entries[m_nNumEntries].tag_entry.data._slong =
+                        *(int32_t *)data;
+                }
+            }
+            break;
+        case EXIF_SRATIONAL:
+            {
+                srat_t *exif_data = (srat_t *)data;
+                if (count > 1) {
+                    srat_t *values = (srat_t *)malloc(count * sizeof(srat_t));
+                    if (values == NULL) {
+                        LOGE("No memory for sign rational array");
+                        rc = NO_MEMORY;
+                    } else {
+                        memcpy(values, exif_data, count * sizeof(srat_t));
+                        m_Entries[m_nNumEntries].tag_entry.data._srats = values;
+                    }
+                } else {
+                    m_Entries[m_nNumEntries].tag_entry.data._srat =
+                        *(srat_t *)data;
+                }
+            }
+            break;
+        default:
+            LOGE("Error, Unknown type");
+            rc = BAD_VALUE;
+            break;
+    }
+
+    // Count the entry only when it was stored successfully.  Previously a
+    // failed slot (e.g. after a malloc failure, or an unknown tag type)
+    // was still counted, so getEntries()/getNumOfEntries() could hand the
+    // JPEG encoder an entry whose data pointer was NULL.
+    if (rc == NO_ERROR) {
+        m_nNumEntries++;
+    }
+    return rc;
+}
+
+}; // namespace qcamera
diff --git a/msmcobalt/QCamera2/HAL3/QCamera3PostProc.h b/msmcobalt/QCamera2/HAL3/QCamera3PostProc.h
new file mode 100644
index 0000000..4f6126f
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL3/QCamera3PostProc.h
@@ -0,0 +1,192 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCamera3_POSTPROC_H__
+#define __QCamera3_POSTPROC_H__
+
+// Camera dependencies
+#include "hardware/camera3.h"
+#include "QCamera3HALHeader.h"
+#include "QCameraCmdThread.h"
+#include "QCameraQueue.h"
+
+extern "C" {
+#include "mm_camera_interface.h"
+#include "mm_jpeg_interface.h"
+}
+
+namespace qcamera {
+
+class QCamera3Exif;
+class QCamera3ProcessingChannel;
+class QCamera3ReprocessChannel;
+class QCamera3Stream;
+class QCamera3StreamMem;
+
+// Framework-sourced input for one post-processing (reprocess) request.
+typedef struct {
+    camera3_stream_buffer_t src_frame;// source frame
+    mm_camera_buf_def_t metadata_buffer;
+    mm_camera_buf_def_t input_buffer;
+    reprocess_config_t reproc_config;
+    buffer_handle_t *output_buffer;
+    uint32_t frameNumber;
+} qcamera_fwk_input_pp_data_t;
+
+// Bookkeeping for one in-flight JPEG encode job.
+typedef struct {
+    uint32_t jobId;                  // job ID
+    uint32_t client_hdl;             // handle of jpeg client (obtained when open jpeg)
+    mm_camera_super_buf_t *src_frame;// source frame (need to be returned back to kernel after done)
+    mm_camera_super_buf_t *src_reproc_frame; // original source frame for reproc if not NULL
+    qcamera_fwk_input_pp_data_t *fwk_frame; // source framework buffer
+    qcamera_fwk_input_pp_data_t *fwk_src_buffer; // original framework source frame for reproc
+    QCamera3Exif *pJpegExifObj;
+    metadata_buffer_t *metadata;
+    mm_camera_super_buf_t *src_metadata;
+    jpeg_settings_t *jpeg_settings;
+} qcamera_hal3_jpeg_data_t;
+
+// Bookkeeping for one in-flight post-processing job.
+typedef struct {
+    uint32_t jobId;                  // job ID
+    mm_camera_super_buf_t *src_frame;// source frame (need to be returned back to kernel after done)
+    qcamera_fwk_input_pp_data_t *fwk_src_frame;// source frame
+    metadata_buffer_t *metadata;
+    jpeg_settings_t *jpeg_settings;
+    mm_camera_super_buf_t *src_metadata;
+} qcamera_hal3_pp_data_t;
+
+// Pairing of a reprocess input super-buffer with its output buffer.
+typedef struct {
+    mm_camera_super_buf_t *input;
+    buffer_handle_t *output;
+    uint32_t frameNumber;
+} qcamera_hal3_pp_buffer_t;
+
+#define MAX_HAL3_EXIF_TABLE_ENTRIES 23
+// Container for the EXIF tag table handed to the JPEG encoder.  Owns any
+// heap-allocated tag payloads added via addEntry() and frees them in the
+// destructor.
+class QCamera3Exif
+{
+public:
+    QCamera3Exif();
+    virtual ~QCamera3Exif();
+
+    // Append one EXIF tag.  Returns NO_ERROR on success, or NO_MEMORY
+    // when the table is full or allocation fails.
+    int32_t addEntry(exif_tag_id_t tagid,
+                     exif_tag_type_t type,
+                     uint32_t count,
+                     void *data);
+    uint32_t getNumOfEntries() {return m_nNumEntries;};
+    QEXIF_INFO_DATA *getEntries() {return m_Entries;};
+
+private:
+    QEXIF_INFO_DATA m_Entries[MAX_HAL3_EXIF_TABLE_ENTRIES];  // exif tags for JPEG encoder
+    uint32_t  m_nNumEntries;                            // number of valid entries
+};
+
+// Post-processing pipeline for a processing channel: queues incoming
+// frames, drives reprocess and JPEG-encode jobs on a dedicated command
+// thread, and releases buffers when jobs complete.
+class QCamera3PostProcessor
+{
+public:
+    QCamera3PostProcessor(QCamera3ProcessingChannel *ch_ctrl);
+    virtual ~QCamera3PostProcessor();
+
+    int32_t init(QCamera3StreamMem *mMemory);
+    int32_t initJpeg(jpeg_encode_callback_t jpeg_cb,
+            cam_dimension_t *m_max_pic_dim,
+            void *user_data);
+    int32_t deinit();
+    int32_t start(const reprocess_config_t &config);
+    int32_t stop();
+    int32_t flush();
+    // processData overloads accept framework frames, camera super-buffers
+    // with an explicit output, or bare super-buffers.
+    int32_t processData(qcamera_fwk_input_pp_data_t *frame);
+    int32_t processData(mm_camera_super_buf_t *input,
+            buffer_handle_t *output, uint32_t frameNumber);
+    int32_t processData(mm_camera_super_buf_t *input);
+    int32_t processPPData(mm_camera_super_buf_t *frame);
+    int32_t processPPMetadata(mm_camera_super_buf_t *reproc_meta);
+    int32_t processJpegSettingData(jpeg_settings_t *jpeg_settings);
+    qcamera_hal3_pp_data_t *dequeuePPJob(uint32_t frameNumber);
+    qcamera_hal3_jpeg_data_t *findJpegJobByJobId(uint32_t jobId);
+    void releaseJpegJobData(qcamera_hal3_jpeg_data_t *job);
+    int32_t releaseOfflineBuffers(bool all);
+    void releasePPJobData(qcamera_hal3_pp_data_t *job);
+
+private:
+    int32_t sendEvtNotify(int32_t msg_type, int32_t ext1, int32_t ext2);
+    mm_jpeg_color_format getColorfmtFromImgFmt(cam_format_t img_fmt);
+    mm_jpeg_format_t getJpegImgTypeFromImgFmt(cam_format_t img_fmt);
+    int32_t getJpegEncodeConfig(mm_jpeg_encode_params_t& encode_parm,
+                                  QCamera3Stream *main_stream,
+                                  jpeg_settings_t *jpeg_settings);
+    int32_t getFWKJpegEncodeConfig(mm_jpeg_encode_params_t& encode_parm,
+            qcamera_fwk_input_pp_data_t *frame,
+            jpeg_settings_t *jpeg_settings);
+    // Builds the EXIF table from metadata and jpeg settings; caller owns
+    // the returned object.
+    QCamera3Exif * getExifData(metadata_buffer_t *metadata,
+            jpeg_settings_t *jpeg_settings, bool needJpegExifRotation);
+    int32_t encodeData(qcamera_hal3_jpeg_data_t *jpeg_job_data,
+                       uint8_t &needNewSess);
+    int32_t encodeFWKData(qcamera_hal3_jpeg_data_t *jpeg_job_data,
+            uint8_t &needNewSess);
+    void releaseSuperBuf(mm_camera_super_buf_t *super_buf);
+    static void releaseNotifyData(void *user_data, void *cookie);
+    int32_t processRawImageImpl(mm_camera_super_buf_t *recvd_frame);
+
+    static void releaseJpegData(void *data, void *user_data);
+    static void releasePPInputData(void *data, void *user_data);
+    static void releaseMetadata(void *data, void *user_data);
+    static void releaseOngoingPPData(void *data, void *user_data);
+
+    static void *dataProcessRoutine(void *data);
+
+    bool needsReprocess(qcamera_fwk_input_pp_data_t *frame);
+
+private:
+    QCamera3ProcessingChannel  *m_parent;
+    jpeg_encode_callback_t     mJpegCB;
+    void *                     mJpegUserData;
+    mm_jpeg_ops_t              mJpegHandle;
+    uint32_t                   mJpegClientHandle;
+    uint32_t                   mJpegSessionId;
+    cam_jpeg_metadata_t        mJpegMetadata;
+
+    uint32_t                   m_bThumbnailNeeded;
+    QCamera3StreamMem          *mOutputMem;
+    QCamera3ReprocessChannel *  m_pReprocChannel;
+
+    QCameraQueue m_inputPPQ;            // input queue for postproc
+    QCameraQueue m_inputFWKPPQ;         // framework input queue for postproc
+    QCameraQueue m_ongoingPPQ;          // ongoing postproc queue
+    QCameraQueue m_inputJpegQ;          // input jpeg job queue
+    QCameraQueue m_ongoingJpegQ;        // ongoing jpeg job queue
+    QCameraQueue m_inputRawQ;           // input raw job queue
+    QCameraQueue m_inputMetaQ;          // input meta queue
+    QCameraQueue m_jpegSettingsQ;       // input jpeg setting queue
+    QCameraCmdThread m_dataProcTh;      // thread for data processing
+
+    pthread_mutex_t mReprocJobLock;
+};
+
+}; // namespace qcamera
+
+#endif /* __QCamera3_POSTPROC_H__ */
diff --git a/msmcobalt/QCamera2/HAL3/QCamera3Stream.cpp b/msmcobalt/QCamera2/HAL3/QCamera3Stream.cpp
new file mode 100644
index 0000000..9e8957f
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL3/QCamera3Stream.cpp
@@ -0,0 +1,1532 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCamera3Stream"
+
+// Camera dependencies
+#include "QCamera3HWI.h"
+#include "QCamera3Stream.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+using namespace android;
+
+namespace qcamera {
+#define MAX_BATCH_SIZE   32
+
+// Human-readable names indexed by cam_stream_type_t (used for thread naming).
+// NOTE: a missing comma after "CAM_ANALYSIS" previously fused it with
+// "CAM_MAX" via adjacent string-literal concatenation, shrinking the array
+// by one entry and making the CAM_MAX index read out of bounds.
+const char* QCamera3Stream::mStreamNames[] = {
+        "CAM_DEFAULT",
+        "CAM_PREVIEW",
+        "CAM_POSTVIEW",
+        "CAM_SNAPSHOT",
+        "CAM_VIDEO",
+        "CAM_CALLBACK",
+        "CAM_IMPL_DEFINED",
+        "CAM_METADATA",
+        "CAM_RAW",
+        "CAM_OFFLINE_PROC",
+        "CAM_PARM",
+        "CAM_ANALYSIS",
+        "CAM_MAX" };
+
+/*===========================================================================
+ * FUNCTION   : get_bufs
+ *
+ * DESCRIPTION: static trampoline invoked by mm-camera-interface to allocate
+ *              stream buffers; forwards to the QCamera3Stream instance
+ *              carried in user_data.
+ *
+ * PARAMETERS :
+ *   @offset     : offset info of stream buffers
+ *   @num_bufs   : number of buffers allocated
+ *   @initial_reg_flag: flag to indicate if buffer needs to be registered
+ *                      at kernel initially
+ *   @bufs       : output of allocated buffers
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *   @user_data  : user data ptr of ops_tbl (the QCamera3Stream object)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::get_bufs(
+                     cam_frame_len_offset_t *offset,
+                     uint8_t *num_bufs,
+                     uint8_t **initial_reg_flag,
+                     mm_camera_buf_def_t **bufs,
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                     void *user_data)
+{
+    QCamera3Stream *stream = reinterpret_cast<QCamera3Stream *>(user_data);
+    if (stream == NULL) {
+        LOGE("getBufs invalid stream pointer");
+        return NO_MEMORY;
+    }
+
+    // Allocate the image buffers first.
+    int32_t rc = stream->getBufs(offset, num_bufs, initial_reg_flag,
+            bufs, ops_tbl);
+    if (rc != NO_ERROR) {
+        LOGE("stream->getBufs failed");
+        return NO_MEMORY;
+    }
+
+    // In batch (HFR) mode the interface registers batch containers rather
+    // than the image buffers: overwrite the output arguments accordingly.
+    if (stream->mBatchSize) {
+        rc = stream->getBatchBufs(num_bufs, initial_reg_flag, bufs, ops_tbl);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : put_bufs
+ *
+ * DESCRIPTION: static trampoline invoked by mm-camera-interface to release
+ *              stream buffers; forwards to the QCamera3Stream instance
+ *              carried in user_data.
+ *
+ * PARAMETERS :
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *   @user_data  : user data ptr of ops_tbl (the QCamera3Stream object)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::put_bufs(
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                     void *user_data)
+{
+    QCamera3Stream *stream = reinterpret_cast<QCamera3Stream *>(user_data);
+    if (stream == NULL) {
+        LOGE("putBufs invalid stream pointer");
+        return NO_MEMORY;
+    }
+
+    // Release the batch containers first; a failure is only logged so the
+    // image buffers below are released regardless.
+    if (stream->mBatchSize) {
+        if (NO_ERROR != stream->putBatchBufs(ops_tbl)) {
+            LOGE("stream->putBatchBufs failed");
+        }
+    }
+    return stream->putBufs(ops_tbl);
+}
+
+/*===========================================================================
+ * FUNCTION   : invalidate_buf
+ *
+ * DESCRIPTION: static trampoline to invalidate one stream buffer (or, in
+ *              batch mode, every image buffer inside one batch container).
+ *
+ * PARAMETERS :
+ *   @index      : index of the stream buffer (or batch container)
+ *   @user_data  : user data ptr of ops_tbl (the QCamera3Stream object)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::invalidate_buf(uint32_t index, void *user_data)
+{
+    QCamera3Stream *stream = reinterpret_cast<QCamera3Stream *>(user_data);
+    if (stream == NULL) {
+        LOGE("invalid stream pointer");
+        return NO_MEMORY;
+    }
+
+    if (!stream->mBatchSize) {
+        return stream->invalidateBuf(index);
+    }
+
+    // Batch mode: index names a container; walk its aggregated image
+    // buffers, OR-ing failures into the result so one bad buffer does not
+    // stop the rest.
+    int32_t rc = NO_ERROR;
+    for (size_t i = 0;
+            i < stream->mBatchBufDefs[index].user_buf.bufs_used; i++) {
+        uint32_t buf_idx = stream->mBatchBufDefs[index].user_buf.buf_idx[i];
+        int32_t retVal = stream->invalidateBuf(buf_idx);
+        if (NO_ERROR != retVal) {
+            LOGE("invalidateBuf failed for buf_idx: %d err: %d",
+                     buf_idx, retVal);
+        }
+        rc |= retVal;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : clean_invalidate_buf
+ *
+ * DESCRIPTION: static function entry to clean and invalidate a specific
+ *              stream buffer (or every image buffer of a batch container).
+ *
+ * PARAMETERS :
+ *   @index      : index of the stream buffer to invalidate
+ *   @user_data  : user data ptr of ops_tbl (the QCamera3Stream object)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::clean_invalidate_buf(uint32_t index, void *user_data)
+{
+    int32_t rc = NO_ERROR;
+
+    QCamera3Stream *stream = reinterpret_cast<QCamera3Stream *>(user_data);
+    if (!stream) {
+        LOGE("invalid stream pointer");
+        return NO_MEMORY;
+    }
+    if (stream->mBatchSize) {
+        // Batch mode: index names a container; clean+invalidate each
+        // aggregated image buffer, OR-ing failures into rc.
+        int32_t retVal = NO_ERROR;
+        for (size_t i = 0;
+                i < stream->mBatchBufDefs[index].user_buf.bufs_used; i++) {
+            uint32_t buf_idx = stream->mBatchBufDefs[index].user_buf.buf_idx[i];
+            retVal = stream->cleanInvalidateBuf(buf_idx);
+            if (NO_ERROR != retVal) {
+                // Fixed log text: this path calls cleanInvalidateBuf, not
+                // invalidateBuf — the old message pointed at the wrong call.
+                LOGE("cleanInvalidateBuf failed for buf_idx: %d err: %d",
+                         buf_idx, retVal);
+            }
+            rc |= retVal;
+        }
+    } else {
+        rc = stream->cleanInvalidateBuf(index);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCamera3Stream
+ *
+ * DESCRIPTION: constructor of QCamera3Stream
+ *
+ * PARAMETERS :
+ *   @camHandle  : camera handle
+ *   @chId       : channel handle
+ *   @camOps     : ptr to camera ops table
+ *   @paddingInfo: ptr to padding info (copied; caller's struct need not
+ *                 outlive this object)
+ *   @channel    : owning channel, later used to obtain stream buffers
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera3Stream::QCamera3Stream(uint32_t camHandle,
+                             uint32_t chId,
+                             mm_camera_ops_t *camOps,
+                             cam_padding_info_t *paddingInfo,
+                             QCamera3Channel *channel) :
+        mCamHandle(camHandle),
+        mChannelHandle(chId),
+        mHandle(0),
+        mCamOps(camOps),
+        mStreamInfo(NULL),
+        mMemOps(NULL),
+        mNumBufs(0),
+        mDataCB(NULL),
+        mUserData(NULL),
+        mDataQ(releaseFrameData, this),
+        mStreamInfoBuf(NULL),
+        mStreamBufs(NULL),
+        mBufDefs(NULL),
+        mChannel(channel),
+        mBatchSize(0),
+        mNumBatchBufs(0),
+        mStreamBatchBufs(NULL),
+        mBatchBufDefs(NULL),
+        mCurrentBatchBufDef(NULL),
+        mBufsStaged(0),
+        mFreeBatchBufQ(NULL, this)
+{
+    // Register the static buffer-management callbacks with ourselves as the
+    // user_data cookie; mm-camera-interface calls back through this vtable.
+    mMemVtbl.user_data = this;
+    mMemVtbl.get_bufs = get_bufs;
+    mMemVtbl.put_bufs = put_bufs;
+    mMemVtbl.invalidate_buf = invalidate_buf;
+    mMemVtbl.clean_invalidate_buf = clean_invalidate_buf;
+    mMemVtbl.set_config_ops = NULL;
+    memset(&mFrameLenOffset, 0, sizeof(mFrameLenOffset));
+    // Keep a private copy of the padding info for later stream config.
+    memcpy(&mPaddingInfo, paddingInfo, sizeof(cam_padding_info_t));
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera3Stream
+ *
+ * DESCRIPTION: destructor of QCamera3Stream; releases the stream info
+ *              buffer and deletes the underlying interface stream.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera3Stream::~QCamera3Stream()
+{
+    // Unmap and release the stream info buffer, if one was ever allocated.
+    if (mStreamInfoBuf != NULL) {
+        if (mCamOps->unmap_stream_buf(mCamHandle, mChannelHandle, mHandle,
+                CAM_MAPPING_BUF_TYPE_STREAM_INFO, 0, -1) < 0) {
+            LOGE("Failed to un-map stream info buffer");
+        }
+        mStreamInfoBuf->deallocate();
+        delete mStreamInfoBuf;
+        mStreamInfoBuf = NULL;
+    }
+
+    // Tear down the stream itself.
+    if (mHandle > 0) {
+        mCamOps->delete_stream(mCamHandle, mChannelHandle, mHandle);
+        mHandle = 0;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : init
+ *
+ * DESCRIPTION: initialize stream obj: add the stream to the interface,
+ *              allocate/map the stream info buffer, fill it in, and
+ *              configure the stream. On any failure all acquired resources
+ *              are unwound (goto cleanup chain) and an error is returned.
+ *
+ * PARAMETERS :
+ *   @streamType     : stream type
+ *   @streamFormat   : stream format
+ *   @streamDim      : stream dimension
+ *   @streamRotation : rotation to request from post-processing
+ *   @reprocess_config: reprocess stream input configuration
+ *   @minNumBuffers  : minimal buffer count for particular stream type
+ *   @postprocess_mask: PP mask
+ *   @is_type  : Image stabilization type, cam_is_type_t
+ *   @batchSize  : Number of image buffers in a batch.
+ *                 0: No batch. N: container with N image buffers
+ *   @stream_cb      : callback handle
+ *   @userdata       : user data
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::init(cam_stream_type_t streamType,
+                            cam_format_t streamFormat,
+                            cam_dimension_t streamDim,
+                            cam_rotation_t streamRotation,
+                            cam_stream_reproc_config_t* reprocess_config,
+                            uint8_t minNumBuffers,
+                            cam_feature_mask_t postprocess_mask,
+                            cam_is_type_t is_type,
+                            uint32_t batchSize,
+                            hal3_stream_cb_routine stream_cb,
+                            void *userdata)
+{
+    int32_t rc = OK;
+    ssize_t bufSize = BAD_INDEX;
+    mm_camera_stream_config_t stream_config;
+    LOGD("batch size is %d", batchSize);
+
+    mHandle = mCamOps->add_stream(mCamHandle, mChannelHandle);
+    if (!mHandle) {
+        LOGE("add_stream failed");
+        rc = UNKNOWN_ERROR;
+        goto done;
+    }
+
+    // allocate and map stream info memory
+    mStreamInfoBuf = new QCamera3HeapMemory(1);
+    if (mStreamInfoBuf == NULL) {
+        LOGE("no memory for stream info buf obj");
+        rc = -ENOMEM;
+        goto err1;
+    }
+    rc = mStreamInfoBuf->allocate(sizeof(cam_stream_info_t));
+    if (rc < 0) {
+        LOGE("no memory for stream info");
+        rc = -ENOMEM;
+        goto err2;
+    }
+
+    mStreamInfo =
+        reinterpret_cast<cam_stream_info_t *>(mStreamInfoBuf->getPtr(0));
+    memset(mStreamInfo, 0, sizeof(cam_stream_info_t));
+    mStreamInfo->stream_type = streamType;
+    mStreamInfo->fmt = streamFormat;
+    mStreamInfo->dim = streamDim;
+    mStreamInfo->num_bufs = minNumBuffers;
+    mStreamInfo->pp_config.feature_mask = postprocess_mask;
+    mStreamInfo->is_type = is_type;
+    mStreamInfo->pp_config.rotation = streamRotation;
+    // Use a standard 64-bit conversion; the previous "%Ld" is a
+    // non-standard length modifier and undefined behavior for the
+    // 64-bit feature mask.
+    LOGD("stream_type is %d, feature_mask is %lld",
+           mStreamInfo->stream_type,
+           (long long int)mStreamInfo->pp_config.feature_mask);
+
+    bufSize = mStreamInfoBuf->getSize(0);
+    if (BAD_INDEX != bufSize) {
+        rc = mCamOps->map_stream_buf(mCamHandle,
+                mChannelHandle, mHandle, CAM_MAPPING_BUF_TYPE_STREAM_INFO,
+                0, -1, mStreamInfoBuf->getFd(0), (size_t)bufSize,
+                mStreamInfoBuf->getPtr(0));
+        if (rc < 0) {
+            LOGE("Failed to map stream info buffer");
+            goto err3;
+        }
+    } else {
+        LOGE("Failed to retrieve buffer size (bad index)");
+        // Bug fix: rc still held NO_ERROR on this path, so init() tore the
+        // stream down yet reported success to the caller.
+        rc = UNKNOWN_ERROR;
+        goto err3;
+    }
+
+    mNumBufs = minNumBuffers;
+    if (reprocess_config != NULL) {
+        mStreamInfo->reprocess_config = *reprocess_config;
+        mStreamInfo->streaming_mode = CAM_STREAMING_MODE_BURST;
+        //mStreamInfo->num_of_burst = reprocess_config->offline.num_of_bufs;
+        mStreamInfo->num_of_burst = 1;
+    } else if (batchSize) {
+        if (batchSize > MAX_BATCH_SIZE) {
+            LOGE("batchSize:%d is very large", batchSize);
+            rc = BAD_VALUE;
+            goto err4;
+        }
+        else {
+            // Batch mode: the interface sees containers, each holding
+            // batchSize image buffers.
+            mNumBatchBufs = MAX_INFLIGHT_HFR_REQUESTS / batchSize;
+            mStreamInfo->streaming_mode = CAM_STREAMING_MODE_BATCH;
+            mStreamInfo->user_buf_info.frame_buf_cnt = batchSize;
+            mStreamInfo->user_buf_info.size =
+                    (uint32_t)(sizeof(msm_camera_user_buf_cont_t));
+            mStreamInfo->num_bufs = mNumBatchBufs;
+            //Frame interval is irrelevant since time stamp calculation is not
+            //required from the mCamOps
+            mStreamInfo->user_buf_info.frameInterval = 0;
+            LOGD("batch size is %d", batchSize);
+        }
+    } else {
+        mStreamInfo->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+    }
+
+    // Configure the stream
+    stream_config.stream_info = mStreamInfo;
+    stream_config.mem_vtbl = mMemVtbl;
+    stream_config.padding_info = mPaddingInfo;
+    stream_config.userdata = this;
+    stream_config.stream_cb = dataNotifyCB;
+    stream_config.stream_cb_sync = NULL;
+
+    rc = mCamOps->config_stream(mCamHandle,
+            mChannelHandle, mHandle, &stream_config);
+    if (rc < 0) {
+        LOGE("Failed to config stream, rc = %d", rc);
+        goto err4;
+    }
+
+    mDataCB = stream_cb;
+    mUserData = userdata;
+    mBatchSize = batchSize;
+    return 0;
+
+// Unwind in reverse order of acquisition.
+err4:
+    mCamOps->unmap_stream_buf(mCamHandle,
+            mChannelHandle, mHandle, CAM_MAPPING_BUF_TYPE_STREAM_INFO, 0, -1);
+err3:
+    mStreamInfoBuf->deallocate();
+err2:
+    delete mStreamInfoBuf;
+    mStreamInfoBuf = NULL;
+    mStreamInfo = NULL;
+err1:
+    mCamOps->delete_stream(mCamHandle, mChannelHandle, mHandle);
+    mHandle = 0;
+    mNumBufs = 0;
+done:
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : start
+ *
+ * DESCRIPTION: start stream: initialize the data queues and launch the
+ *              stream thread that services them.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::start()
+{
+    mDataQ.init();
+    // The free-batch queue is only used in batch (HFR) mode.
+    if (mBatchSize) {
+        mFreeBatchBufQ.init();
+    }
+    return mProcTh.launch(dataProcRoutine, this);
+}
+
+/*===========================================================================
+ * FUNCTION   : stop
+ *
+ * DESCRIPTION: stop stream by asking the stream thread to exit and
+ *              joining it.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::stop()
+{
+    return mProcTh.exit();
+}
+
+/*===========================================================================
+ * FUNCTION   : processDataNotify
+ *
+ * DESCRIPTION: hand a received frame to the stream thread via mDataQ.
+ *
+ * PARAMETERS :
+ *   @frame   : heap-allocated stream frame (ownership passes to the queue
+ *              on success; freed here otherwise)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::processDataNotify(mm_camera_super_buf_t *frame)
+{
+    LOGD("E\n");
+    int32_t rc = NO_ERROR;
+    if (!mDataQ.enqueue((void *)frame)) {
+        // Queue inactive (stream stopping): return the buffer to the
+        // interface and drop the heap copy made by dataNotifyCB.
+        LOGD("Stream thread is not active, no ops here");
+        bufDone(frame->bufs[0]->buf_idx);
+        free(frame);
+    } else {
+        rc = mProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+    }
+    LOGD("X\n");
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : dataNotifyCB
+ *
+ * DESCRIPTION: callback for data notify. This function is registered with
+ *              mm-camera-interface to handle data notify
+ *
+ * PARAMETERS :
+ *   @recvd_frame   : stream frame received (owned by the interface; only
+ *                    valid for the duration of this callback)
+ *   @userdata      : user data ptr (the QCamera3Stream object)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3Stream::dataNotifyCB(mm_camera_super_buf_t *recvd_frame,
+                                 void *userdata)
+{
+    LOGD("E\n");
+    QCamera3Stream* stream = (QCamera3Stream *)userdata;
+    // Verify the callback really belongs to this stream before queueing.
+    if (stream == NULL ||
+        recvd_frame == NULL ||
+        recvd_frame->bufs[0] == NULL ||
+        recvd_frame->bufs[0]->stream_id != stream->getMyHandle()) {
+        LOGE("Not a valid stream to handle buf");
+        return;
+    }
+
+    // Copy the super buffer to the heap: recvd_frame is reclaimed when this
+    // callback returns, but processing happens asynchronously.
+    mm_camera_super_buf_t *frame =
+        (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+    if (frame == NULL) {
+        // Fixed message: the allocation is a super buf, not a buf def.
+        LOGE("No mem for mm_camera_super_buf_t");
+        stream->bufDone(recvd_frame->bufs[0]->buf_idx);
+        return;
+    }
+    *frame = *recvd_frame;
+    stream->processDataNotify(frame);
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : dataProcRoutine
+ *
+ * DESCRIPTION: function to process data in the main stream thread; drains
+ *              mDataQ on DO_NEXT_JOB commands until EXIT is received.
+ *
+ * PARAMETERS :
+ *   @data    : user data ptr (the owning QCamera3Stream)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void *QCamera3Stream::dataProcRoutine(void *data)
+{
+    int running = 1;
+    int ret;
+    QCamera3Stream *pme = (QCamera3Stream *)data;
+    QCameraCmdThread *cmdThread = &pme->mProcTh;
+
+    // Name the thread after the stream type for easier debugging.
+    cmdThread->setName(mStreamNames[pme->mStreamInfo->stream_type]);
+
+    LOGD("E");
+    do {
+        do {
+            // Block until a command is posted; retry when the wait returns
+            // non-zero with errno == EINVAL, bail out on any other error.
+            ret = cam_sem_wait(&cmdThread->cmd_sem);
+            if (ret != 0 && errno != EINVAL) {
+                LOGE("cam_sem_wait error (%s)",
+                       strerror(errno));
+                return NULL;
+            }
+        } while (ret != 0);
+
+        // we got notified about new cmd avail in cmd queue
+        camera_cmd_type_t cmd = cmdThread->getCmd();
+        switch (cmd) {
+        case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+            {
+                LOGD("Do next job");
+                mm_camera_super_buf_t *frame =
+                    (mm_camera_super_buf_t *)pme->mDataQ.dequeue();
+                if (NULL != frame) {
+                    if (UNLIKELY(frame->bufs[0]->buf_type ==
+                            CAM_STREAM_BUF_TYPE_USERPTR)) {
+                        // Batch container: unpack and dispatch each member.
+                        pme->handleBatchBuffer(frame);
+                    } else if (pme->mDataCB != NULL) {
+                        // Hand the frame to the registered consumer; the
+                        // callback owns the heap copy from here on.
+                        pme->mDataCB(frame, pme, pme->mUserData);
+                    } else {
+                        // no data cb routine, return buf here
+                        pme->bufDone(frame->bufs[0]->buf_idx);
+                    }
+                }
+            }
+            break;
+        case CAMERA_CMD_TYPE_EXIT:
+            LOGH("Exit");
+            /* flush data buf queue */
+            pme->mDataQ.flush();
+            pme->flushFreeBatchBufQ();
+            running = 0;
+            break;
+        default:
+            break;
+        }
+    } while (running);
+    LOGD("X");
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : bufDone
+ *
+ * DESCRIPTION: return stream buffer to kernel; lazily maps buffers that
+ *              were allocated after the initial registration.
+ *
+ * PARAMETERS :
+ *   @index   : index of buffer to be returned
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::bufDone(uint32_t index)
+{
+    int32_t rc = NO_ERROR;
+    Mutex::Autolock lock(mLock);
+
+    if ((index >= mNumBufs) || (mBufDefs == NULL)) {
+        LOGE("index; %d, mNumBufs: %d", index, mNumBufs);
+        return BAD_INDEX;
+    }
+    if (mStreamBufs == NULL)
+    {
+        LOGE("putBufs already called");
+        return INVALID_OPERATION;
+    }
+
+    // mem_info == NULL means this buffer was never registered with the
+    // kernel (allocated after getBufs): map it now and fill in its buf def.
+    if( NULL == mBufDefs[index].mem_info) {
+        if (NULL == mMemOps) {
+            LOGE("Camera operations not initialized");
+            return NO_INIT;
+        }
+
+        ssize_t bufSize = mStreamBufs->getSize(index);
+
+        if (BAD_INDEX != bufSize) {
+            LOGD("Map streamBufIdx: %d", index);
+            rc = mMemOps->map_ops(index, -1, mStreamBufs->getFd(index),
+                    (size_t)bufSize, mStreamBufs->getPtr(index),
+                    CAM_MAPPING_BUF_TYPE_STREAM_BUF, mMemOps->userdata);
+            if (rc < 0) {
+                LOGE("Failed to map camera buffer %d", index);
+                return rc;
+            }
+
+            rc = mStreamBufs->getBufDef(mFrameLenOffset, mBufDefs[index], index);
+            if (NO_ERROR != rc) {
+                LOGE("Couldn't find camera buffer definition");
+                // Roll back the mapping done just above.
+                mMemOps->unmap_ops(index, -1, CAM_MAPPING_BUF_TYPE_STREAM_BUF, mMemOps->userdata);
+                return rc;
+            }
+        } else {
+            LOGE("Failed to retrieve buffer size (bad index)");
+            return INVALID_OPERATION;
+        }
+    }
+
+    if (UNLIKELY(mBatchSize)) {
+        // Batch mode: stage into the current batch container instead of
+        // queueing to the kernel directly.
+        rc = aggregateBufToBatch(mBufDefs[index]);
+    } else {
+        rc = mCamOps->qbuf(mCamHandle, mChannelHandle, &mBufDefs[index]);
+        if (rc < 0) {
+            return FAILED_TRANSACTION;
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : bufRelease
+ *
+ * DESCRIPTION: release all resources associated with this buffer
+ *              (un-maps it from the kernel and clears its buf def).
+ *
+ * PARAMETERS :
+ *   @index   : index of buffer to be released
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::bufRelease(int32_t index)
+{
+    int32_t rc = NO_ERROR;
+    Mutex::Autolock lock(mLock);
+
+    // Reject negative indices too: index is signed while mNumBufs is
+    // unsigned, so the plain >= test alone let negative values through
+    // to the mBufDefs[] access below.
+    if ((index < 0) || (index >= mNumBufs) || (mBufDefs == NULL)) {
+        return BAD_INDEX;
+    }
+
+    if (NULL != mBufDefs[index].mem_info) {
+        if (NULL == mMemOps) {
+            LOGE("Camera operations not initialized");
+            return NO_INIT;
+        }
+
+        rc = mMemOps->unmap_ops(index, -1, CAM_MAPPING_BUF_TYPE_STREAM_BUF,
+                mMemOps->userdata);
+        if (rc < 0) {
+            LOGE("Failed to un-map camera buffer %d", index);
+            return rc;
+        }
+
+        mBufDefs[index].mem_info = NULL;
+    } else {
+        // Fixed: the original format string had %d with no argument (UB).
+        LOGE("Buffer at index %d not registered", index);
+        return BAD_INDEX;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getBufs
+ *
+ * DESCRIPTION: allocate stream buffers via the owning channel, map them
+ *              into the kernel, and build the buf-def and reg-flag arrays
+ *              handed back to mm-camera-interface.
+ *
+ * PARAMETERS :
+ *   @offset     : offset info of stream buffers
+ *   @num_bufs   : number of buffers allocated
+ *   @initial_reg_flag: flag to indicate if buffer needs to be registered
+ *                      at kernel initially
+ *   @bufs       : output of allocated buffers
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::getBufs(cam_frame_len_offset_t *offset,
+                     uint8_t *num_bufs,
+                     uint8_t **initial_reg_flag,
+                     mm_camera_buf_def_t **bufs,
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                     mm_camera_map_unmap_ops_tbl_t *)
+{
+    int rc = NO_ERROR;
+    uint8_t *regFlags;
+    Mutex::Autolock lock(mLock);
+
+    if (!ops_tbl) {
+        LOGE("ops_tbl is NULL");
+        return INVALID_OPERATION;
+    }
+
+    mFrameLenOffset = *offset;
+    mMemOps = ops_tbl;
+
+    if (mStreamBufs != NULL) {
+       LOGE("Failed getBufs being called twice in a row without a putBufs call");
+       return INVALID_OPERATION;
+    }
+    // The channel owns the memory object; we only hold a pointer to it.
+    mStreamBufs = mChannel->getStreamBufs(mFrameLenOffset.frame_len);
+    if (!mStreamBufs) {
+        LOGE("Failed to allocate stream buffers");
+        return NO_MEMORY;
+    }
+
+    // Map every valid (already-allocated) buffer; on failure, unwind the
+    // mappings done so far.
+    for (uint32_t i = 0; i < mNumBufs; i++) {
+        if (mStreamBufs->valid(i)) {
+            ssize_t bufSize = mStreamBufs->getSize(i);
+            if (BAD_INDEX != bufSize) {
+                rc = ops_tbl->map_ops(i, -1, mStreamBufs->getFd(i),
+                        (size_t)bufSize, mStreamBufs->getPtr(i),
+                        CAM_MAPPING_BUF_TYPE_STREAM_BUF,
+                        ops_tbl->userdata);
+                if (rc < 0) {
+                    LOGE("map_stream_buf failed: %d", rc);
+                    for (uint32_t j = 0; j < i; j++) {
+                        if (mStreamBufs->valid(j)) {
+                            ops_tbl->unmap_ops(j, -1,
+                                    CAM_MAPPING_BUF_TYPE_STREAM_BUF,
+                                    ops_tbl->userdata);
+                        }
+                    }
+                    return INVALID_OPERATION;
+                }
+            } else {
+                LOGE("Failed to retrieve buffer size (bad index)");
+                return INVALID_OPERATION;
+            }
+        }
+    }
+
+    //regFlags array is allocated by us, but consumed and freed by mm-camera-interface
+    regFlags = (uint8_t *)malloc(sizeof(uint8_t) * mNumBufs);
+    if (!regFlags) {
+        LOGE("Out of memory");
+        for (uint32_t i = 0; i < mNumBufs; i++) {
+            if (mStreamBufs->valid(i)) {
+                ops_tbl->unmap_ops(i, -1, CAM_MAPPING_BUF_TYPE_STREAM_BUF,
+                        ops_tbl->userdata);
+            }
+        }
+        return NO_MEMORY;
+    }
+    memset(regFlags, 0, sizeof(uint8_t) * mNumBufs);
+
+    mBufDefs = (mm_camera_buf_def_t *)malloc(mNumBufs * sizeof(mm_camera_buf_def_t));
+    if (mBufDefs == NULL) {
+        LOGE("Failed to allocate mm_camera_buf_def_t %d", rc);
+        for (uint32_t i = 0; i < mNumBufs; i++) {
+            if (mStreamBufs->valid(i)) {
+                ops_tbl->unmap_ops(i, -1, CAM_MAPPING_BUF_TYPE_STREAM_BUF,
+                        ops_tbl->userdata);
+            }
+        }
+        free(regFlags);
+        regFlags = NULL;
+        return INVALID_OPERATION;
+    }
+    memset(mBufDefs, 0, mNumBufs * sizeof(mm_camera_buf_def_t));
+    // Fill buf defs only for buffers that exist now; the rest are mapped
+    // lazily in bufDone() when they first appear.
+    for (uint32_t i = 0; i < mNumBufs; i++) {
+        if (mStreamBufs->valid(i)) {
+            mStreamBufs->getBufDef(mFrameLenOffset, mBufDefs[i], i);
+        }
+    }
+
+    rc = mStreamBufs->getRegFlags(regFlags);
+    if (rc < 0) {
+        LOGE("getRegFlags failed %d", rc);
+        for (uint32_t i = 0; i < mNumBufs; i++) {
+            if (mStreamBufs->valid(i)) {
+                ops_tbl->unmap_ops(i, -1, CAM_MAPPING_BUF_TYPE_STREAM_BUF,
+                        ops_tbl->userdata);
+            }
+        }
+        free(mBufDefs);
+        mBufDefs = NULL;
+        free(regFlags);
+        regFlags = NULL;
+        return INVALID_OPERATION;
+    }
+
+    *num_bufs = mNumBufs;
+    *initial_reg_flag = regFlags;
+    *bufs = mBufDefs;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : putBufs
+ *
+ * DESCRIPTION: deallocate stream buffers: un-map every registered buffer
+ *              and return the memory object to the owning channel.
+ *
+ * PARAMETERS :
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::putBufs(mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+    int rc = NO_ERROR;
+    Mutex::Autolock lock(mLock);
+
+    // Bug fix: the NULL check used to come AFTER the loop below, which
+    // dereferenced mStreamBufs/mBufDefs and crashed when getBufs had failed
+    // or putBufs was called twice. Guard before touching the arrays.
+    if ((mStreamBufs == NULL) || (mBufDefs == NULL)) {
+        LOGE("getBuf failed previously, or calling putBufs twice");
+    } else {
+        for (uint32_t i = 0; i < mNumBufs; i++) {
+            if (mStreamBufs->valid(i) && NULL != mBufDefs[i].mem_info) {
+                rc = ops_tbl->unmap_ops(i, -1, CAM_MAPPING_BUF_TYPE_STREAM_BUF, ops_tbl->userdata);
+                if (rc < 0) {
+                    LOGE("un-map stream buf failed: %d", rc);
+                }
+            }
+        }
+    }
+    mBufDefs = NULL; // mBufDefs just keep a ptr to the buffer
+                     // mm-camera-interface own the buffer, so no need to free
+    memset(&mFrameLenOffset, 0, sizeof(mFrameLenOffset));
+
+    mChannel->putStreamBufs();
+
+    //need to set mStreamBufs to null because putStreamBufs deletes that memory
+    mStreamBufs = NULL;
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : invalidateBuf
+ *
+ * DESCRIPTION: invalidate the cache of a specific stream buffer.
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer to invalidate
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::invalidateBuf(uint32_t index)
+{
+    // After putBufs the memory object is gone; nothing to invalidate.
+    if (NULL == mStreamBufs) {
+        LOGE("putBufs already called");
+        return INVALID_OPERATION;
+    }
+    return mStreamBufs->invalidateCache(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : cleanInvalidateBuf
+ *
+ * DESCRIPTION: clean and invalidate a specific stream buffer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer to invalidate
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::cleanInvalidateBuf(uint32_t index)
+{
+    // Guard against use after putBufs() has already released the buffers.
+    if (NULL == mStreamBufs) {
+        LOGE("putBufs already called");
+        return INVALID_OPERATION;
+    }
+    // Delegate the clean+invalidate to the stream memory object.
+    return mStreamBufs->cleanInvalidateCache(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : getFrameOffset
+ *
+ * DESCRIPTION: query stream buffer frame offset info
+ *
+ * PARAMETERS :
+ *   @offset  : reference to struct to store the queried frame offset info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::getFrameOffset(cam_frame_len_offset_t &offset)
+{
+    // Copy out the cached frame length/offset info; always succeeds.
+    offset = mFrameLenOffset;
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFrameDimension
+ *
+ * DESCRIPTION: query stream frame dimension info
+ *
+ * PARAMETERS :
+ *   @dim     : reference to struct to store the queried frame dimension
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::getFrameDimension(cam_dimension_t &dim)
+{
+    // Stream info is only valid after init(); fail otherwise.
+    if (NULL == mStreamInfo) {
+        return -1;
+    }
+    dim = mStreamInfo->dim;
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFormat
+ *
+ * DESCRIPTION: query stream format
+ *
+ * PARAMETERS :
+ *   @fmt     : reference to stream format
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::getFormat(cam_format_t &fmt)
+{
+    // Stream info is only valid after init(); fail otherwise.
+    if (NULL == mStreamInfo) {
+        return -1;
+    }
+    fmt = mStreamInfo->fmt;
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMyServerID
+ *
+ * DESCRIPTION: query server stream ID
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : stream ID from server
+ *==========================================================================*/
+uint32_t QCamera3Stream::getMyServerID() {
+    // 0 indicates the stream has not been initialized with server info yet.
+    return (mStreamInfo != NULL) ? mStreamInfo->stream_svr_id : 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMyType
+ *
+ * DESCRIPTION: query stream type
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : type of stream
+ *==========================================================================*/
+cam_stream_type_t QCamera3Stream::getMyType() const
+{
+    // CAM_STREAM_TYPE_MAX serves as the "uninitialized" sentinel.
+    return (mStreamInfo != NULL) ? mStreamInfo->stream_type
+                                 : CAM_STREAM_TYPE_MAX;
+}
+
+/*===========================================================================
+ * FUNCTION   : mapBuf
+ *
+ * DESCRIPTION: map stream related buffer to backend server
+ *
+ * PARAMETERS :
+ *   @buf_type : mapping type of buffer
+ *   @buf_idx  : index of buffer
+ *   @plane_idx: plane index
+ *   @fd       : fd of the buffer
+ *   @buffer : buffer ptr
+ *   @size     : length of the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::mapBuf(uint8_t buf_type, uint32_t buf_idx,
+        int32_t plane_idx, int fd, void *buffer, size_t size)
+{
+    // Straight pass-through to mm-camera-interface for this stream's handles.
+    return mCamOps->map_stream_buf(mCamHandle, mChannelHandle, mHandle,
+            buf_type, buf_idx, plane_idx, fd, size, buffer);
+}
+
+/*===========================================================================
+ * FUNCTION   : unmapBuf
+ *
+ * DESCRIPTION: unmap stream related buffer to backend server
+ *
+ * PARAMETERS :
+ *   @buf_type : mapping type of buffer
+ *   @buf_idx  : index of buffer
+ *   @plane_idx: plane index
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::unmapBuf(uint8_t buf_type, uint32_t buf_idx, int32_t plane_idx)
+{
+    // Straight pass-through to mm-camera-interface for this stream's handles.
+    return mCamOps->unmap_stream_buf(mCamHandle, mChannelHandle, mHandle,
+            buf_type, buf_idx, plane_idx);
+}
+
+/*===========================================================================
+ * FUNCTION   : setParameter
+ *
+ * DESCRIPTION: set stream based parameters
+ *
+ * PARAMETERS :
+ *   @param   : ptr to parameters to be set
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::setParameter(cam_stream_parm_buffer_t &param)
+{
+    int32_t rc = NO_ERROR;
+
+    // BUG FIX: every other accessor of mStreamInfo in this class
+    // (getFormat, getFrameDimension, getMyType, ...) NULL-checks it, but
+    // this method dereferenced it unconditionally — a null-pointer crash
+    // if called before init() or after the info buffer is released.
+    if (mStreamInfo == NULL) {
+        LOGE("stream info is NULL");
+        return INVALID_OPERATION;
+    }
+
+    // The parameter block lives inside the shared stream-info buffer so the
+    // backend can see it: copy in, issue the call, and copy back whatever
+    // the backend updated on success.
+    mStreamInfo->parm_buf = param;
+    rc = mCamOps->set_stream_parms(mCamHandle,
+                                   mChannelHandle,
+                                   mHandle,
+                                   &mStreamInfo->parm_buf);
+    if (rc == NO_ERROR) {
+        param = mStreamInfo->parm_buf;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseFrameData
+ *
+ * DESCRIPTION: callback function to release frame data node
+ *
+ * PARAMETERS :
+ *   @data      : ptr to post process input data
+ *   @user_data : user data ptr (QCameraReprocessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera3Stream::releaseFrameData(void *data, void *user_data)
+{
+    QCamera3Stream *stream = (QCamera3Stream *)user_data;
+    mm_camera_super_buf_t *frame = (mm_camera_super_buf_t *)data;
+
+    if (NULL == stream) {
+        return;
+    }
+    if (UNLIKELY(stream->mBatchSize)) {
+        // Batch mode: recycle the batch container onto the empty list.
+        if (!stream->mFreeBatchBufQ.enqueue((void*) frame->bufs[0])) {
+            LOGE("batchBuf.buf_idx: %d enqueue failed",
+                    frame->bufs[0]->buf_idx);
+        }
+    } else {
+        // Non-batch mode: hand the buffer straight back to the stream.
+        stream->bufDone(frame->bufs[0]->buf_idx);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getBatchBufs
+ *
+ * DESCRIPTION: allocate batch containers for the stream
+ *
+ * PARAMETERS :
+ *   @num_bufs   : number of buffers allocated
+ *   @initial_reg_flag: flag to indicate if buffer needs to be registered
+ *                      at kernel initially
+ *   @bufs       : output of allocated buffers
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::getBatchBufs(
+        uint8_t *num_bufs, uint8_t **initial_reg_flag,
+        mm_camera_buf_def_t **bufs,
+        mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+    int rc = NO_ERROR;
+    uint8_t *regFlags;
+
+    if (!ops_tbl || !num_bufs || !initial_reg_flag || !bufs) {
+        LOGE("input args NULL");
+        return INVALID_OPERATION;
+    }
+    LOGH("Batch container allocation stream type = %d",
+             getMyType());
+
+    Mutex::Autolock lock(mLock);
+
+    mMemOps = ops_tbl;
+
+    //Allocate batch containers
+    mStreamBatchBufs = new QCamera3HeapMemory(1);
+    if (!mStreamBatchBufs) {
+        LOGE("unable to create batch container memory");
+        return NO_MEMORY;
+    }
+    // Allocating single buffer file-descriptor for all batch containers,
+    // mStreamBatchBufs considers all the container bufs as a single buffer. But
+    // QCamera3Stream manages that single buffer as multiple batch buffers
+    LOGD("Allocating batch container memory. numBatch: %d size: %d",
+             mNumBatchBufs, mStreamInfo->user_buf_info.size);
+    rc = mStreamBatchBufs->allocate(
+            mNumBatchBufs * mStreamInfo->user_buf_info.size);
+    if (rc < 0) {
+        LOGE("unable to allocate batch container memory");
+        rc = NO_MEMORY;
+        goto err1;
+    }
+
+    /* map batch buffers. getCnt here returns 1 because of single FD across
+     * batch bufs */
+    for (uint32_t i = 0; i < mStreamBatchBufs->getCnt(); i++) {
+        if (mNumBatchBufs) {
+            //For USER_BUF, size = number_of_container bufs instead of the total
+            //buf size
+            rc = ops_tbl->map_ops(i, -1, mStreamBatchBufs->getFd(i),
+                    (size_t)mNumBatchBufs, mStreamBatchBufs->getPtr(i),
+                    CAM_MAPPING_BUF_TYPE_STREAM_USER_BUF,
+                    ops_tbl->userdata);
+            if (rc < 0) {
+                LOGE("Failed to map stream container buffer: %d",
+                         rc);
+                //Unmap all the buffers that were successfully mapped before
+                //this buffer mapping failed
+                for (size_t j = 0; j < i; j++) {
+                    ops_tbl->unmap_ops(j, -1,
+                            CAM_MAPPING_BUF_TYPE_STREAM_USER_BUF,
+                            ops_tbl->userdata);
+                }
+                goto err2;
+            }
+        } else {
+            LOGE("Failed to retrieve buffer size (bad index)");
+            // BUG FIX: a direct 'return INVALID_OPERATION' here leaked both
+            // the heap allocation and the mStreamBatchBufs object; route
+            // through the common cleanup path instead.
+            rc = INVALID_OPERATION;
+            goto err2;
+        }
+    }
+
+    LOGD("batch bufs successfully mmapped = %d",
+             mNumBatchBufs);
+
+    /* regFlags array is allocated here, but consumed and freed by
+     * mm-camera-interface */
+    regFlags = (uint8_t *)malloc(sizeof(uint8_t) * mNumBatchBufs);
+    if (!regFlags) {
+        LOGE("Out of memory");
+        rc = NO_MEMORY;
+        goto err3;
+    }
+    memset(regFlags, 0, sizeof(uint8_t) * mNumBatchBufs);
+    /* Do not queue the container buffers as the image buffers are not yet
+     * queued. mStreamBatchBufs->getRegFlags is not called as mStreamBatchBufs
+     * considers single buffer is allocated */
+    for (uint32_t i = 0; i < mNumBatchBufs; i++) {
+        regFlags[i] = 0;
+    }
+
+    mBatchBufDefs = (mm_camera_buf_def_t *)
+            malloc(mNumBatchBufs * sizeof(mm_camera_buf_def_t));
+    if (mBatchBufDefs == NULL) {
+        LOGE("mBatchBufDefs memory allocation failed");
+        rc = INVALID_OPERATION;
+        goto err4;
+    }
+    memset(mBatchBufDefs, 0, mNumBatchBufs * sizeof(mm_camera_buf_def_t));
+
+    //Populate bufDef and queue to free batchBufQ
+    for (uint32_t i = 0; i < mNumBatchBufs; i++) {
+        getBatchBufDef(mBatchBufDefs[i], i);
+        if(mFreeBatchBufQ.enqueue((void*) &mBatchBufDefs[i])) {
+            LOGD("mBatchBufDefs[%d]: 0x%p", i, &mBatchBufDefs[i]);
+        } else {
+            LOGE("enqueue mBatchBufDefs[%d] failed", i);
+        }
+    }
+
+    *num_bufs = mNumBatchBufs;
+    *initial_reg_flag = regFlags;
+    *bufs = mBatchBufDefs;
+    LOGH("stream type: %d, numBufs(batch): %d",
+             mStreamInfo->stream_type, mNumBatchBufs);
+
+    return NO_ERROR;
+// Cleanup chain: each label undoes one more successful step, in reverse order.
+err4:
+    free(regFlags);
+err3:
+    for (size_t i = 0; i < mStreamBatchBufs->getCnt(); i++) {
+        ops_tbl->unmap_ops(i, -1, CAM_MAPPING_BUF_TYPE_STREAM_USER_BUF,
+                ops_tbl->userdata);
+    }
+err2:
+    mStreamBatchBufs->deallocate();
+err1:
+    delete mStreamBatchBufs;
+    mStreamBatchBufs = NULL;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : putBatchBufs
+ *
+ * DESCRIPTION: deallocate stream batch buffers
+ *
+ * PARAMETERS :
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::putBatchBufs(mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+    int rc = NO_ERROR;
+    Mutex::Autolock lock(mLock);
+
+    if (mStreamBatchBufs) {
+        // Unmap every container from the backend before releasing the
+        // underlying heap memory.
+        for (uint32_t idx = 0; idx < mStreamBatchBufs->getCnt(); idx++) {
+            rc = ops_tbl->unmap_ops(idx, -1,
+                    CAM_MAPPING_BUF_TYPE_STREAM_USER_BUF, ops_tbl->userdata);
+            if (rc < 0) {
+                LOGE("un-map batch buf failed: %d", rc);
+            }
+        }
+        mStreamBatchBufs->deallocate();
+        delete mStreamBatchBufs;
+        mStreamBatchBufs = NULL;
+    }
+    // mm-camera-interface frees the bufDef array even though it was allocated
+    // by QCamera3Stream, so only drop our reference here.
+    mBatchBufDefs = NULL;
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getBatchBufDef
+ *
+ * DESCRIPTION: query detailed buffer information of batch buffer
+ *
+ * PARAMETERS :
+ *   @bufDef  : [output] reference to struct to store buffer definition
+ *   @index   : [input] index of the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::getBatchBufDef(mm_camera_buf_def_t& batchBufDef,
+        int32_t index)
+{
+    memset(&batchBufDef, 0, sizeof(mm_camera_buf_def_t));
+    if (NULL != mStreamBatchBufs) {
+        // All containers share a single FD and a single allocation; each
+        // container starts at an offset of user_buf_info.size bytes.
+        batchBufDef.fd          = mStreamBatchBufs->getFd(0);
+        batchBufDef.buf_type    = CAM_STREAM_BUF_TYPE_USERPTR;
+        batchBufDef.frame_len   = mStreamInfo->user_buf_info.size;
+        batchBufDef.mem_info    = mStreamBatchBufs;
+        batchBufDef.buffer      = (uint8_t *)mStreamBatchBufs->getPtr(0) +
+                                    (index * mStreamInfo->user_buf_info.size);
+        batchBufDef.buf_idx     = index;
+        batchBufDef.user_buf.num_buffers = mBatchSize;
+        batchBufDef.user_buf.bufs_used = 0;
+        batchBufDef.user_buf.plane_buf = mBufDefs;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : aggregateBufToBatch
+ *
+ * DESCRIPTION: aggregate an image buffer into the current batch container.
+ *
+ * PARAMETERS :
+ *   @bufDef : image buffer to be aggregated into batch
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success always
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::aggregateBufToBatch(mm_camera_buf_def_t& bufDef)
+{
+    int32_t rc = NO_ERROR;
+
+    if (UNLIKELY(!mBatchSize)) {
+        // BUG FIX: error message typo — "Batch mod" -> "Batch mode".
+        LOGE("Batch mode is not enabled");
+        return INVALID_OPERATION;
+    }
+    // Lazily pull an empty container from the free queue on the first
+    // buffer of a new batch.
+    if (!mCurrentBatchBufDef) {
+        mCurrentBatchBufDef = (mm_camera_buf_def_t *)mFreeBatchBufQ.dequeue();
+        if (!mCurrentBatchBufDef) {
+            LOGE("No empty batch buffers is available");
+            return NO_MEMORY;
+        }
+        LOGD("batch buffer: %d dequeued from empty buffer list",
+                mCurrentBatchBufDef->buf_idx);
+    }
+    if (mBufsStaged == mCurrentBatchBufDef->user_buf.num_buffers) {
+        LOGE("batch buffer is already full");
+        return NO_MEMORY;
+    }
+
+    // Record the image buffer's index into the next free container slot.
+    mCurrentBatchBufDef->user_buf.buf_idx[mBufsStaged] = bufDef.buf_idx;
+    mBufsStaged++;
+    LOGD("buffer id: %d aggregated into batch buffer id: %d",
+             bufDef.buf_idx, mCurrentBatchBufDef->buf_idx);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : queueBatchBuf
+ *
+ * DESCRIPTION: queue batch container to downstream.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success always
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::queueBatchBuf()
+{
+    int32_t rc = NO_ERROR;
+
+    // Nothing staged means there is no container to send downstream.
+    if (!mCurrentBatchBufDef) {
+        LOGE("No buffers were queued into batch");
+        return INVALID_OPERATION;
+    }
+
+    // Record how many slots of the container actually hold valid frames.
+    mCurrentBatchBufDef->user_buf.bufs_used = mBufsStaged;
+
+    // Mark the remaining (unused) slots with -1 for a partially-filled batch.
+    for (size_t idx = mBufsStaged;
+            idx < mCurrentBatchBufDef->user_buf.num_buffers; idx++) {
+        mCurrentBatchBufDef->user_buf.buf_idx[idx] = -1;
+    }
+
+    rc = mCamOps->qbuf(mCamHandle, mChannelHandle, mCurrentBatchBufDef);
+    if (rc < 0) {
+        LOGE("queueing of batch buffer: %d failed with err: %d",
+                mCurrentBatchBufDef->buf_idx, rc);
+        return FAILED_TRANSACTION;
+    }
+    LOGD("Batch buf id: %d queued. bufs_used: %d",
+            mCurrentBatchBufDef->buf_idx,
+            mCurrentBatchBufDef->user_buf.bufs_used);
+
+    // Reset aggregation state so the next frame starts a fresh container.
+    mCurrentBatchBufDef = NULL;
+    mBufsStaged = 0;
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : handleBatchBuffer
+ *
+ * DESCRIPTION: separate individual buffers from the batch and issue callback
+ *
+ * PARAMETERS :
+ *   @superBuf : Received superbuf containing batch buffer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success always
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::handleBatchBuffer(mm_camera_super_buf_t *superBuf)
+{
+    int32_t rc = NO_ERROR;
+    mm_camera_super_buf_t *frame;
+    mm_camera_buf_def_t batchBuf;
+
+    // Sanity: this path is only meaningful when batch mode is configured.
+    if (LIKELY(!mBatchSize)) {
+        LOGE("Stream: %d not in batch mode, but batch buffer received",
+                 getMyType());
+        return INVALID_OPERATION;
+    }
+    if (!mDataCB) {
+        LOGE("Data callback not set for batch mode");
+        return BAD_VALUE;
+    }
+    if (!superBuf->bufs[0]) {
+        LOGE("superBuf->bufs[0] is NULL!!");
+        return BAD_VALUE;
+    }
+
+    /* Copy the batch buffer to local and queue the batch buffer to  empty queue
+     * to handle the new requests received while callbacks are in progress */
+    batchBuf = *superBuf->bufs[0];
+    if (!mFreeBatchBufQ.enqueue((void*) superBuf->bufs[0])) {
+        LOGE("batchBuf.buf_idx: %d enqueue failed",
+                batchBuf.buf_idx);
+        free(superBuf);
+        return NO_MEMORY;
+    }
+    LOGD("Received batch buffer: %d bufs_used: %d",
+            batchBuf.buf_idx, batchBuf.user_buf.bufs_used);
+    //dummy local bufDef to issue multiple callbacks
+    mm_camera_buf_def_t buf;
+    memset(&buf, 0, sizeof(mm_camera_buf_def_t));
+
+    // Fan out: issue one data callback per image buffer contained in the
+    // batch, each wrapped in its own heap-allocated superbuf.
+    for (size_t i = 0; i < batchBuf.user_buf.bufs_used; i++) {
+        int32_t buf_idx = batchBuf.user_buf.buf_idx[i];
+        buf = mBufDefs[buf_idx];
+
+        /* this memory is freed inside dataCB. Should not be freed here */
+        frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+        if (!frame) {
+            LOGE("malloc failed. Buffers will be dropped");
+            break;
+        } else {
+            memcpy(frame, superBuf, sizeof(mm_camera_super_buf_t));
+            // NOTE(review): frame->bufs[0] points at the stack-local 'buf',
+            // which is reused each iteration — this assumes the callback
+            // consumes the bufDef synchronously and does not retain the
+            // pointer past its return. Confirm with mDataCB implementations.
+            frame->bufs[0] = &buf;
+
+            mDataCB(frame, this, mUserData);
+        }
+    }
+    LOGD("batch buffer: %d callbacks done",
+            batchBuf.buf_idx);
+
+    // The incoming superbuf itself was only a template for the per-frame
+    // copies; release it here.
+    free(superBuf);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : flushFreeBatchBufQ
+ *
+ * DESCRIPTION: dequeue all the entries of mFreeBatchBufQ and call flush.
+ *              QCameraQueue::flush calls 'free(node->data)' which should be
+ *              avoided for mFreeBatchBufQ as the entries are not allocated
+ *              during each enqueue
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera3Stream::flushFreeBatchBufQ()
+{
+    // Drain all entries first: QCameraQueue::flush() calls free(node->data),
+    // which must not happen for these bufDefs since the queue does not own
+    // them (they live in the mBatchBufDefs array).
+    while (!mFreeBatchBufQ.isEmpty()) {
+        mFreeBatchBufQ.dequeue();
+    }
+    mFreeBatchBufQ.flush();
+}
+
+}; // namespace qcamera
diff --git a/msmcobalt/QCamera2/HAL3/QCamera3Stream.h b/msmcobalt/QCamera2/HAL3/QCamera3Stream.h
new file mode 100644
index 0000000..1e2be49
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL3/QCamera3Stream.h
@@ -0,0 +1,170 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA3_STREAM_H__
+#define __QCAMERA3_STREAM_H__
+
+// System dependencies
+#include <utils/Mutex.h>
+
+// Camera dependencies
+#include "QCamera3Mem.h"
+#include "QCamera3StreamMem.h"
+#include "QCameraCmdThread.h"
+#include "QCameraQueue.h"
+
+extern "C" {
+#include "mm_camera_interface.h"
+}
+
+namespace qcamera {
+
+class QCamera3Channel;
+class QCamera3Stream;
+
+typedef void (*hal3_stream_cb_routine)(mm_camera_super_buf_t *frame,
+                                  QCamera3Stream *stream,
+                                  void *userdata);
+
+/* Wrapper around a single mm-camera-interface stream. Owns the stream's
+ * buffer memory and a dedicated thread for data callbacks, and — when a
+ * non-zero batch size is configured — aggregates image buffers into batch
+ * containers before queueing them to the backend. */
+class QCamera3Stream
+{
+public:
+    QCamera3Stream(uint32_t camHandle,
+                  uint32_t chId,
+                  mm_camera_ops_t *camOps,
+                  cam_padding_info_t *paddingInfo,
+                  QCamera3Channel *channel);
+    virtual ~QCamera3Stream();
+    virtual int32_t init(cam_stream_type_t streamType,
+                         cam_format_t streamFormat,
+                         cam_dimension_t streamDim,
+                         cam_rotation_t streamRotation,
+                         cam_stream_reproc_config_t* reprocess_config,
+                         uint8_t minStreamBufNum,
+                         cam_feature_mask_t postprocess_mask,
+                         cam_is_type_t is_type,
+                         uint32_t batchSize,
+                         hal3_stream_cb_routine stream_cb,
+                         void *userdata);
+    virtual int32_t bufDone(uint32_t index);
+    virtual int32_t bufRelease(int32_t index);
+    virtual int32_t processDataNotify(mm_camera_super_buf_t *bufs);
+    virtual int32_t start();
+    virtual int32_t stop();
+    virtual int32_t queueBatchBuf();
+
+    static void dataNotifyCB(mm_camera_super_buf_t *recvd_frame, void *userdata);
+    static void *dataProcRoutine(void *data);
+    uint32_t getMyHandle() const {return mHandle;}
+    cam_stream_type_t getMyType() const;
+    int32_t getFrameOffset(cam_frame_len_offset_t &offset);
+    int32_t getFrameDimension(cam_dimension_t &dim);
+    int32_t getFormat(cam_format_t &fmt);
+    QCamera3StreamMem *getStreamBufs() {return mStreamBufs;};
+    uint32_t getMyServerID();
+
+    int32_t mapBuf(uint8_t buf_type, uint32_t buf_idx,
+            int32_t plane_idx, int fd, void *buffer, size_t size);
+    int32_t unmapBuf(uint8_t buf_type, uint32_t buf_idx, int32_t plane_idx);
+    int32_t setParameter(cam_stream_parm_buffer_t &param);
+    cam_stream_info_t* getStreamInfo() const {return mStreamInfo; };
+
+    static void releaseFrameData(void *data, void *user_data);
+
+private:
+    uint32_t mCamHandle;     // camera handle from mm-camera-interface
+    uint32_t mChannelHandle; // handle of the channel owning this stream
+    uint32_t mHandle; // stream handle from mm-camera-interface
+    mm_camera_ops_t *mCamOps;       // backend ops table
+    cam_stream_info_t *mStreamInfo; // ptr to stream info buf
+    mm_camera_stream_mem_vtbl_t mMemVtbl;
+    mm_camera_map_unmap_ops_tbl_t *mMemOps;
+    uint8_t mNumBufs;                // number of stream buffers
+    hal3_stream_cb_routine mDataCB;  // upper-layer frame callback
+    void *mUserData;                 // opaque cookie passed back in mDataCB
+
+    QCameraQueue     mDataQ;  // queue of received frames awaiting callback
+    QCameraCmdThread mProcTh; // thread for dataCB
+
+    QCamera3HeapMemory *mStreamInfoBuf; // backing memory for mStreamInfo
+    QCamera3StreamMem *mStreamBufs;     // stream image buffer memory
+    mm_camera_buf_def_t *mBufDefs;      // bufDef array owned by the interface
+    cam_frame_len_offset_t mFrameLenOffset;
+    cam_padding_info_t mPaddingInfo;
+    QCamera3Channel *mChannel; // parent channel (not owned)
+    Mutex mLock;    //Lock controlling access to 'mBufDefs'
+
+    uint32_t mBatchSize; // 0: No batch, non-0: Number of image bufs in a batch
+    uint8_t mNumBatchBufs; //Number of batch buffers which can hold image bufs
+    QCamera3HeapMemory *mStreamBatchBufs; //Pointer to batch buffers memory
+    mm_camera_buf_def_t *mBatchBufDefs; //Pointer to array of batch bufDefs
+    mm_camera_buf_def_t *mCurrentBatchBufDef; //batch buffer in progress during
+                                              //aggregation
+    uint32_t    mBufsStaged; //Number of image buffers aggregated into
+                             //currentBatchBufDef
+    QCameraQueue mFreeBatchBufQ; //Buffer queue containing empty batch buffers
+
+    // Static trampolines registered in mMemVtbl; user_data is 'this'.
+    static int32_t get_bufs(
+                     cam_frame_len_offset_t *offset,
+                     uint8_t *num_bufs,
+                     uint8_t **initial_reg_flag,
+                     mm_camera_buf_def_t **bufs,
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                     void *user_data);
+    static int32_t put_bufs(
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                     void *user_data);
+    static int32_t invalidate_buf(uint32_t index, void *user_data);
+    static int32_t clean_invalidate_buf(uint32_t index, void *user_data);
+
+    // Instance-side implementations of the vtbl callbacks above.
+    int32_t getBufs(cam_frame_len_offset_t *offset,
+                     uint8_t *num_bufs,
+                     uint8_t **initial_reg_flag,
+                     mm_camera_buf_def_t **bufs,
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl);
+    int32_t putBufs(mm_camera_map_unmap_ops_tbl_t *ops_tbl);
+    int32_t invalidateBuf(uint32_t index);
+    int32_t cleanInvalidateBuf(uint32_t index);
+    // Batch-mode helpers (only used when mBatchSize != 0).
+    int32_t getBatchBufs(
+            uint8_t *num_bufs, uint8_t **initial_reg_flag,
+            mm_camera_buf_def_t **bufs,
+            mm_camera_map_unmap_ops_tbl_t *ops_tbl);
+    int32_t putBatchBufs(mm_camera_map_unmap_ops_tbl_t *ops_tbl);
+    int32_t getBatchBufDef(mm_camera_buf_def_t& batchBufDef,
+            int32_t index);
+    int32_t aggregateBufToBatch(mm_camera_buf_def_t& bufDef);
+    int32_t handleBatchBuffer(mm_camera_super_buf_t *superBuf);
+
+    static const char* mStreamNames[CAM_STREAM_TYPE_MAX];
+    void flushFreeBatchBufQ();
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA3_STREAM_H__ */
diff --git a/msmcobalt/QCamera2/HAL3/QCamera3StreamMem.cpp b/msmcobalt/QCamera2/HAL3/QCamera3StreamMem.cpp
new file mode 100644
index 0000000..3843afb
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL3/QCamera3StreamMem.cpp
@@ -0,0 +1,477 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#define LOG_TAG "QCamera3StreamMem"
+
+// System dependencies
+#include "gralloc_priv.h"
+
+// Camera dependencies
+#include "QCamera3StreamMem.h"
+
+using namespace android;
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : QCamera3StreamMem
+ *
+ * DESCRIPTION: default constructor of QCamera3StreamMem
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera3StreamMem::QCamera3StreamMem(uint32_t maxHeapBuffer, bool queueHeapBuffers) :
+        mHeapMem(maxHeapBuffer),
+        // mGrallocMem also receives maxHeapBuffer — presumably as its starting
+        // index so gralloc indices begin after the heap range (all index-based
+        // accessors below dispatch on index < mMaxHeapBuffers). TODO confirm
+        // against QCamera3GrallocMemory's constructor.
+        mGrallocMem(maxHeapBuffer),
+        mMaxHeapBuffers(maxHeapBuffer),
+        mQueueHeapBuffers(queueHeapBuffers)
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : QCamera3StreamMem
+ *
+ * DESCRIPTION: destructor of QCamera3StreamMem
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera3StreamMem::~QCamera3StreamMem()
+{
+    // clear() unregisters all gralloc buffers and deallocates heap buffers.
+    clear();
+}
+
+/*===========================================================================
+ * FUNCTION   : getCnt
+ *
+ * DESCRIPTION: query number of buffers allocated/registered
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of buffers allocated
+ *==========================================================================*/
+uint32_t QCamera3StreamMem::getCnt()
+{
+    Mutex::Autolock lock(mLock);
+
+    // Combined count across both backing stores (heap + gralloc).
+    return (mHeapMem.getCnt() + mGrallocMem.getCnt());
+}
+
+/*===========================================================================
+ * FUNCTION   : getRegFlags
+ *
+ * DESCRIPTION: query initial reg flags
+ *
+ * PARAMETERS :
+ *   @regFlags: initial reg flags of the allocated/registered buffers
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera3StreamMem::getRegFlags(uint8_t * regFlags)
+{
+    // Assume that all buffers allocated can be queued.
+    // Only the heap-buffer entries [0, mHeapMem.getCnt()) are written here;
+    // gralloc entries are left untouched — presumably initialized by the
+    // caller. TODO confirm against QCamera3Stream's usage.
+    // NOTE(review): unlike the other accessors this does not take mLock —
+    // verify callers serialize with allocation/deallocation.
+    for (uint32_t i = 0; i < mHeapMem.getCnt(); i ++)
+        regFlags[i] = (mQueueHeapBuffers ? 1 : 0);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFd
+ *
+ * DESCRIPTION: return file descriptor of the indexed buffer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : file descriptor
+ *==========================================================================*/
+int QCamera3StreamMem::getFd(uint32_t index)
+{
+    Mutex::Autolock lock(mLock);
+
+    // Indices below mMaxHeapBuffers belong to the heap store; the rest map
+    // to gralloc buffers (same dispatch rule for all index-based accessors).
+    if (index < mMaxHeapBuffers)
+        return mHeapMem.getFd(index);
+    else
+        return mGrallocMem.getFd(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : getSize
+ *
+ * DESCRIPTION: return buffer size of the indexed buffer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : buffer size
+ *==========================================================================*/
+ssize_t QCamera3StreamMem::getSize(uint32_t index)
+{
+    Mutex::Autolock lock(mLock);
+
+    // Heap store for low indices, gralloc store for the rest.
+    if (index < mMaxHeapBuffers)
+        return mHeapMem.getSize(index);
+    else
+        return mGrallocMem.getSize(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : invalidateCache
+ *
+ * DESCRIPTION: invalidate the cache of the indexed buffer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera3StreamMem::invalidateCache(uint32_t index)
+{
+    Mutex::Autolock lock(mLock);
+
+    // Dispatch to the owning store by index range.
+    if (index < mMaxHeapBuffers)
+        return mHeapMem.invalidateCache(index);
+    else
+        return mGrallocMem.invalidateCache(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : cleanInvalidateCache
+ *
+ * DESCRIPTION: clean and invalidate the cache of the indexed buffer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera3StreamMem::cleanInvalidateCache(uint32_t index)
+{
+    Mutex::Autolock lock(mLock);
+
+    // Dispatch to the owning store by index range.
+    if (index < mMaxHeapBuffers)
+        return mHeapMem.cleanInvalidateCache(index);
+    else
+        return mGrallocMem.cleanInvalidateCache(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : getBufDef
+ *
+ * DESCRIPTION: query detailed buffer information
+ *
+ * PARAMETERS :
+ *   @offset  : [input] frame buffer offset
+ *   @bufDef  : [output] reference to struct to store buffer definition
+ *   @index   : [input] index of the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3StreamMem::getBufDef(const cam_frame_len_offset_t &offset,
+        mm_camera_buf_def_t &bufDef, uint32_t index)
+{
+    int32_t ret = NO_ERROR;
+
+    // NOTE(review): this is the only index-dispatching accessor in this class
+    // that does not take mLock — confirm whether that is intentional (e.g.
+    // only called while the stream is quiesced) or an oversight.
+    if (index < mMaxHeapBuffers)
+        ret = mHeapMem.getBufDef(offset, bufDef, index);
+    else
+        ret = mGrallocMem.getBufDef(offset, bufDef, index);
+
+    // Point mem_info back at this wrapper (overriding whatever the backing
+    // store set) so downstream code can recover the QCamera3StreamMem object.
+    bufDef.mem_info = (void *)this;
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : getPtr
+ *
+ * DESCRIPTION: return virtual address of the indexed buffer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : virtual address
+ *==========================================================================*/
+void* QCamera3StreamMem::getPtr(uint32_t index)
+{
+    Mutex::Autolock lock(mLock);
+
+    // Dispatch to the owning store by index range.
+    if (index < mMaxHeapBuffers)
+        return mHeapMem.getPtr(index);
+    else
+        return mGrallocMem.getPtr(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : valid
+ *
+ * DESCRIPTION: return whether there is a valid buffer at the current index
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : true if there is a buffer, false otherwise
+ *==========================================================================*/
+bool QCamera3StreamMem::valid(uint32_t index)
+{
+    Mutex::Autolock lock(mLock);
+
+    // A slot is considered occupied iff its backing store reports a
+    // positive size for that index.
+    if (index < mMaxHeapBuffers)
+        return (mHeapMem.getSize(index) > 0);
+    else
+        return (mGrallocMem.getSize(index) > 0);
+}
+
+/*===========================================================================
+ * FUNCTION   : registerBuffer
+ *
+ * DESCRIPTION: registers frameworks-allocated gralloc buffer_handle_t
+ *
+ * PARAMETERS :
+ *   @buffers : buffer_handle_t pointer
+ *   @type :    cam_stream_type_t
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera3StreamMem::registerBuffer(buffer_handle_t *buffer,
+        cam_stream_type_t type)
+{
+    Mutex::Autolock lock(mLock);
+    // Gralloc-only operation; heap buffers are never "registered".
+    return mGrallocMem.registerBuffer(buffer, type);
+}
+
+
+/*===========================================================================
+ * FUNCTION   : unregisterBuffer
+ *
+ * DESCRIPTION: unregister buffer
+ *
+ * PARAMETERS :
+ *   @idx     : unregister buffer at index 'idx'
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3StreamMem::unregisterBuffer(size_t idx)
+{
+    Mutex::Autolock lock(mLock);
+    // NOTE(review): ensure the declaration in QCamera3StreamMem.h uses a
+    // matching parameter type (size_t) — an out-of-class member definition
+    // must match its in-class declaration exactly.
+    return mGrallocMem.unregisterBuffer(idx);
+}
+
+/*===========================================================================
+ * FUNCTION   : getMatchBufIndex
+ *
+ * DESCRIPTION: query buffer index by object ptr
+ *
+ * PARAMETERS :
+ *   @opaque  : opaque ptr
+ *
+ * RETURN     : buffer index if match found,
+ *              -1 if failed
+ *==========================================================================*/
+int QCamera3StreamMem::getMatchBufIndex(void *object)
+{
+    Mutex::Autolock lock(mLock);
+    // Gralloc-only lookup; heap buffers are not searched.
+    return mGrallocMem.getMatchBufIndex(object);
+}
+
+/*===========================================================================
+ * FUNCTION   : getBufferHandle
+ *
+ * DESCRIPTION: return framework pointer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : buffer ptr if match found
+                NULL if failed
+ *==========================================================================*/
+void *QCamera3StreamMem::getBufferHandle(uint32_t index)
+{
+    Mutex::Autolock lock(mLock);
+    // Gralloc-only: heap buffers have no framework buffer_handle_t.
+    return mGrallocMem.getBufferHandle(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : unregisterBuffers
+ *
+ * DESCRIPTION: unregister buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3StreamMem::unregisterBuffers()
+{
+    Mutex::Autolock lock(mLock);
+    // Drops all gralloc registrations; heap buffers are untouched
+    // (those are released via deallocate()).
+    mGrallocMem.unregisterBuffers();
+}
+
+
+/*===========================================================================
+ * FUNCTION   : allocateAll
+ *
+ * DESCRIPTION: allocate the full set of heap buffers of the given size
+ *              (the count was fixed at construction via maxHeapBuffer)
+ *
+ * PARAMETERS :
+ *   @size    : length of each buffer to be allocated
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera3StreamMem::allocateAll(size_t size)
+{
+    Mutex::Autolock lock(mLock);
+    return mHeapMem.allocate(size);
+}
+
+/*===========================================================================
+ * FUNCTION   : allocateOne
+ *
+ * DESCRIPTION: allocate a single additional heap buffer of the given size
+ *
+ * PARAMETERS :
+ *   @size    : length of the buffer to be allocated
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera3StreamMem::allocateOne(size_t size)
+{
+    Mutex::Autolock lock(mLock);
+    return mHeapMem.allocateOne(size);
+}
+
+/*===========================================================================
+ * FUNCTION   : deallocate
+ *
+ * DESCRIPTION: deallocate heap buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3StreamMem::deallocate()
+{
+    Mutex::Autolock lock(mLock);
+    // Releases heap buffers only; gralloc buffers are framework-owned and
+    // are released via unregisterBuffers().
+    mHeapMem.deallocate();
+}
+
+/*===========================================================================
+ * FUNCTION   : markFrameNumber
+ *
+ * DESCRIPTION: We use this function from the request call path to mark the
+ *              buffers with the frame number they are intended for this info
+ *              is used later when giving out callback & it is duty of PP to
+ *              ensure that data for that particular frameNumber/Request is
+ *              written to this buffer.
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *   @frame#  : Frame number from the framework
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3StreamMem::markFrameNumber(uint32_t index, uint32_t frameNumber)
+{
+    Mutex::Autolock lock(mLock);
+    // Tag the buffer (heap or gralloc, by index range) with the framework
+    // frame number it is destined for.
+    if (index < mMaxHeapBuffers)
+        return mHeapMem.markFrameNumber(index, frameNumber);
+    else
+        return mGrallocMem.markFrameNumber(index, frameNumber);
+}
+
+/*===========================================================================
+ * FUNCTION   : getFrameNumber
+ *
+ * DESCRIPTION: We use this to fetch the frameNumber for the request with which
+ *              this buffer was given to HAL
+ *
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : int32_t frameNumber
+ *              positive/zero  -- success
+ *              negative failure
+ *==========================================================================*/
+int32_t QCamera3StreamMem::getFrameNumber(uint32_t index)
+{
+    Mutex::Autolock lock(mLock);
+    // Inverse of markFrameNumber(): recover the frame number previously
+    // tagged on the buffer at this index.
+    if (index < mMaxHeapBuffers)
+        return mHeapMem.getFrameNumber(index);
+    else
+        return mGrallocMem.getFrameNumber(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : getGrallocBufferIndex
+ *
+ * DESCRIPTION: We use this to fetch the gralloc buffer index based on frameNumber
+ *
+ * PARAMETERS :
+ *   @frameNumber : frame Number
+ *
+ * RETURN     : int32_t buffer index
+ *              positive/zero  -- success
+ *              negative failure
+ *==========================================================================*/
+int32_t QCamera3StreamMem::getGrallocBufferIndex(uint32_t frameNumber)
+{
+    Mutex::Autolock lock(mLock);
+    // Reverse lookup: frame number -> gralloc buffer index.
+    int32_t index = mGrallocMem.getBufferIndex(frameNumber);
+    return index;
+}
+
+/*===========================================================================
+ * FUNCTION   : getHeapBufferIndex
+ *
+ * DESCRIPTION: We use this to fetch the heap buffer index based on frameNumber
+ *
+ * PARAMETERS :
+ *   @frameNumber : frame Number
+ *
+ * RETURN     : int32_t buffer index
+ *              positive/zero  -- success
+ *              negative failure
+ *==========================================================================*/
+int32_t QCamera3StreamMem::getHeapBufferIndex(uint32_t frameNumber)
+{
+    Mutex::Autolock lock(mLock);
+    // Reverse lookup: frame number -> heap buffer index.
+    int32_t index = mHeapMem.getBufferIndex(frameNumber);
+    return index;
+}
+
+}; //namespace qcamera
diff --git a/msmcobalt/QCamera2/HAL3/QCamera3StreamMem.h b/msmcobalt/QCamera2/HAL3/QCamera3StreamMem.h
new file mode 100644
index 0000000..74bab06
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL3/QCamera3StreamMem.h
@@ -0,0 +1,97 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA3_STREAMMEM_H__
+#define __QCAMERA3_STREAMMEM_H__
+
+// System dependencies
+#include <utils/Mutex.h>
+
+// Camera dependencies
+#include "QCamera3Mem.h"
+
+extern "C" {
+#include "mm_camera_interface.h"
+}
+
+using namespace android;
+
+namespace qcamera {
+
+class QCamera3StreamMem {
+public:
+    QCamera3StreamMem(uint32_t maxHeapBuffer, bool queueAll = true);
+    virtual ~QCamera3StreamMem();
+
+    uint32_t getCnt();
+    int getRegFlags(uint8_t *regFlags);
+
+    // Helper function to access individual QCamera3Buffer object
+    // (index < maxHeapBuffer selects the heap store, otherwise gralloc)
+    int getFd(uint32_t index);
+    ssize_t getSize(uint32_t index);
+    int invalidateCache(uint32_t index);
+    int cleanInvalidateCache(uint32_t index);
+    int32_t getBufDef(const cam_frame_len_offset_t &offset,
+            mm_camera_buf_def_t &bufDef, uint32_t index);
+    void *getPtr(uint32_t index);
+
+    bool valid(uint32_t index);
+
+    // Gralloc buffer related functions
+    int registerBuffer(buffer_handle_t *buffer, cam_stream_type_t type);
+    // size_t parameter to match the out-of-class definition in
+    // QCamera3StreamMem.cpp (a uint32_t here would fail to match it).
+    int32_t unregisterBuffer(size_t idx);
+    int getMatchBufIndex(void *object);
+    void *getBufferHandle(uint32_t index);
+    void unregisterBuffers(); //TODO: replace with unified clear() function?
+
+    // Heap buffer related functions
+    int allocateAll(size_t size);
+    int allocateOne(size_t size);
+    void deallocate(); //TODO: replace with unified clear() function?
+
+    // Clear function: unregister for gralloc buffer, and deallocate for heap buffer
+    void clear() {unregisterBuffers(); deallocate(); }
+
+    // Frame number getter and setter
+    int32_t markFrameNumber(uint32_t index, uint32_t frameNumber);
+    int32_t getFrameNumber(uint32_t index);
+    int32_t getGrallocBufferIndex(uint32_t frameNumber);
+    int32_t getHeapBufferIndex(uint32_t frameNumber);
+
+private:
+    //variables
+    QCamera3HeapMemory mHeapMem;       // HAL-allocated heap buffers
+    QCamera3GrallocMemory mGrallocMem; // framework-registered gralloc buffers
+    uint32_t mMaxHeapBuffers;          // index boundary between the two stores
+    Mutex mLock;                       // guards both stores
+    bool mQueueHeapBuffers;            // initial reg flag for heap buffers
+};
+
+};
+#endif // __QCAMERA3_STREAMMEM_H__
diff --git a/msmcobalt/QCamera2/HAL3/QCamera3VendorTags.cpp b/msmcobalt/QCamera2/HAL3/QCamera3VendorTags.cpp
new file mode 100644
index 0000000..1621d24
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL3/QCamera3VendorTags.cpp
@@ -0,0 +1,429 @@
+/* Copyright (c) 2014-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCamera3VendorTags"
+
+// Camera dependencies
+#include "QCamera3HWI.h"
+#include "QCamera3VendorTags.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+using namespace android;
+
+namespace qcamera {
+
+// Upper bound (exclusive) of the tag range for each vendor section.
+// Order must match qcamera3_ext_section exactly — entries are indexed by
+// (section - VENDOR_SECTION) in get_tag_name()/get_tag_type().
+enum qcamera3_ext_tags qcamera3_ext3_section_bounds[QCAMERA3_SECTIONS_END -
+    VENDOR_SECTION] = {
+        QCAMERA3_PRIVATEDATA_END,
+        QCAMERA3_CDS_END,
+        QCAMERA3_OPAQUE_RAW_END,
+        QCAMERA3_CROP_END,
+        QCAMERA3_TUNING_META_DATA_END,
+        QCAMERA3_TEMPORAL_DENOISE_END,
+        QCAMERA3_AV_TIMER_END,
+        QCAMERA3_SENSOR_META_DATA_END,
+        NEXUS_EXPERIMENTAL_2015_END,
+        QCAMERA3_DUALCAM_LINK_META_DATA_END,
+        QCAMERA3_DUALCAM_CALIB_META_DATA_END,
+        QCAMERA3_HAL_PRIVATEDATA_END,
+        QCAMERA3_JPEG_ENCODE_CROP_END
+} ;
+
+// Per-tag descriptor: human-readable name plus camera_metadata type
+// (TYPE_BYTE / TYPE_INT32 / TYPE_FLOAT / ...).
+typedef struct vendor_tag_info {
+    const char *tag_name;
+    uint8_t     tag_type;
+} vendor_tag_info_t;
+
+// Section name strings, indexed by (section - VENDOR_SECTION).
+// Order must match qcamera3_ext_section and qcamera3_ext3_section_bounds.
+const char *qcamera3_ext_section_names[QCAMERA3_SECTIONS_END -
+        VENDOR_SECTION] = {
+    "org.codeaurora.qcamera3.privatedata",
+    "org.codeaurora.qcamera3.CDS",
+    "org.codeaurora.qcamera3.opaque_raw",
+    "org.codeaurora.qcamera3.crop",
+    "org.codeaurora.qcamera3.tuning_meta_data",
+    "org.codeaurora.qcamera3.temporal_denoise",
+    "org.codeaurora.qcamera3.av_timer",
+    "org.codeaurora.qcamera3.sensor_meta_data",
+    "com.google.nexus.experimental2015",
+    "org.codeaurora.qcamera3.dualcam_link_meta_data",
+    "org.codeaurora.qcamera3.dualcam_calib_meta_data",
+    "org.codeaurora.qcamera3.hal_private_data",
+    "org.codeaurora.qcamera3.jpeg_encode_crop"
+};
+
+// Per-section tag tables. Each table is indexed by (tag & 0xFFFF) and must
+// list entries in the same order as that section's tag enum.
+vendor_tag_info_t qcamera3_privatedata[QCAMERA3_PRIVATEDATA_END - QCAMERA3_PRIVATEDATA_START] = {
+    { "privatedata_reprocess", TYPE_INT32 }
+};
+
+vendor_tag_info_t qcamera3_cds[QCAMERA3_CDS_END - QCAMERA3_CDS_START] = {
+    { "cds_mode", TYPE_INT32 },
+    { "cds_info", TYPE_BYTE }
+};
+
+vendor_tag_info_t qcamera3_opaque_raw[QCAMERA3_OPAQUE_RAW_END -
+        QCAMERA3_OPAQUE_RAW_START] = {
+    { "opaque_raw_strides", TYPE_INT32 },
+    { "opaque_raw_format", TYPE_BYTE }
+};
+
+vendor_tag_info_t qcamera3_crop[QCAMERA3_CROP_END- QCAMERA3_CROP_START] = {
+    { "count", TYPE_INT32 },
+    { "data", TYPE_INT32},
+    { "roimap", TYPE_INT32 }
+};
+
+vendor_tag_info_t qcamera3_tuning_meta_data[QCAMERA3_TUNING_META_DATA_END -
+        QCAMERA3_TUNING_META_DATA_START] = {
+    { "tuning_meta_data_blob", TYPE_INT32 }
+};
+
+vendor_tag_info_t qcamera3_temporal_denoise[QCAMERA3_TEMPORAL_DENOISE_END -
+        QCAMERA3_TEMPORAL_DENOISE_START] = {
+    { "enable", TYPE_BYTE },
+    { "process_type", TYPE_INT32 }
+};
+
+// Use the vendor_tag_info_t typedef consistently (these two tables used the
+// raw struct tag, which only compiles as C++ and is inconsistent with the
+// rest of the file).
+vendor_tag_info_t qcamera3_av_timer[QCAMERA3_AV_TIMER_END -
+                                  QCAMERA3_AV_TIMER_START] = {
+   {"use_av_timer", TYPE_BYTE }
+};
+
+vendor_tag_info_t qcamera3_sensor_meta_data[QCAMERA3_SENSOR_META_DATA_END -
+                                  QCAMERA3_SENSOR_META_DATA_START] = {
+   {"dynamic_black_level_pattern", TYPE_FLOAT },
+   {"is_mono_only",                TYPE_BYTE }
+};
+
+vendor_tag_info_t nexus_experimental_2015[NEXUS_EXPERIMENTAL_2015_END -
+        NEXUS_EXPERIMENTAL_2015_START] = {
+    {"sensor.dynamicBlackLevel", TYPE_FLOAT },
+    {"sensor.info.opticallyShieldedRegions", TYPE_INT32 }
+};
+
+vendor_tag_info_t
+        qcamera3_dualcam_link_meta_data[QCAMERA3_DUALCAM_LINK_META_DATA_END -
+        QCAMERA3_DUALCAM_LINK_META_DATA_START] = {
+    { "enable",            TYPE_BYTE },
+    { "is_main",           TYPE_BYTE },
+    { "related_camera_id", TYPE_INT32 }
+};
+
+vendor_tag_info_t
+        qcamera3_dualcam_calib_meta_data[QCAMERA3_DUALCAM_CALIB_META_DATA_END -
+        QCAMERA3_DUALCAM_CALIB_META_DATA_START] = {
+    { "dualcam_calib_meta_data_blob", TYPE_BYTE }
+};
+
+vendor_tag_info_t
+        qcamera3_hal_privatedata[QCAMERA3_HAL_PRIVATEDATA_END -
+        QCAMERA3_HAL_PRIVATEDATA_START] = {
+    { "reprocess_flags",      TYPE_BYTE },
+    { "ddm_data_blob",        TYPE_BYTE }
+};
+
+vendor_tag_info_t
+        qcamera3_jpep_encode_crop[QCAMERA3_JPEG_ENCODE_CROP_END -
+        QCAMERA3_JPEG_ENCODE_CROP_START] = {
+    { "enable", TYPE_BYTE },
+    { "rect",   TYPE_INT32 },
+    { "roi",    TYPE_INT32}
+};
+
+// Master dispatch table, indexed by (section - VENDOR_SECTION).
+// Order must match qcamera3_ext_section.
+vendor_tag_info_t *qcamera3_tag_info[QCAMERA3_SECTIONS_END -
+        VENDOR_SECTION] = {
+    qcamera3_privatedata,
+    qcamera3_cds,
+    qcamera3_opaque_raw,
+    qcamera3_crop,
+    qcamera3_tuning_meta_data,
+    qcamera3_temporal_denoise,
+    qcamera3_av_timer,
+    qcamera3_sensor_meta_data,
+    nexus_experimental_2015,
+    qcamera3_dualcam_link_meta_data,
+    qcamera3_dualcam_calib_meta_data,
+    qcamera3_hal_privatedata,
+    qcamera3_jpep_encode_crop
+};
+
+// Flat list of every supported vendor tag, returned by get_all_tags().
+// get_tag_count() derives its count from this array's length.
+uint32_t qcamera3_all_tags[] = {
+    // QCAMERA3_PRIVATEDATA
+    (uint32_t)QCAMERA3_PRIVATEDATA_REPROCESS,
+
+    // QCAMERA3_CDS
+    (uint32_t)QCAMERA3_CDS_MODE,
+    (uint32_t)QCAMERA3_CDS_INFO,
+
+    // QCAMERA3_OPAQUE_RAW
+    (uint32_t)QCAMERA3_OPAQUE_RAW_STRIDES,
+    (uint32_t)QCAMERA3_OPAQUE_RAW_FORMAT,
+
+    // QCAMERA3_CROP
+    (uint32_t)QCAMERA3_CROP_COUNT_REPROCESS,
+    (uint32_t)QCAMERA3_CROP_REPROCESS,
+    (uint32_t)QCAMERA3_CROP_ROI_MAP_REPROCESS,
+
+    // QCAMERA3_TUNING_META_DATA
+    (uint32_t)QCAMERA3_TUNING_META_DATA_BLOB,
+
+    // QCAMERA3_TEMPORAL_DENOISE
+    (uint32_t)QCAMERA3_TEMPORAL_DENOISE_ENABLE,
+    (uint32_t)QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE,
+    //QCAMERA3_AVTIMER
+    (uint32_t)QCAMERA3_USE_AV_TIMER,
+
+    //QCAMERA3_SENSOR_META_DATA
+    (uint32_t)QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN,
+    (uint32_t)QCAMERA3_SENSOR_IS_MONO_ONLY,
+
+    //NEXUS_EXPERIMENTAL_2015
+    (uint32_t)NEXUS_EXPERIMENTAL_2015_SENSOR_DYNAMIC_BLACK_LEVEL,
+    (uint32_t)NEXUS_EXPERIMENTAL_2015_SENSOR_INFO_OPTICALLY_SHIELDED_REGIONS,
+
+    // QCAMERA3_DUALCAM_LINK_META_DATA
+    (uint32_t)QCAMERA3_DUALCAM_LINK_ENABLE,
+    (uint32_t)QCAMERA3_DUALCAM_LINK_IS_MAIN,
+    (uint32_t)QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID,
+
+    // QCAMERA3_DUALCAM_CALIB_META_DATA
+    (uint32_t)QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
+
+    // QCAMERA3_HAL_PRIVATEDATA
+    (uint32_t)QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
+    (uint32_t)QCAMERA3_HAL_PRIVATEDATA_DDM_DATA_BLOB,
+
+    // QCAMERA3_JPEG_ENCODE_CROP
+    (uint32_t)QCAMERA3_JPEG_ENCODE_CROP_ENABLE,
+    (uint32_t)QCAMERA3_JPEG_ENCODE_CROP_RECT,
+    (uint32_t)QCAMERA3_JPEG_ENCODE_CROP_ROI
+
+};
+
+const vendor_tag_ops_t* QCamera3VendorTags::Ops = NULL;
+
+/*===========================================================================
+ * FUNCTION   : get_vendor_tag_ops
+ *
+ * DESCRIPTION: Get the metadata vendor tag function pointers
+ *
+ * PARAMETERS :
+ *    @ops   : function pointer table to be filled by HAL
+ *
+ *
+ * RETURN     : NONE
+ *==========================================================================*/
+void QCamera3VendorTags::get_vendor_tag_ops(
+                                vendor_tag_ops_t* ops)
+{
+    LOGL("E");
+
+    // Remember the framework's ops table so the static callbacks below can
+    // verify they are being invoked with the table we populated.
+    Ops = ops;
+
+    ops->get_tag_count = get_tag_count;
+    ops->get_all_tags = get_all_tags;
+    ops->get_section_name = get_section_name;
+    ops->get_tag_name = get_tag_name;
+    ops->get_tag_type = get_tag_type;
+    ops->reserved[0] = NULL;
+
+    LOGL("X");
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : get_tag_count
+ *
+ * DESCRIPTION: Get number of vendor tags supported
+ *
+ * PARAMETERS :
+ *    @ops   :  Vendor tag ops data structure
+ *
+ *
+ * RETURN     : Number of vendor tags supported
+ *==========================================================================*/
+
+int QCamera3VendorTags::get_tag_count(
+                const vendor_tag_ops_t * ops)
+{
+    size_t count = 0;
+    // Only answer for the ops table we populated in get_vendor_tag_ops();
+    // any other table gets a count of 0.
+    if (ops == Ops)
+        count = sizeof(qcamera3_all_tags)/sizeof(qcamera3_all_tags[0]);
+
+    // %zu: count is size_t; %d is a format/argument mismatch (UB on LP64).
+    LOGL("count is %zu", count);
+    return (int)count;
+}
+
+/*===========================================================================
+ * FUNCTION   : get_all_tags
+ *
+ * DESCRIPTION: Fill array with all supported vendor tags
+ *
+ * PARAMETERS :
+ *    @ops      :  Vendor tag ops data structure
+ *    @tag_array:  array of metadata tags
+ *
+ * RETURN     : Success: the section name of the specific tag
+ *              Failure: NULL
+ *==========================================================================*/
+void QCamera3VendorTags::get_all_tags(
+                const vendor_tag_ops_t * ops,
+                uint32_t *g_array)
+{
+    // Ignore calls for an ops table we did not populate.
+    if (ops != Ops)
+        return;
+
+    // g_array must have room for get_tag_count() entries (framework contract).
+    for (size_t i = 0;
+            i < sizeof(qcamera3_all_tags)/sizeof(qcamera3_all_tags[0]);
+            i++) {
+        g_array[i] = qcamera3_all_tags[i];
+        // %zu/%u: i is size_t and g_array[i] is uint32_t; %d mismatched both.
+        LOGD("g_array[%zu] is %u", i, g_array[i]);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : get_section_name
+ *
+ * DESCRIPTION: Get section name for vendor tag
+ *
+ * PARAMETERS :
+ *    @ops   :  Vendor tag ops structure
+ *    @tag   :  Vendor specific tag
+ *
+ *
+ * RETURN     : Success: the section name of the specific tag
+ *              Failure: NULL
+ *==========================================================================*/
+
+const char* QCamera3VendorTags::get_section_name(
+                const vendor_tag_ops_t * ops,
+                uint32_t tag)
+{
+    LOGL("E");
+    // Reject calls for an ops table we did not populate.
+    if (ops != Ops)
+        return NULL;
+
+    const char *ret;
+    // Section id lives in the high 16 bits of the tag.
+    uint32_t section = tag >> 16;
+
+    if (section < VENDOR_SECTION || section >= QCAMERA3_SECTIONS_END)
+        ret = NULL;
+    else
+        ret = qcamera3_ext_section_names[section - VENDOR_SECTION];
+
+    if (ret)
+        LOGL("section_name[%d] is %s", tag, ret);
+    LOGL("X");
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : get_tag_name
+ *
+ * DESCRIPTION: Get name of a vendor specific tag
+ *
+ * PARAMETERS :
+ *    @tag   :  Vendor specific tag
+ *
+ *
+ * RETURN     : Success: the name of the specific tag
+ *              Failure: NULL
+ *==========================================================================*/
+const char* QCamera3VendorTags::get_tag_name(
+                const vendor_tag_ops_t * ops,
+                uint32_t tag)
+{
+    LOGL("E");
+    const char *ret;
+    // Tag layout: high 16 bits = section id, low 16 bits = index within
+    // that section's table.
+    uint32_t section = tag >> 16;
+    uint32_t section_index = section - VENDOR_SECTION;
+    uint32_t tag_index = tag & 0xFFFF;
+
+    // Reject calls for an ops table we did not populate.
+    if (ops != Ops) {
+        ret = NULL;
+        goto done;
+    }
+
+    // Validate the section range BEFORE indexing the bounds table; the
+    // bounds check then rejects tags past the section's last entry.
+    if (section < VENDOR_SECTION || section >= QCAMERA3_SECTIONS_END)
+        ret = NULL;
+    else if (tag >= (uint32_t)qcamera3_ext3_section_bounds[section_index])
+        ret = NULL;
+    else
+        ret = qcamera3_tag_info[section_index][tag_index].tag_name;
+
+    if (ret)
+        LOGL("tag name for tag %d is %s", tag, ret);
+    LOGL("X");
+
+done:
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : get_tag_type
+ *
+ * DESCRIPTION: Get type of a vendor specific tag
+ *
+ * PARAMETERS :
+ *    @tag   :  Vendor specific tag
+ *
+ *
+ * RETURN     : Success: the type of the specific tag
+ *              Failure: -1
+ *==========================================================================*/
+int QCamera3VendorTags::get_tag_type(
+                const vendor_tag_ops_t *ops,
+                uint32_t tag)
+{
+    LOGL("E");
+    int ret;
+    // Same tag decomposition as get_tag_name(): section in the high 16
+    // bits, per-section index in the low 16 bits.
+    uint32_t section = tag >> 16;
+    uint32_t section_index = section - VENDOR_SECTION;
+    uint32_t tag_index = tag & 0xFFFF;
+
+    // Reject calls for an ops table we did not populate.
+    if (ops != Ops) {
+        ret = -1;
+        goto done;
+    }
+    // Range-check section first, then the tag against the section's bound.
+    if (section < VENDOR_SECTION || section >= QCAMERA3_SECTIONS_END)
+        ret = -1;
+    else if (tag >= (uint32_t )qcamera3_ext3_section_bounds[section_index])
+        ret = -1;
+    else
+        ret = qcamera3_tag_info[section_index][tag_index].tag_type;
+
+    LOGL("tag type for tag %d is %d", tag, ret);
+    LOGL("X");
+done:
+    return ret;
+}
+
+}; //end namespace qcamera
diff --git a/msmcobalt/QCamera2/HAL3/QCamera3VendorTags.h b/msmcobalt/QCamera2/HAL3/QCamera3VendorTags.h
new file mode 100644
index 0000000..9fc180e
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL3/QCamera3VendorTags.h
@@ -0,0 +1,223 @@
+/* Copyright (c) 2014-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#ifndef __QCAMERA3VENDORTAGS_H__
+#define __QCAMERA3VENDORTAGS_H__
+
+// Camera dependencies
+#include "system/camera_metadata.h"
+
+namespace qcamera {
+
+enum qcamera3_ext_section {  // vendor tag section ids; a tag's section is its high 16 bits
+    QCAMERA3_PRIVATEDATA = VENDOR_SECTION,  // first vendor section (VENDOR_SECTION from camera_metadata)
+    QCAMERA3_CDS,
+    QCAMERA3_OPAQUE_RAW,
+    QCAMERA3_CROP,
+    QCAMERA3_TUNING_META_DATA,
+    QCAMERA3_TEMPORAL_DENOISE,
+    QCAMERA3_AV_TIMER,
+    QCAMERA3_SENSOR_META_DATA,
+    NEXUS_EXPERIMENTAL_2015,
+    QCAMERA3_DUALCAM_LINK_META_DATA,
+    QCAMERA3_DUALCAM_CALIB_META_DATA,
+    QCAMERA3_HAL_PRIVATEDATA,
+    QCAMERA3_JPEG_ENCODE_CROP,
+    QCAMERA3_SECTIONS_END  // one past the last valid section; used for range checks
+};
+
+enum qcamera3_ext_section_ranges {  // first tag id of each section: section id shifted into the high 16 bits
+    QCAMERA3_PRIVATEDATA_START = QCAMERA3_PRIVATEDATA << 16,  // NOTE(review): id << 16 shifts into int's sign bit (VENDOR_SECTION is 0x8000) — confirm build treats this as wrapping
+    QCAMERA3_CDS_START = QCAMERA3_CDS << 16,
+    QCAMERA3_OPAQUE_RAW_START = QCAMERA3_OPAQUE_RAW << 16,
+    QCAMERA3_CROP_START = QCAMERA3_CROP << 16,
+    QCAMERA3_TUNING_META_DATA_START = QCAMERA3_TUNING_META_DATA << 16,
+    QCAMERA3_TEMPORAL_DENOISE_START = QCAMERA3_TEMPORAL_DENOISE << 16,
+    QCAMERA3_AV_TIMER_START = QCAMERA3_AV_TIMER << 16,
+    QCAMERA3_SENSOR_META_DATA_START = QCAMERA3_SENSOR_META_DATA << 16,
+    NEXUS_EXPERIMENTAL_2015_START = NEXUS_EXPERIMENTAL_2015 << 16,
+    QCAMERA3_DUALCAM_LINK_META_DATA_START = QCAMERA3_DUALCAM_LINK_META_DATA << 16,
+    QCAMERA3_DUALCAM_CALIB_META_DATA_START = QCAMERA3_DUALCAM_CALIB_META_DATA << 16,
+    QCAMERA3_HAL_PRIVATEDATA_START = QCAMERA3_HAL_PRIVATEDATA << 16,
+    QCAMERA3_JPEG_ENCODE_CROP_START = QCAMERA3_JPEG_ENCODE_CROP << 16
+};
+
+enum qcamera3_ext_tags {  // individual vendor tags; each section's *_END marks its exclusive upper bound
+    QCAMERA3_PRIVATEDATA_REPROCESS = QCAMERA3_PRIVATEDATA_START,
+    QCAMERA3_PRIVATEDATA_END,
+    QCAMERA3_CDS_MODE = QCAMERA3_CDS_START,
+    QCAMERA3_CDS_INFO,
+    QCAMERA3_CDS_END,
+
+    //Property Name:  org.codeaurora.qcamera3.opaque_raw.opaque_raw_strides
+    //
+    //Type: int32 * n * 3 [public]
+    //
+    //Description: Distance in bytes from the beginning of one row of opaque
+    //raw image data to the beginning of next row.
+    //
+    //Details: The strides are listed as (raw_width, raw_height, stride)
+    //triplets. For each supported raw size, there will be a stride associated
+    //with it.
+    QCAMERA3_OPAQUE_RAW_STRIDES = QCAMERA3_OPAQUE_RAW_START,
+
+    //Property Name: org.codeaurora.qcamera3.opaque_raw.opaque_raw_format
+    //
+    //Type: byte(enum) [public]
+    //  * LEGACY - The legacy raw format where 8, 10, or 12-bit
+    //    raw data is packed into a 64-bit word.
+    //  * MIPI - raw format matching the data packing described
+    //    in MIPI CSI-2 specification. In memory, the data
+    //    is constructed by packing sequentially received pixels
+    //    into least significant parts of the words first.
+    //    Within each pixel, the least significant bits are also
+    //    placed towards the least significant part of the word.
+    //
+    //Details: Lay out of opaque raw data in memory is decided by two factors:
+    //         opaque_raw_format and bit depth (implied by whiteLevel). Below
+    //         list illustrates their relationship:
+    //  LEGACY8:  P7(7:0) P6(7:0) P5(7:0) P4(7:0) P3(7:0) P2(7:0) P1(7:0) P0(7:0)
+    //            8 pixels occupy 8 bytes, no padding needed
+    //            min_stride = CEILING8(raw_width)
+    // LEGACY10:  0000 P5(9:0) P4(9:0) P3(9:0) P2(9:0) P1(9:0) P0(9:0)
+    //            6 pixels occupy 8 bytes, 4 bits padding at MSB
+    //            min_stride = (raw_width+5)/6 * 8
+    // LEGACY12:  0000 P4(11:0) P3(11:0) P2(11:0) P1(11:0) P0(11:0)
+    //            5 pixels occupy 8 bytes, 4 bits padding at MSB
+    //            min_stride = (raw_width+4)/5 * 8
+    //    MIPI8:  P0(7:0)
+    //            1 pixel occupy 1 byte
+    //            min_stride = raw_width
+    //   MIPI10:  P3(1:0) P2(1:0) P1(1:0) P0(1:0) P3(9:2) P2(9:2) P1(9:2) P0(9:2)
+    //            4 pixels occupy 5 bytes
+    //            min_stride = (raw_width+3)/4 * 5
+    //   MIPI12:  P1(3:0) P0(3:0) P1(11:4) P0(11:4)
+    //            2 pixels occupy 3 bytes
+    //            min_stride = (raw_width+1)/2 * 3
+    //Note that opaque_raw_stride needs to be at least the required minimum
+    //stride from the table above. ISP hardware may need more generous stride
+    //setting. For example, for LEGACY8, the actual stride may be
+    //CEILING16(raw_width) due to bus burst length requirement.
+    QCAMERA3_OPAQUE_RAW_FORMAT,
+    QCAMERA3_OPAQUE_RAW_END,
+
+    QCAMERA3_CROP_COUNT_REPROCESS = QCAMERA3_CROP_START,
+    QCAMERA3_CROP_REPROCESS,
+    QCAMERA3_CROP_ROI_MAP_REPROCESS,
+    QCAMERA3_CROP_END,
+
+    QCAMERA3_TUNING_META_DATA_BLOB = QCAMERA3_TUNING_META_DATA_START,
+    QCAMERA3_TUNING_META_DATA_END,
+
+    QCAMERA3_TEMPORAL_DENOISE_ENABLE = QCAMERA3_TEMPORAL_DENOISE_START,
+    QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE,
+    QCAMERA3_TEMPORAL_DENOISE_END,
+
+    QCAMERA3_USE_AV_TIMER = QCAMERA3_AV_TIMER_START,
+    QCAMERA3_AV_TIMER_END,
+
+    QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN = QCAMERA3_SENSOR_META_DATA_START,
+    QCAMERA3_SENSOR_IS_MONO_ONLY,
+    QCAMERA3_SENSOR_META_DATA_END,
+
+    NEXUS_EXPERIMENTAL_2015_SENSOR_DYNAMIC_BLACK_LEVEL = NEXUS_EXPERIMENTAL_2015_START,
+    NEXUS_EXPERIMENTAL_2015_SENSOR_INFO_OPTICALLY_SHIELDED_REGIONS,
+    NEXUS_EXPERIMENTAL_2015_END,
+
+    QCAMERA3_DUALCAM_LINK_ENABLE = QCAMERA3_DUALCAM_LINK_META_DATA_START,
+    QCAMERA3_DUALCAM_LINK_IS_MAIN,
+    QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID,
+    QCAMERA3_DUALCAM_LINK_META_DATA_END,
+
+    QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB = QCAMERA3_DUALCAM_CALIB_META_DATA_START,
+    QCAMERA3_DUALCAM_CALIB_META_DATA_END,
+
+    QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS = QCAMERA3_HAL_PRIVATEDATA_START,
+    QCAMERA3_HAL_PRIVATEDATA_DDM_DATA_BLOB,
+    QCAMERA3_HAL_PRIVATEDATA_END,
+
+    /* Property Name:  org.codeaurora.qcamera3.jpeg_encode_crop.enable
+       Type: byte
+       Description: If JPEG crop is enable
+    */
+    QCAMERA3_JPEG_ENCODE_CROP_ENABLE = QCAMERA3_JPEG_ENCODE_CROP_START,
+    /* Property Name:  org.codeaurora.qcamera3.jpeg_encode_crop.rect
+       Type: int32[4]
+       Description: Crop image into size width x height
+                    from [left, top] coordinate
+       rect[0] = left
+       rect[1] = top
+       rect[2] = width
+       rect[3] = height
+    */
+    QCAMERA3_JPEG_ENCODE_CROP_RECT,
+    /* Property Name:  org.codeaurora.qcamera3.jpeg_encode_crop.roi
+       Type: int32[4]
+       Description: Scale the crop image into size width x height
+                    from [left, top] coordinate.
+       roi[0] = left
+       roi[1] = top
+       roi[2] = width
+       roi[3] = height
+    */
+    QCAMERA3_JPEG_ENCODE_CROP_ROI,
+    QCAMERA3_JPEG_ENCODE_CROP_END
+};
+
+// QCAMERA3_OPAQUE_RAW_FORMAT
+typedef enum qcamera3_ext_opaque_raw_format {
+    QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY,  // Qualcomm legacy 64-bit-word packing (see table at QCAMERA3_OPAQUE_RAW_FORMAT)
+    QCAMERA3_OPAQUE_RAW_FORMAT_MIPI     // MIPI CSI-2 packing
+} qcamera3_ext_opaque_raw_format_t;
+
+class QCamera3VendorTags {  // static-only implementation of the vendor_tag_ops_t callbacks
+
+public:
+    static void get_vendor_tag_ops(vendor_tag_ops_t* ops);  // fill in the callback table handed to the framework
+    static int get_tag_count(
+            const vendor_tag_ops_t *ops);
+    static void get_all_tags(
+            const vendor_tag_ops_t *ops,
+            uint32_t *tag_array);
+    static const char* get_section_name(
+            const vendor_tag_ops_t *ops,
+            uint32_t tag);
+    static const char* get_tag_name(
+            const vendor_tag_ops_t *ops,
+            uint32_t tag);
+    static int get_tag_type(
+            const vendor_tag_ops_t *ops,  // each getter validates ops against Ops and returns failure on mismatch
+            uint32_t tag);
+
+    static const vendor_tag_ops_t *Ops;  // the table we published; used to reject foreign ops pointers
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA3VENDORTAGS_H__ */
diff --git a/msmcobalt/QCamera2/HAL3/android/QCamera3External.h b/msmcobalt/QCamera2/HAL3/android/QCamera3External.h
new file mode 100644
index 0000000..2553eae
--- /dev/null
+++ b/msmcobalt/QCamera2/HAL3/android/QCamera3External.h
@@ -0,0 +1,47 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#ifndef __QCAMERA3EXTERNAL_H__
+#define __QCAMERA3EXTERNAL_H__
+
+// System dependencies
+#include <utils/Errors.h>
+
+// Display dependencies
+#include "QServiceUtils.h"
+
+namespace qcamera {
+
+inline android::status_t setCameraLaunchStatus(uint32_t on) {  // forwards to the global QServiceUtils helper of the same name
+    return ::setCameraLaunchStatus(on);
+}
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA3EXTERNAL_H__ */
diff --git a/msmcobalt/QCamera2/QCamera2Factory.cpp b/msmcobalt/QCamera2/QCamera2Factory.cpp
new file mode 100644
index 0000000..33b051a
--- /dev/null
+++ b/msmcobalt/QCamera2/QCamera2Factory.cpp
@@ -0,0 +1,635 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCamera2Factory"
+
+// System dependencies
+#include <stdlib.h>
+#include <utils/Errors.h>
+#include <cutils/properties.h>
+
+// Camera dependencies
+#ifdef QCAMERA_HAL1_SUPPORT
+#include "camera.h"
+#include "HAL/QCamera2HWI.h"
+#include "QCameraMuxer.h"
+#endif
+
+#include "hardware/camera3.h"
+#include "HAL3/QCamera3HWI.h"
+#include "util/QCameraFlash.h"
+#include "QCamera2Factory.h"
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+using namespace android;
+
+namespace qcamera {
+
+QCamera2Factory *gQCamera2Factory = NULL;  // process-wide singleton, created lazily in get_number_of_cameras()
+pthread_mutex_t gCamLock = PTHREAD_MUTEX_INITIALIZER;  // guards gNumCameraSessions
+#ifdef QCAMERA_HAL1_SUPPORT
+QCameraMuxer *gQCameraMuxer = NULL;  // non-NULL only when dual-camera muxing is enabled (HAL1 builds)
+#endif
+
+//Total number of cameras opened simultaneously.
+//This variable updation is protected by gCamLock.
+uint8_t gNumCameraSessions = 0;
+
+volatile uint32_t gKpiDebugLevel = 1;
+
+/*===========================================================================
+ * FUNCTION   : QCamera2Factory
+ *
+ * DESCRIPTION: default constructor of QCamera2Factory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera2Factory::QCamera2Factory()
+{
+    mHalDescriptors = NULL;
+    mCallbacks = NULL;
+    mNumOfCameras = get_num_of_cameras();  // probe backend once; cached for getNumberOfCameras()
+    int bDualCamera = 0;
+    char propDefault[PROPERTY_VALUE_MAX];
+    char prop[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.HAL3.enabled", prop, "1");  // HAL3 on by default
+    int isHAL3Enabled = atoi(prop);
+#ifndef QCAMERA_HAL1_SUPPORT
+    isHAL3Enabled = 1;  // HAL3-only build: property cannot disable HAL3
+#endif
+
+    // Signifies whether system has to enable dual camera mode
+    snprintf(propDefault, PROPERTY_VALUE_MAX, "%d", isDualCamAvailable(isHAL3Enabled));
+    property_get("persist.camera.dual.camera", prop, propDefault);
+    bDualCamera = atoi(prop);
+    LOGH("dualCamera:%d ", bDualCamera);
+#ifndef QCAMERA_HAL1_SUPPORT
+    bDualCamera = 0;  // muxer path requires HAL1 support
+#endif
+
+    if(bDualCamera) {
+        LOGI("Enabling QCamera Muxer");
+#ifdef QCAMERA_HAL1_SUPPORT
+        if (!gQCameraMuxer) {
+            QCameraMuxer::getCameraMuxer(&gQCameraMuxer, mNumOfCameras);
+            if (!gQCameraMuxer) {
+                LOGE("Error !! Failed to get QCameraMuxer");
+            }
+        }
+#endif
+    }
+    // Descriptor table is only built when the muxer is not taking over the cameras
+#ifdef QCAMERA_HAL1_SUPPORT
+    if (!gQCameraMuxer && (mNumOfCameras > 0) &&(mNumOfCameras <= MM_CAMERA_MAX_NUM_SENSORS)) {
+#else
+    if ((mNumOfCameras > 0) &&(mNumOfCameras <= MM_CAMERA_MAX_NUM_SENSORS)) {
+#endif
+        mHalDescriptors = new hal_desc[mNumOfCameras];
+        if ( NULL != mHalDescriptors) {
+            uint32_t cameraId = 0;
+
+            for (int i = 0; i < mNumOfCameras ; i++, cameraId++) {
+                mHalDescriptors[i].cameraId = cameraId;
+                // Set Device version to 3.x when both HAL3 is enabled & its BAYER sensor
+                if (isHAL3Enabled && !(is_yuv_sensor(cameraId))) {
+                    mHalDescriptors[i].device_version =
+                            CAMERA_DEVICE_API_VERSION_3_0;
+                } else {
+                    mHalDescriptors[i].device_version =
+                            CAMERA_DEVICE_API_VERSION_1_0;
+                }
+            }
+        } else {
+            LOGE("Not enough resources to allocate HAL descriptor table!");
+        }
+    } else {
+        LOGI("%d camera devices detected!", mNumOfCameras);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera2Factory
+ *
+ * DESCRIPTION: deconstructor of QCamera2Factory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera2Factory::~QCamera2Factory()
+{
+    if ( NULL != mHalDescriptors ) {  // NOTE(review): delete[] NULL is safe; guard is redundant but harmless
+        delete [] mHalDescriptors;
+    }
+#ifdef QCAMERA_HAL1_SUPPORT
+    if (gQCameraMuxer) {  // factory owns the muxer singleton; release it on teardown
+        delete gQCameraMuxer;
+        gQCameraMuxer = NULL;
+    }
+#endif
+}
+
+/*===========================================================================
+ * FUNCTION   : get_number_of_cameras
+ *
+ * DESCRIPTION: static function to query number of cameras detected
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of cameras detected
+ *==========================================================================*/
+int QCamera2Factory::get_number_of_cameras()
+{
+    int numCameras = 0;
+
+    if (!gQCamera2Factory) {  // lazily create the singleton on first framework call
+        gQCamera2Factory = new QCamera2Factory();
+        if (!gQCamera2Factory) {  // NOTE(review): non-nothrow new cannot return NULL; effective only under -fno-exceptions — confirm build flags
+            LOGE("Failed to allocate Camera2Factory object");
+            return 0;
+        }
+    }
+#ifdef QCAMERA_HAL1_SUPPORT
+    if(gQCameraMuxer)
+        numCameras = gQCameraMuxer->get_number_of_cameras();  // muxer reports logical (muxed) camera count
+    else
+#endif
+        numCameras = gQCamera2Factory->getNumberOfCameras();
+
+    LOGH("num of cameras: %d", numCameras);
+    return numCameras;
+}
+
+/*===========================================================================
+ * FUNCTION   : get_camera_info
+ *
+ * DESCRIPTION: static function to query camera information with its ID
+ *
+ * PARAMETERS :
+ *   @camera_id : camera ID
+ *   @info      : ptr to camera info struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2Factory::get_camera_info(int camera_id, struct camera_info *info)
+{
+    int rc = NO_ERROR;  // assumes gQCamera2Factory was created by a prior get_number_of_cameras() call — TODO confirm framework ordering
+#ifdef QCAMERA_HAL1_SUPPORT
+    if(gQCameraMuxer)
+        rc = gQCameraMuxer->get_camera_info(camera_id, info);
+    else
+#endif
+        rc =  gQCamera2Factory->getCameraInfo(camera_id, info);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : set_callbacks
+ *
+ * DESCRIPTION: static function to set callbacks function to camera module
+ *
+ * PARAMETERS :
+ *   @callbacks : ptr to callback functions
+ *
+ * RETURN     : NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2Factory::set_callbacks(const camera_module_callbacks_t *callbacks)
+{
+    int rc = NO_ERROR;  // routes to muxer when dual-camera muxing is active, else to the factory
+#ifdef QCAMERA_HAL1_SUPPORT
+    if(gQCameraMuxer)
+        rc = gQCameraMuxer->set_callbacks(callbacks);
+    else
+#endif
+        rc =  gQCamera2Factory->setCallbacks(callbacks);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : open_legacy
+ *
+ * DESCRIPTION: Function to open older hal version implementation
+ *
+ * PARAMETERS :
+ *   @hw_device : ptr to struct storing camera hardware device info
+ *   @camera_id : camera ID
+ *   @halVersion: Based on camera_module_t.common.module_api_version
+ *
+ * RETURN     : 0  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2Factory::open_legacy(const struct hw_module_t* module,
+            const char* id, uint32_t halVersion, struct hw_device_t** device)
+{
+    int rc = NO_ERROR;
+    if (module != &HAL_MODULE_INFO_SYM.common) {  // sanity: must be called through our own module struct
+        LOGE("Invalid module. Trying to open %p, expect %p",
+            module, &HAL_MODULE_INFO_SYM.common);
+        return INVALID_OPERATION;
+    }
+    if (!id) {
+        LOGE("Invalid camera id");
+        return BAD_VALUE;
+    }
+#ifdef QCAMERA_HAL1_SUPPORT
+    if(gQCameraMuxer)
+        rc =  gQCameraMuxer->open_legacy(module, id, halVersion, device);
+    else
+#endif
+        rc =  gQCamera2Factory->openLegacy(atoi(id), halVersion, device);  // atoi yields 0 on malformed id; openLegacy range-checks it
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : set_torch_mode
+ *
+ * DESCRIPTION: Attempt to turn on or off the torch mode of the flash unit.
+ *
+ * PARAMETERS :
+ *   @camera_id : camera ID
+ *   @on        : Indicates whether to turn the flash on or off
+ *
+ * RETURN     : 0  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2Factory::set_torch_mode(const char* camera_id, bool on)
+{   // Guard: factory is created lazily in get_number_of_cameras(); avoid NULL deref if torch is toggled first
+    return gQCamera2Factory ? gQCamera2Factory->setTorchMode(camera_id, on) : -ENODEV;
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumberOfCameras
+ *
+ * DESCRIPTION: query number of cameras detected
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of cameras detected
+ *==========================================================================*/
+int QCamera2Factory::getNumberOfCameras()
+{
+    return mNumOfCameras;  // cached from get_num_of_cameras() in the constructor
+}
+
+/*===========================================================================
+ * FUNCTION   : getCameraInfo
+ *
+ * DESCRIPTION: query camera information with its ID
+ *
+ * PARAMETERS :
+ *   @camera_id : camera ID
+ *   @info      : ptr to camera info struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2Factory::getCameraInfo(int camera_id, struct camera_info *info)
+{
+    int rc;
+
+    if (!mNumOfCameras || camera_id >= mNumOfCameras || !info ||
+        (camera_id < 0)) {  // validate id range and output pointer before touching the table
+        LOGE("Error getting camera info!! mNumOfCameras = %d,"
+                "camera_id = %d, info = %p",
+                 mNumOfCameras, camera_id, info);
+        return -ENODEV;
+    }
+
+    if ( NULL == mHalDescriptors ) {
+        LOGE("Hal descriptor table is not initialized!");
+        return NO_INIT;
+    }
+
+    LOGI("Camera id %d API version %d",
+            camera_id, mHalDescriptors[camera_id].device_version);
+
+    // Need ANDROID_FLASH_INFO_AVAILABLE property for flashlight widget to
+    // work and so get the static data regardless of HAL version
+    rc = QCamera3HardwareInterface::getCamInfo(
+            mHalDescriptors[camera_id].cameraId, info);
+    if (mHalDescriptors[camera_id].device_version ==
+            CAMERA_DEVICE_API_VERSION_1_0) {
+        info->device_version = CAMERA_DEVICE_API_VERSION_1_0;  // report 1.0 for HAL1-only sensors while keeping the static data above
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setCallbacks
+ *
+ * DESCRIPTION: set callback functions to send asynchronous notifications to
+ *              frameworks.
+ *
+ * PARAMETERS :
+ *   @callbacks : callback function pointer
+ *
+ * RETURN     :
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2Factory::setCallbacks(const camera_module_callbacks_t *callbacks)
+{
+    // Remember the framework callbacks, then register them with the flash module.
+    mCallbacks = callbacks;
+
+    int status = QCameraFlash::getInstance().registerCallbacks(callbacks);
+    if (status != 0) {
+        LOGE("Failed to register callbacks with flash module!");
+    }
+
+    return status;
+}
+
+/*===========================================================================
+ * FUNCTION   : cameraDeviceOpen
+ *
+ * DESCRIPTION: open a camera device with its ID
+ *
+ * PARAMETERS :
+ *   @camera_id : camera ID
+ *   @hw_device : ptr to struct storing camera hardware device info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2Factory::cameraDeviceOpen(int camera_id,
+                    struct hw_device_t **hw_device)
+{
+    int rc = NO_ERROR;
+    if (camera_id < 0 || camera_id >= mNumOfCameras)
+        return -ENODEV;
+
+    if ( NULL == mHalDescriptors ) {
+        LOGE("Hal descriptor table is not initialized!");
+        return NO_INIT;
+    }
+
+    LOGI("Open camera id %d API version %d",
+            camera_id, mHalDescriptors[camera_id].device_version);
+
+    // Dispatch on the per-camera HAL version chosen in the constructor
+    if ( mHalDescriptors[camera_id].device_version == CAMERA_DEVICE_API_VERSION_3_0 ) {
+        QCamera3HardwareInterface *hw = new QCamera3HardwareInterface(mHalDescriptors[camera_id].cameraId,
+                mCallbacks);
+        if (!hw) {
+            LOGE("Allocation of hardware interface failed");
+            return NO_MEMORY;
+        }
+        rc = hw->openCamera(hw_device);
+        if (rc != 0) {
+            delete hw;  // open failed: release; on success hw_device owns the interface
+        }
+    }
+#ifdef QCAMERA_HAL1_SUPPORT
+    else if (mHalDescriptors[camera_id].device_version == CAMERA_DEVICE_API_VERSION_1_0) {
+        QCamera2HardwareInterface *hw = new QCamera2HardwareInterface((uint32_t)camera_id);
+        if (!hw) {
+            LOGE("Allocation of hardware interface failed");
+            return NO_MEMORY;
+        }
+        rc = hw->openCamera(hw_device);
+        if (rc != NO_ERROR) {
+            delete hw;
+        }
+    }
+#endif
+    else {
+        LOGE("Device version for camera id %d invalid %d",
+              camera_id,
+              mHalDescriptors[camera_id].device_version);
+        return BAD_VALUE;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : camera_device_open
+ *
+ * DESCRIPTION: static function to open a camera device by its ID
+ *
+ * PARAMETERS :
+ *   @camera_id : camera ID
+ *   @hw_device : ptr to struct storing camera hardware device info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2Factory::camera_device_open(
+    const struct hw_module_t *module, const char *id,
+    struct hw_device_t **hw_device)
+{
+    int rc = NO_ERROR;
+    if (module != &HAL_MODULE_INFO_SYM.common) {  // sanity: must be called through our own module struct
+        LOGE("Invalid module. Trying to open %p, expect %p",
+            module, &HAL_MODULE_INFO_SYM.common);
+        return INVALID_OPERATION;
+    }
+    if (!id) {
+        LOGE("Invalid camera id");
+        return BAD_VALUE;
+    }
+#ifdef QCAMERA_HAL1_SUPPORT
+    if(gQCameraMuxer)
+        rc =  gQCameraMuxer->camera_device_open(module, id, hw_device);
+    else
+#endif
+        rc = gQCamera2Factory->cameraDeviceOpen(atoi(id), hw_device);  // atoi yields 0 on malformed id; cameraDeviceOpen range-checks it
+    return rc;
+}
+
+struct hw_module_methods_t QCamera2Factory::mModuleMethods = {
+    .open = QCamera2Factory::camera_device_open,  // entry point the camera service uses to open devices
+};
+
+/*===========================================================================
+ * FUNCTION   : openLegacy
+ *
+ * DESCRIPTION: Function to open older hal version implementation
+ *
+ * PARAMETERS :
+ *   @camera_id : camera ID
+ *   @halVersion: Based on camera_module_t.common.module_api_version
+ *   @hw_device : ptr to struct storing camera hardware device info
+ *
+ * RETURN     : 0  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2Factory::openLegacy(
+        int32_t cameraId, uint32_t halVersion, struct hw_device_t** hw_device)
+{
+    int rc = NO_ERROR;
+
+    // %u: halVersion is uint32_t; %d was a format-specifier mismatch
+    LOGI("openLegacy halVersion: %u", halVersion);
+    //Assumption: all cameras can support legacy API version
+    if (cameraId < 0 || cameraId >= gQCamera2Factory->getNumberOfCameras())
+        return -ENODEV;
+
+    switch(halVersion)
+    {
+#ifdef QCAMERA_HAL1_SUPPORT
+        case CAMERA_DEVICE_API_VERSION_1_0:
+        {
+            QCamera2HardwareInterface *hw =
+                new QCamera2HardwareInterface((uint32_t)cameraId);
+            if (!hw) {
+                LOGE("Allocation of hardware interface failed");
+                return NO_MEMORY;
+            }
+            rc = hw->openCamera(hw_device);
+            if (rc != NO_ERROR) {
+                delete hw;  // open failed: release the half-constructed HWI
+            }
+            break;
+        }
+#endif
+        default:
+            LOGE("Device API version: %u for camera id %d invalid",
+                 halVersion, cameraId);
+            return BAD_VALUE;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setTorchMode
+ *
+ * DESCRIPTION: Attempt to turn on or off the torch mode of the flash unit.
+ *
+ * PARAMETERS :
+ *   @camera_id : camera ID
+ *   @on        : Indicates whether to turn the flash on or off
+ *
+ * RETURN     : 0  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2Factory::setTorchMode(const char* camera_id, bool on)
+{
+    int retVal(0);
+    long cameraIdLong(-1);
+    int cameraIdInt(-1);
+    char* endPointer = NULL;
+    errno = 0;  // cleared so ERANGE from strtol can be detected below
+    QCameraFlash& flash = QCameraFlash::getInstance();
+
+    cameraIdLong = strtol(camera_id, &endPointer, 10);
+
+    // Reject non-numeric ids, trailing garbage, and out-of-range values
+    if ((errno == ERANGE) ||
+            (cameraIdLong < 0) ||
+            (cameraIdLong >= static_cast<long>(get_number_of_cameras())) ||
+            (endPointer == camera_id) ||
+            (*endPointer != '\0')) {
+        retVal = -EINVAL;
+    } else if (on) {
+        // Turn torch on: init flash, set mode, then notify the framework
+        cameraIdInt = static_cast<int>(cameraIdLong);
+        retVal = flash.initFlash(cameraIdInt);
+
+        if (retVal == 0) {
+            retVal = flash.setFlashMode(cameraIdInt, on);
+            if ((retVal == 0) && (mCallbacks != NULL)) {
+                mCallbacks->torch_mode_status_change(mCallbacks,
+                        camera_id,
+                        TORCH_MODE_STATUS_AVAILABLE_ON);
+            } else if (retVal == -EALREADY) {
+                // Flash is already on, so treat this as a success.
+                retVal = 0;
+            }
+        }
+    } else {
+        // Turn torch off: set mode, deinit flash, then notify the framework
+        cameraIdInt = static_cast<int>(cameraIdLong);
+        retVal = flash.setFlashMode(cameraIdInt, on);
+
+        if (retVal == 0) {
+            retVal = flash.deinitFlash(cameraIdInt);
+            if ((retVal == 0) && (mCallbacks != NULL)) {
+                mCallbacks->torch_mode_status_change(mCallbacks,
+                        camera_id,
+                        TORCH_MODE_STATUS_AVAILABLE_OFF);
+            }
+        } else if (retVal == -EALREADY) {
+            // Flash is already off, so treat this as a success.
+            retVal = 0;
+        }
+    }
+
+    return retVal;
+}
+
+/*===========================================================================
+ * FUNCTION   : isDualCamAvailable
+ *
+ * DESCRIPTION: Function to check whether we have dual Camera HW available
+ *
+ * PARAMETERS :
+ *   @hal3Enabled : HAL3 enable flag
+ *
+ * RETURN     : bool - true : have Dual Camera HW available
+ *                           false : not have Dual Camera HW available
+ *==========================================================================*/
+bool QCamera2Factory::isDualCamAvailable(int hal3Enabled)
+{
+    bool rc = false;
+    int i = 0;
+    camera_info info;
+    cam_sync_type_t cam_type = CAM_TYPE_MAIN;  // stays CAM_TYPE_MAIN unless HAL1 probing overwrites it
+
+    for (i = 0; i < mNumOfCameras; i++) {
+        if (!hal3Enabled) {
+#ifdef QCAMERA_HAL1_SUPPORT
+            QCamera2HardwareInterface::getCapabilities(i, &info, &cam_type);
+#endif
+        }
+
+        if(cam_type == CAM_TYPE_AUX) {  // an AUX sensor implies dual-camera hardware
+            LOGH("Have Dual Camera HW Avaiable.")
+            rc = true;
+            break;
+        }
+    }
+#ifdef QCAMERA_HAL1_SUPPORT
+    return rc;
+#else
+    return false;  // without HAL1 support capabilities are never probed, so report no dual cam
+#endif
+}
+
+}; // namespace qcamera
+
diff --git a/msmcobalt/QCamera2/QCamera2Factory.h b/msmcobalt/QCamera2/QCamera2Factory.h
new file mode 100644
index 0000000..a091439
--- /dev/null
+++ b/msmcobalt/QCamera2/QCamera2Factory.h
@@ -0,0 +1,80 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#ifndef __QCAMERA2FACTORY_H__
+#define __QCAMERA2FACTORY_H__
+
+// Camera dependencies
+#include "hardware/camera_common.h"
+
+namespace qcamera {
+
+typedef struct {
+    uint32_t cameraId;
+    uint32_t device_version;
+} hal_desc;
+
+class QCamera2Factory
+{
+public:
+    QCamera2Factory();
+    virtual ~QCamera2Factory();
+
+    static int get_number_of_cameras();
+    static int get_camera_info(int camera_id, struct camera_info *info);
+    static int set_callbacks(const camera_module_callbacks_t *callbacks);
+    static int open_legacy(const struct hw_module_t* module,
+            const char* id, uint32_t halVersion, struct hw_device_t** device);
+    static int set_torch_mode(const char* camera_id, bool on);
+    bool isDualCamAvailable(int hal3Enabled);
+
+private:
+    int getNumberOfCameras();
+    int getCameraInfo(int camera_id, struct camera_info *info);
+    int setCallbacks(const camera_module_callbacks_t *callbacks);
+    int cameraDeviceOpen(int camera_id, struct hw_device_t **hw_device);
+    static int camera_device_open(const struct hw_module_t *module, const char *id,
+                struct hw_device_t **hw_device);
+    static int openLegacy(
+            int32_t cameraId, uint32_t halVersion, struct hw_device_t** hw_device);
+    int setTorchMode(const char* camera_id, bool on);
+public:
+    static struct hw_module_methods_t mModuleMethods;
+
+private:
+    int mNumOfCameras;
+    hal_desc *mHalDescriptors;
+    const camera_module_callbacks_t *mCallbacks;
+};
+
+}; /*namespace qcamera*/
+
+extern camera_module_t HAL_MODULE_INFO_SYM;
+
+#endif /* __QCAMERA2FACTORY_H__ */
diff --git a/msmcobalt/QCamera2/QCamera2Hal.cpp b/msmcobalt/QCamera2/QCamera2Hal.cpp
new file mode 100644
index 0000000..bf208bb
--- /dev/null
+++ b/msmcobalt/QCamera2/QCamera2Hal.cpp
@@ -0,0 +1,56 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+// Camera dependencies
+#include "QCamera2Factory.h"
+#include "HAL3/QCamera3VendorTags.h"
+
+static hw_module_t camera_common = {
+    .tag                    = HARDWARE_MODULE_TAG,
+    .module_api_version     = CAMERA_MODULE_API_VERSION_2_4,
+    .hal_api_version        = HARDWARE_HAL_API_VERSION,
+    .id                     = CAMERA_HARDWARE_MODULE_ID,
+    .name                   = "QCamera Module",
+    .author                 = "Qualcomm Innovation Center Inc",
+    .methods                = &qcamera::QCamera2Factory::mModuleMethods,
+    .dso                    = NULL,
+    .reserved               = {0}
+};
+
+camera_module_t HAL_MODULE_INFO_SYM = {
+    .common                 = camera_common,
+    .get_number_of_cameras  = qcamera::QCamera2Factory::get_number_of_cameras,
+    .get_camera_info        = qcamera::QCamera2Factory::get_camera_info,
+    .set_callbacks          = qcamera::QCamera2Factory::set_callbacks,
+    .get_vendor_tag_ops     = qcamera::QCamera3VendorTags::get_vendor_tag_ops,
+    .open_legacy            = qcamera::QCamera2Factory::open_legacy,
+    .set_torch_mode         = qcamera::QCamera2Factory::set_torch_mode,
+    .init                   = NULL,
+    .reserved               = {0}
+};
diff --git a/msmcobalt/QCamera2/QCameraFormat.h b/msmcobalt/QCamera2/QCameraFormat.h
new file mode 100644
index 0000000..b3869b2
--- /dev/null
+++ b/msmcobalt/QCamera2/QCameraFormat.h
@@ -0,0 +1,44 @@
+/* Copyright (c) 2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+
+/* Macros exposed to gralloc to query camera HAL for gralloc format to be
+used for vendor specific camera formats. */
+
+#define PREFERRED_IMPLEMENTATION_DEFINED_CAMERA_FORMAT HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS
+#define PREFERRED_YCBCR_420_888_CAMERA_FORMAT HAL_PIXEL_FORMAT_NV21_ZSL
+
+/* Macros exposed to camera HAL to get the preview and callback stream
+formats. Please ensure that if the macros below are changed then the
+corresponding change should be done in the above macros and vice versa
+to prevent format mismatch between Gralloc and Camera HAL for stream
+buffers */
+
+#define PREVIEW_STREAM_FORMAT CAM_FORMAT_YUV_420_NV12_VENUS
+#define CALLBACK_STREAM_FORMAT CAM_FORMAT_YUV_420_NV21
diff --git a/msmcobalt/QCamera2/stack/Android.mk b/msmcobalt/QCamera2/stack/Android.mk
new file mode 100644
index 0000000..a357417
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/Android.mk
@@ -0,0 +1,5 @@
+LOCAL_PATH:= $(call my-dir)
+include $(LOCAL_PATH)/mm-camera-interface/Android.mk
+include $(LOCAL_PATH)/mm-jpeg-interface/Android.mk
+include $(LOCAL_PATH)/mm-jpeg-interface/test/Android.mk
+include $(LOCAL_PATH)/mm-camera-test/Android.mk
diff --git a/msmcobalt/QCamera2/stack/common/cam_intf.h b/msmcobalt/QCamera2/stack/common/cam_intf.h
new file mode 100644
index 0000000..54fcb6f
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/common/cam_intf.h
@@ -0,0 +1,1060 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_INTF_H__
+#define __QCAMERA_INTF_H__
+
+// System dependencies
+#include <string.h>
+#include <media/msmb_isp.h>
+
+// Camera dependencies
+#include "cam_types.h"
+
+#define CAM_PRIV_IOCTL_BASE (V4L2_CID_PRIVATE_BASE + MSM_CAMERA_PRIV_CMD_MAX)
+typedef enum {
+    /* session based parameters */
+    CAM_PRIV_PARM = CAM_PRIV_IOCTL_BASE,
+    /* session based action: do auto focus.*/
+    CAM_PRIV_DO_AUTO_FOCUS,
+    /* session based action: cancel auto focus.*/
+    CAM_PRIV_CANCEL_AUTO_FOCUS,
+    /* session based action: prepare for snapshot.*/
+    CAM_PRIV_PREPARE_SNAPSHOT,
+    /* sync stream info.*/
+    CAM_PRIV_STREAM_INFO_SYNC,
+    /* stream based parameters*/
+    CAM_PRIV_STREAM_PARM,
+    /* start ZSL snapshot.*/
+    CAM_PRIV_START_ZSL_SNAPSHOT,
+    /* stop ZSL snapshot.*/
+    CAM_PRIV_STOP_ZSL_SNAPSHOT,
+    /* event for related sensors synchronization. */
+    CAM_PRIV_SYNC_RELATED_SENSORS,
+    /* flush */
+    CAM_PRIV_FLUSH
+} cam_private_ioctl_enum_t;
+
+typedef enum {
+    /* start syncing for related cameras */
+    CAM_SYNC_RELATED_SENSORS_ON = 1,
+    /* stop syncing for related cameras */
+    CAM_SYNC_RELATED_SENSORS_OFF
+} cam_sync_related_sensors_control_t;
+
+typedef enum {
+    /* Driving camera of the related camera sub-system */
+    /* Certain features are enabled only for primary camera
+       such as display mode for preview, autofocus etc
+       In certain configurations for eg. when optical zoom
+       limit is reached, Aux Camera would become
+       the driving camera and there will be role switch.*/
+    CAM_MODE_PRIMARY = 0,
+    /* Non-driving camera of the related camera sub-system
+       no display mode set for secondary camera */
+    CAM_MODE_SECONDARY
+} cam_sync_mode_t;
+
+/* Payload for sending bundling info to backend */
+typedef struct {
+    cam_sync_related_sensors_control_t sync_control;
+    cam_sync_type_t type;
+    cam_sync_mode_t mode;
+    /* session Id of the other camera session
+       Linking will be done with this session in the
+       backend */
+    uint32_t related_sensor_session_id;
+    uint8_t is_frame_sync_enabled;
+}cam_sync_related_sensors_event_info_t;
+
+/* Related camera sensor specific calibration data */
+// Align bytes according to API document.
+#pragma pack(2)
+typedef struct {
+    /* Focal length in pixels @ calibration resolution.*/
+    float       normalized_focal_length;
+    /* Native sensor resolution W that was used to capture calibration image */
+    uint16_t    native_sensor_resolution_width;
+    /* Native sensor resolution H that was used to capture calibration image */
+    uint16_t    native_sensor_resolution_height;
+    /* Image size W used internally by calibration tool */
+    uint16_t    calibration_sensor_resolution_width;
+    /* Image size H used internally by calibration tool */
+    uint16_t    calibration_sensor_resolution_height;
+    /* Focal length ratio @ Calibration */
+    float       focal_length_ratio;
+}cam_related_sensor_calibration_data_t;
+#pragma pack()
+
+/* Related Camera System Calibration data
+   Calibration data for the entire related cam sub-system is
+   in a shared EEPROM. We have 2 fields which are specific to
+   each sensor followed by a set of common calibration of the
+   entire related cam system*/
+// Align bytes according to API document.
+#pragma pack(2)
+typedef struct {
+    /* Version information */
+    uint32_t    calibration_format_version;
+    /* Main Camera Sensor specific calibration */
+    cam_related_sensor_calibration_data_t  main_cam_specific_calibration;
+    /* Aux Camera Sensor specific calibration */
+    cam_related_sensor_calibration_data_t  aux_cam_specific_calibration;
+    /* Relative viewpoint matching matrix w.r.t Main */
+    float      relative_rotation_matrix[RELCAM_CALIB_ROT_MATRIX_MAX];
+    /* Relative geometric surface description parameters */
+    float      relative_geometric_surface_parameters[
+            RELCAM_CALIB_SURFACE_PARMS_MAX];
+    /* Relative offset of sensor center from optical axis along horizontal dimension */
+    float      relative_principle_point_x_offset;
+    /* Relative offset of sensor center from optical axis along vertical dimension */
+    float      relative_principle_point_y_offset;
+    /* 0=Main Camera is on the left of Aux; 1=Main Camera is on the right of Aux */
+    uint16_t   relative_position_flag;
+    /* Camera separation in mm */
+    float      relative_baseline_distance;
+    /* main sensor setting during cal: 0-none, 1-hor-mirror, 2-ver-flip, 3-both */
+    uint16_t   main_sensor_mirror_flip_setting;
+    /* aux sensor setting during cal: 0-none, 1-hor-mirror, 2-ver-flip, 3-both */
+    uint16_t   aux_sensor_mirror_flip_setting;
+    /* module orientation during cal: 0-sensors in landscape, 1-sensors in portrait */
+    uint16_t   module_orientation_during_calibration;
+    /* cal images required rotation: 0-no, 1-90 degrees right, 2-90 degrees left */
+    uint16_t   rotation_flag;
+    /* AEC sync OTP data */
+    /* AEC sync brightness ratio. Fixed Point Q10 */
+    int16_t    brightness_ratio;
+    /* Reference mono gain value obtained from setup stage and used during calibration stage */
+    /* Fixed Point Q10 */
+    int16_t    ref_mono_gain;
+    /* Reference mono line count obtained from setup stage and used during calibration stage */
+    uint16_t   ref_mono_linecount;
+    /* Reference bayer gain value obtained from setup stage and used during calibration stage */
+    /* Fixed Point Q10 */
+    int16_t    ref_bayer_gain;
+    /* Reference bayer line count obtained from setup stage and used during calibration stage */
+    uint16_t   ref_bayer_linecount;
+    /* Reference bayer color temperature */
+    uint16_t   ref_bayer_color_temperature;
+    /* Reserved for future use */
+    float      reserved[RELCAM_CALIB_RESERVED_MAX];
+} cam_related_system_calibration_data_t;
+#pragma pack()
+
+typedef struct {
+  uint32_t default_sensor_flip;
+  uint32_t sensor_mount_angle;
+  cam_related_system_calibration_data_t otp_calibration_data;
+} cam_jpeg_metadata_t;
+
+/* capability struct definition for HAL 1*/
+typedef struct{
+    cam_hal_version_t version;
+
+    cam_position_t position;                                /* sensor position: front, back */
+
+    uint8_t auto_hdr_supported;
+
+    uint16_t isWnrSupported;
+    /* supported iso modes */
+    size_t supported_iso_modes_cnt;
+    cam_iso_mode_type supported_iso_modes[CAM_ISO_MODE_MAX];
+
+    /* supported flash modes */
+    size_t supported_flash_modes_cnt;
+    cam_flash_mode_t supported_flash_modes[CAM_FLASH_MODE_MAX];
+
+    size_t zoom_ratio_tbl_cnt;                              /* table size for zoom ratios */
+    uint32_t zoom_ratio_tbl[MAX_ZOOMS_CNT];                 /* zoom ratios table */
+
+    /* supported effect modes */
+    size_t supported_effects_cnt;
+    cam_effect_mode_type supported_effects[CAM_EFFECT_MODE_MAX];
+
+    /* supported scene modes */
+    size_t supported_scene_modes_cnt;
+    cam_scene_mode_type supported_scene_modes[CAM_SCENE_MODE_MAX];
+
+    /* supported auto exposure modes */
+    size_t supported_aec_modes_cnt;
+    cam_auto_exposure_mode_type supported_aec_modes[CAM_AEC_MODE_MAX];
+
+    size_t fps_ranges_tbl_cnt;                              /* fps ranges table size */
+    cam_fps_range_t fps_ranges_tbl[MAX_SIZES_CNT];          /* fps ranges table */
+
+    /* supported antibanding modes */
+    size_t supported_antibandings_cnt;
+    cam_antibanding_mode_type supported_antibandings[CAM_ANTIBANDING_MODE_MAX];
+
+    /* supported white balance modes */
+    size_t supported_white_balances_cnt;
+    cam_wb_mode_type supported_white_balances[CAM_WB_MODE_MAX];
+
+    /* Capability list of supported insensor HDR types
+     * Backend is expected to fill in all the supported types and set appropriate
+     * count, see cam_sensor_hdr_type_t for valid types
+    */
+    size_t supported_sensor_hdr_types_cnt;
+    cam_sensor_hdr_type_t supported_sensor_hdr_types[CAM_SENSOR_HDR_MAX];
+
+    /* supported manual wb cct */
+    int32_t min_wb_cct;
+    int32_t max_wb_cct;
+
+    /* supported manual wb rgb gains */
+    float min_wb_gain;
+    float max_wb_gain;
+
+    /* supported focus modes */
+    size_t supported_focus_modes_cnt;
+    cam_focus_mode_type supported_focus_modes[CAM_FOCUS_MODE_MAX];
+
+    /* supported manual focus position */
+    float min_focus_pos[CAM_MANUAL_FOCUS_MODE_MAX];
+    float max_focus_pos[CAM_MANUAL_FOCUS_MODE_MAX];
+
+    int32_t exposure_compensation_min;       /* min value of exposure compensation index */
+    int32_t exposure_compensation_max;       /* max value of exposure compensation index */
+    int32_t exposure_compensation_default;   /* default value of exposure compensation index */
+    float exposure_compensation_step;
+    cam_rational_type_t exp_compensation_step;    /* exposure compensation step value */
+
+    uint8_t video_stablization_supported; /* flag if video stabilization is supported */
+
+    size_t picture_sizes_tbl_cnt;                           /* picture sizes table size */
+    cam_dimension_t picture_sizes_tbl[MAX_SIZES_CNT];       /* picture sizes table */
+    /* The minimum frame duration that is supported for each
+     * resolution in availableProcessedSizes. Should correspond
+     * to the frame duration when only that processed stream
+     * is active, with all processing set to FAST */
+    int64_t picture_min_duration[MAX_SIZES_CNT];
+
+    /* capabilities specific to HAL 1 */
+
+    int32_t modes_supported;                                /* mask of modes supported: 2D, 3D */
+    uint32_t sensor_mount_angle;                            /* sensor mount angle */
+
+    float focal_length;                                     /* focal length */
+    float hor_view_angle;                                   /* horizontal view angle */
+    float ver_view_angle;                                   /* vertical view angle */
+
+    size_t preview_sizes_tbl_cnt;                           /* preview sizes table size */
+    cam_dimension_t preview_sizes_tbl[MAX_SIZES_CNT];       /* preview sizes table */
+
+    size_t video_sizes_tbl_cnt;                             /* video sizes table size */
+    cam_dimension_t video_sizes_tbl[MAX_SIZES_CNT];         /* video sizes table */
+
+
+    size_t livesnapshot_sizes_tbl_cnt;                      /* livesnapshot sizes table size */
+    cam_dimension_t livesnapshot_sizes_tbl[MAX_SIZES_CNT];  /* livesnapshot sizes table */
+
+    size_t vhdr_livesnapshot_sizes_tbl_cnt;                 /* vhdr_livesnapshot sizes table size */
+    cam_dimension_t vhdr_livesnapshot_sizes_tbl[MAX_SIZES_CNT];  /* vhdr_livesnapshot sizes table */
+
+    size_t hfr_tbl_cnt;                                     /* table size for HFR */
+    cam_hfr_info_t hfr_tbl[CAM_HFR_MODE_MAX];               /* HFR table */
+
+    size_t zzhdr_sizes_tbl_cnt;                             /* Number of resolutions in zzHDR mode*/
+    cam_dimension_t zzhdr_sizes_tbl[MAX_SIZES_CNT];         /* Table for ZZHDR supported sizes */
+
+    /* supported preview formats */
+    size_t supported_preview_fmt_cnt;
+    cam_format_t supported_preview_fmts[CAM_FORMAT_MAX];
+
+    /* supported picture formats */
+    size_t supported_picture_fmt_cnt;
+    cam_format_t supported_picture_fmts[CAM_FORMAT_MAX];
+
+    uint8_t max_downscale_factor;
+
+    /* dimension and supported output format of raw dump from camif */
+    size_t supported_raw_dim_cnt;
+    cam_dimension_t raw_dim[MAX_SIZES_CNT];
+    size_t supported_raw_fmt_cnt;
+    cam_format_t supported_raw_fmts[CAM_FORMAT_MAX];
+    /* The minimum frame duration that is supported for above
+       raw resolution */
+    int64_t raw_min_duration[MAX_SIZES_CNT];
+
+    /* 3A version*/
+    cam_q3a_version_t q3a_version;
+    /* supported focus algorithms */
+    size_t supported_focus_algos_cnt;
+    cam_focus_algorithm_type supported_focus_algos[CAM_FOCUS_ALGO_MAX];
+
+
+    uint8_t auto_wb_lock_supported;       /* flag if auto white balance lock is supported */
+    uint8_t zoom_supported;               /* flag if zoom is supported */
+    uint8_t smooth_zoom_supported;        /* flag if smooth zoom is supported */
+    uint8_t auto_exposure_lock_supported; /* flag if auto exposure lock is supported */
+    uint8_t video_snapshot_supported;     /* flag if video snapshot is supported */
+
+    uint8_t max_num_roi;                  /* max number of roi can be detected */
+    uint8_t max_num_focus_areas;          /* max num of focus areas */
+    uint8_t max_num_metering_areas;       /* max num of metering areas */
+    uint8_t max_zoom_step;                /* max zoom step value */
+
+    /* QCOM specific control */
+    cam_control_range_t brightness_ctrl;  /* brightness */
+    cam_control_range_t sharpness_ctrl;   /* sharpness */
+    cam_control_range_t contrast_ctrl;    /* contrast */
+    cam_control_range_t saturation_ctrl;  /* saturation */
+    cam_control_range_t sce_ctrl;         /* skintone enhancement factor */
+
+    /* QCOM HDR specific control. Indicates number of frames and exposure needs for the frames */
+    cam_hdr_bracketing_info_t hdr_bracketing_setting;
+
+    cam_feature_mask_t qcom_supported_feature_mask; /* mask of qcom specific features supported:
+                                                     * such as CAM_QCOM_FEATURE_SUPPORTED_FACE_DETECTION*/
+    cam_padding_info_t padding_info;      /* padding information from PP */
+    uint32_t min_num_pp_bufs;             /* minimum number of buffers needed by postproc module */
+    cam_format_t rdi_mode_stream_fmt;  /* stream format supported in rdi mode */
+
+    /* capabilities specific to HAL 3 */
+
+    float min_focus_distance;
+    float hyper_focal_distance;
+
+    float focal_lengths[CAM_FOCAL_LENGTHS_MAX];
+    uint8_t focal_lengths_count;
+
+    /* Needs to be regular f number instead of APEX */
+    float apertures[CAM_APERTURES_MAX];
+    uint8_t apertures_count;
+
+    float filter_densities[CAM_FILTER_DENSITIES_MAX];
+    uint8_t filter_densities_count;
+
+    uint8_t optical_stab_modes[CAM_OPT_STAB_MAX];
+    uint8_t optical_stab_modes_count;
+
+    cam_dimension_t lens_shading_map_size;
+
+    cam_dimension_t geo_correction_map_size;
+    float geo_correction_map[2 * 3 * CAM_MAX_MAP_WIDTH *
+              CAM_MAX_MAP_HEIGHT];
+
+    float lens_position[3];
+
+    /* nano seconds */
+    int64_t exposure_time_range[EXPOSURE_TIME_RANGE_CNT];
+
+    /* nano seconds */
+    int64_t max_frame_duration;
+
+    cam_color_filter_arrangement_t color_arrangement;
+    uint8_t num_color_channels;
+
+    /* parameters required to calculate S and O co-efficients */
+    double gradient_S;
+    double offset_S;
+    double gradient_O;
+    double offset_O;
+
+    float sensor_physical_size[SENSOR_PHYSICAL_SIZE_CNT];
+
+    /* Dimensions of full pixel array, possibly including
+       black calibration pixels */
+    cam_dimension_t pixel_array_size;
+    /* Area of raw data which corresponds to only active
+       pixels; smaller or equal to pixelArraySize. */
+    cam_rect_t active_array_size;
+
+    /* Maximum raw value output by sensor */
+    int32_t white_level;
+
+    /* A fixed black level offset for each of the Bayer
+       mosaic channels */
+    int32_t black_level_pattern[BLACK_LEVEL_PATTERN_CNT];
+
+    /* Time taken before flash can fire again in nano secs */
+    int64_t flash_charge_duration;
+
+    /* flash firing power */
+    size_t supported_flash_firing_level_cnt;
+    cam_format_t supported_firing_levels[CAM_FLASH_FIRING_LEVEL_MAX];
+
+    /* Flash Firing Time */
+    int64_t flash_firing_time;
+
+    /* Flash Color Temperature */
+    uint8_t flash_color_temp;
+
+    /* Flash max Energy */
+    uint8_t flash_max_energy;
+
+    /* Maximum number of supported points in the tonemap
+       curve */
+    int32_t max_tone_map_curve_points;
+
+    /* supported formats */
+    size_t supported_scalar_format_cnt;
+    cam_format_t supported_scalar_fmts[CAM_FORMAT_MAX];
+
+    uint32_t max_face_detection_count;
+
+    uint8_t histogram_supported;
+    /* Number of histogram buckets supported */
+    int32_t histogram_size;
+    /* Maximum value possible for a histogram bucket */
+    int32_t max_histogram_count;
+
+    cam_dimension_t sharpness_map_size;
+
+    /* Maximum value possible for a sharpness map region */
+    int32_t max_sharpness_map_value;
+
+    /*Autoexposure modes for camera 3 api*/
+    size_t supported_ae_modes_cnt;
+    cam_ae_mode_type supported_ae_modes[CAM_AE_MODE_MAX];
+
+
+    cam_sensitivity_range_t sensitivity_range;
+    int32_t max_analog_sensitivity;
+
+    /* ISP digital gain */
+    cam_sensitivity_range_t isp_sensitivity_range;
+
+    /* picture sizes need scale*/
+    cam_scene_mode_overrides_t scene_mode_overrides[CAM_SCENE_MODE_MAX];
+    size_t scale_picture_sizes_cnt;
+    cam_dimension_t scale_picture_sizes[MAX_SCALE_SIZES_CNT];
+
+    uint8_t flash_available;
+
+    cam_rational_type_t base_gain_factor;    /* sensor base gain factor */
+    /* AF Bracketing info */
+    cam_af_bracketing_t  ubifocus_af_bracketing_need;
+    cam_af_bracketing_t  refocus_af_bracketing_need;
+    /* opti Zoom info */
+    cam_opti_zoom_t      opti_zoom_settings_need;
+    /* still more info */
+    cam_still_more_t  stillmore_settings_need;
+    /* chroma flash info */
+    cam_chroma_flash_t chroma_flash_settings_need;
+
+    cam_rational_type_t forward_matrix[3][3];
+    cam_rational_type_t color_transform[3][3];
+
+    uint8_t focus_dist_calibrated;
+    uint8_t supported_test_pattern_modes_cnt;
+    cam_test_pattern_mode_t supported_test_pattern_modes[MAX_TEST_PATTERN_CNT];
+
+    int64_t stall_durations[MAX_SIZES_CNT];
+
+    cam_illuminat_t reference_illuminant1;
+    cam_illuminat_t reference_illuminant2;
+
+    int64_t jpeg_stall_durations[MAX_SIZES_CNT];
+    int64_t raw16_stall_durations[MAX_SIZES_CNT];
+    cam_rational_type_t forward_matrix1[FORWARD_MATRIX_ROWS][FORWARD_MATRIX_COLS];
+    cam_rational_type_t forward_matrix2[FORWARD_MATRIX_ROWS][FORWARD_MATRIX_COLS];
+    cam_rational_type_t color_transform1[COLOR_TRANSFORM_ROWS][COLOR_TRANSFORM_COLS];
+    cam_rational_type_t color_transform2[COLOR_TRANSFORM_ROWS][COLOR_TRANSFORM_COLS];
+    cam_rational_type_t calibration_transform1[CAL_TRANSFORM_ROWS][CAL_TRANSFORM_COLS];
+    cam_rational_type_t calibration_transform2[CAL_TRANSFORM_ROWS][CAL_TRANSFORM_COLS];
+    uint16_t isCacSupported;
+
+    cam_opaque_raw_format_t opaque_raw_fmt;
+
+    /* true Portrait info */
+    cam_true_portrait_t  true_portrait_settings_need;
+
+    /* Sensor type information */
+    cam_sensor_type_t sensor_type;
+
+    cam_aberration_mode_t aberration_modes[CAM_COLOR_CORRECTION_ABERRATION_MAX];
+    uint32_t aberration_modes_count;
+
+    /* Can the sensor timestamp be compared to
+     * timestamps from other sub-systems (gyro, accelerometer etc.) */
+    uint8_t isTimestampCalibrated;
+
+    /* Max size supported by ISP viewfinder path */
+    cam_dimension_t max_viewfinder_size;
+
+    /* Analysis buffer requirements */
+    cam_analysis_info_t analysis_info[CAM_ANALYSIS_INFO_MAX];
+
+    /* This is set to 'true' if sensor cannot guarantee per frame control */
+    /* Default value of this capability is 'false' indicating per-frame */
+    /* control is supported */
+    uint8_t no_per_frame_control_support;
+
+    /* EIS information */
+    uint8_t supported_is_types_cnt;
+    uint32_t supported_is_types[IS_TYPE_MAX];
+    /*for each type, specify the margin needed. Margin will be
+      the decimal representation of a percentage
+      ex: 10% margin = 0.1 */
+    float supported_is_type_margins[IS_TYPE_MAX];
+
+    /* Max cpp batch size */
+    uint8_t max_batch_bufs_supported;
+    uint32_t buf_alignment;
+    uint32_t min_stride;
+    uint32_t min_scanline;
+    uint8_t flash_dev_name[QCAMERA_MAX_FILEPATH_LENGTH];
+    uint8_t eeprom_version_info[MAX_EEPROM_VERSION_INFO_LEN];
+
+    /* maximum pixel bandwidth shared between cameras */
+    uint64_t max_pixel_bandwidth;
+
+    /* Array of K integers, where K%4==0,
+      as a list of rectangles in the pixelArray co-ord system
+      left, top, right, bottom */
+    int32_t optical_black_regions[MAX_OPTICAL_BLACK_REGIONS * 4];
+    /* Count is K/4 */
+    uint8_t optical_black_region_count;
+
+    /* hot pixel */
+    uint8_t hotPixel_mode;
+    uint32_t hotPixel_count;
+    cam_coordinate_type_t hotPixelMap[512];
+
+    /* supported instant capture/AEC convergence modes */
+    size_t supported_instant_aec_modes_cnt;
+    cam_aec_convergence_type supported_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
+
+    /* Dual cam calibration data */
+    cam_related_system_calibration_data_t related_cam_calibration;
+} cam_capability_t;
+
+/* Stream parameter types; each value aliases the matching CAM_INTF_PARM_*
+ * ID so stream parameters share the common interface parameter ID space. */
+typedef enum {
+    CAM_STREAM_PARAM_TYPE_DO_REPROCESS = CAM_INTF_PARM_DO_REPROCESS,
+    CAM_STREAM_PARAM_TYPE_SET_BUNDLE_INFO = CAM_INTF_PARM_SET_BUNDLE,
+    CAM_STREAM_PARAM_TYPE_SET_FLIP = CAM_INTF_PARM_STREAM_FLIP,
+    CAM_STREAM_PARAM_TYPE_GET_OUTPUT_CROP = CAM_INTF_PARM_GET_OUTPUT_CROP,
+    CAM_STREAM_PARAM_TYPE_GET_IMG_PROP = CAM_INTF_PARM_GET_IMG_PROP,
+    CAM_STREAM_PARAM_TYPE_REQUEST_FRAMES = CAM_INTF_PARM_REQUEST_FRAMES,
+    CAM_STREAM_PARAM_TYPE_REQUEST_OPS_MODE = CAM_INTF_PARM_REQUEST_OPS_MODE,
+    CAM_STREAM_PARAM_TYPE_MAX
+} cam_stream_param_type_e;
+
+/* Payload for CAM_STREAM_PARAM_TYPE_DO_REPROCESS: identifies the source
+ * frame (and optional associated metadata buffer) to be reprocessed, and
+ * carries the result code back from the reprocess operation. */
+typedef struct {
+    uint32_t buf_index;           /* buf index to the source frame buffer that needs reprocess,
+                                    (assume buffer is already mapped)*/
+    uint32_t frame_idx;           /* frame id of source frame to be reprocessed */
+    int32_t ret_val;              /* return value from reprocess. Could have different meanings.
+                                     i.e., faceID in the case of face registration. */
+    uint8_t meta_present;         /* if there is meta data associated with this reprocess frame */
+    uint32_t meta_stream_handle;  /* meta data stream ID. only valid if meta_present != 0 */
+    uint32_t meta_buf_index;      /* buf index to meta data buffer. only valid if meta_present != 0 */
+
+    /* opaque metadata required for reprocessing */
+    int32_t private_data[MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES];
+    cam_rect_t crop_rect;
+} cam_reprocess_param;
+
+/* Payload for CAM_STREAM_PARAM_TYPE_SET_FLIP: bitmask of flip operations
+ * to apply on the stream. */
+typedef struct {
+    uint32_t flip_mask;
+} cam_flip_mode_t;
+
+#define IMG_NAME_SIZE 32
+/* Payload for CAM_STREAM_PARAM_TYPE_GET_IMG_PROP: geometry and format of
+ * the current frame on the stream. */
+typedef struct {
+    cam_rect_t crop;  /* crop info for the image */
+    cam_dimension_t input; /* input dimension of the image */
+    cam_dimension_t output; /* output dimension of the image */
+    char name[IMG_NAME_SIZE]; /* optional name of the ext*/
+    cam_format_t format; /* image format */
+} cam_stream_img_prop_t;
+
+/* Payload for CAM_STREAM_PARAM_TYPE_REQUEST_FRAMES. */
+typedef struct {
+    uint8_t enableStream; /* 0 = stop, 1 = start */
+} cam_request_frames;
+
+/* Tagged union carrying one stream parameter; 'type' selects which member
+ * of the anonymous union is active. */
+typedef struct {
+    cam_stream_param_type_e type;
+    union {
+        cam_reprocess_param reprocess;  /* do reprocess */
+        cam_bundle_config_t bundleInfo; /* set bundle info*/
+        cam_flip_mode_t flipInfo;       /* flip mode */
+        cam_crop_data_t outputCrop;     /* output crop for current frame */
+        cam_stream_img_prop_t imgProp;  /* image properties of current frame */
+        cam_request_frames frameRequest; /*do TNR process*/
+        cam_perf_mode_t perf_mode;       /*request operational mode*/
+    };
+} cam_stream_parm_buffer_t;
+
+/* stream info: static per-stream configuration (format, dimensions,
+ * buffering and post-processing setup) exchanged at stream setup time */
+typedef struct {
+    /* stream ID from server */
+    uint32_t stream_svr_id;
+
+    /* stream type */
+    cam_stream_type_t stream_type;
+
+    /* image format */
+    cam_format_t fmt;
+
+    /* image dimension */
+    cam_dimension_t dim;
+
+    /* buffer plane information, will be calc based on stream_type, fmt,
+       dim, and padding_info(from stream config). Info including:
+       offset_x, offset_y, stride, scanline, plane offset */
+    cam_stream_buf_plane_info_t buf_planes;
+
+    /* number of stream bufs will be allocated */
+    uint32_t num_bufs;
+
+    /* streaming type */
+    cam_streaming_mode_t streaming_mode;
+
+    /* num of frames needs to be generated.
+     * only valid when streaming_mode = CAM_STREAMING_MODE_BURST */
+    uint8_t num_of_burst;
+
+    /* num of frames in one batch.
+     * only valid when streaming_mode = CAM_STREAMING_MODE_BATCH */
+    cam_stream_user_buf_info_t user_buf_info;
+
+    /* stream specific pp config */
+    cam_pp_feature_config_t pp_config;
+
+    /* this section is valid if offline reprocess type stream */
+    cam_stream_reproc_config_t reprocess_config;
+
+    cam_stream_parm_buffer_t parm_buf;    /* stream based parameters */
+
+    /* NOTE(review): presumably DIS (digital image stabilization) enable
+     * flag, given the adjacent is_type field — confirm with daemon side */
+    uint8_t dis_enable;
+
+    /* Image Stabilization type */
+    cam_is_type_t is_type;
+
+    /* Signifies Secure stream mode */
+    cam_stream_secure_t is_secure;
+
+    /* Preferred Performance mode */
+    cam_perf_mode_t perf_mode;
+
+    /* if frames will not be received */
+    uint8_t noFrameExpected;
+} cam_stream_info_t;
+
+/*****************************************************************************
+ *                 Code for Domain Socket Based Parameters                   *
+ ****************************************************************************/
+/* Declares storage for one parameter/metadata entry: an array of COUNT
+ * elements of DATATYPE, named after PARAM_ID. */
+#define INCLUDE(PARAM_ID,DATATYPE,COUNT)  \
+        DATATYPE member_variable_##PARAM_ID[ COUNT ]
+
+/* Pointer to the first element of the META_ID entry, or NULL when the
+ * table pointer itself is NULL. Does NOT check the is_valid flag. */
+#define POINTER_OF_META(META_ID, TABLE_PTR) \
+        ((NULL != TABLE_PTR) ? \
+            (&TABLE_PTR->data.member_variable_##META_ID[ 0 ]) : (NULL))
+
+/* Size in bytes of the whole META_ID entry (the full array, not one
+ * element). */
+#define SIZE_OF_PARAM(META_ID, TABLE_PTR) \
+        sizeof(TABLE_PTR->data.member_variable_##META_ID)
+
+/* Declares META_PTR_NAME pointing at the META_ID entry and guards the
+ * statement/block that follows the macro invocation; the pointer is NULL
+ * (and the guarded block skipped) when the table is NULL or the entry is
+ * not marked valid. Deliberately ends in a bare 'if' — do not restructure. */
+#define IF_META_AVAILABLE(META_TYPE, META_PTR_NAME, META_ID, TABLE_PTR) \
+        META_TYPE *META_PTR_NAME = \
+        (((NULL != TABLE_PTR) && (TABLE_PTR->is_valid[META_ID])) ? \
+            (&TABLE_PTR->data.member_variable_##META_ID[ 0 ]) : \
+            (NULL)); \
+        if (NULL != META_PTR_NAME) \
+
+/* Stores DATA into the first element of the META_ID entry and marks it
+ * valid. Expression macro: evaluates to 0 on success, -1 (after logging)
+ * when TABLE_PTR is NULL. */
+#define ADD_SET_PARAM_ENTRY_TO_BATCH(TABLE_PTR, META_ID, DATA) \
+    ((NULL != TABLE_PTR) ? \
+    ((TABLE_PTR->data.member_variable_##META_ID[ 0 ] = DATA), \
+    (TABLE_PTR->is_valid[META_ID] = 1), (0)) : \
+    ((LOGE("Unable to set metadata TABLE_PTR:%p META_ID:%d", \
+            TABLE_PTR, META_ID)), (-1))) \
+
+#define ADD_SET_PARAM_ARRAY_TO_BATCH(TABLE_PTR, META_ID, PDATA, COUNT, RCOUNT) \
+{ \
+    if ((NULL != TABLE_PTR) && \
+            (0 < COUNT) && \
+            ((sizeof(TABLE_PTR->data.member_variable_##META_ID) / \
+            sizeof(TABLE_PTR->data.member_variable_##META_ID[ 0 ])) \
+            >= COUNT))  { \
+        for (size_t _i = 0; _i < COUNT ; _i++) { \
+            TABLE_PTR->data.member_variable_##META_ID[ _i ] = PDATA [ _i ]; \
+        } \
+        TABLE_PTR->is_valid[META_ID] = 1; \
+        RCOUNT = COUNT; \
+    } else { \
+        LOGE("Unable to set metadata TABLE_PTR:%p META_ID:%d COUNT:%zu", \
+              TABLE_PTR, META_ID, COUNT); \
+        RCOUNT = 0; \
+    } \
+}
+
+#define ADD_GET_PARAM_ENTRY_TO_BATCH(TABLE_PTR, META_ID) \
+{ \
+    if (NULL != TABLE_PTR) { \
+        TABLE_PTR->is_reqd[META_ID] = 1; \
+    } else { \
+        LOGE("Unable to get metadata TABLE_PTR:%p META_ID:%d", \
+                  TABLE_PTR, META_ID); \
+    } \
+}
+
+#define READ_PARAM_ENTRY(TABLE_PTR, META_ID, DATA) \
+{ \
+    if (NULL != TABLE_PTR) { \
+        DATA = TABLE_PTR->data.member_variable_##META_ID[ 0 ]; \
+    } else { \
+        LOGE("Unable to read metadata TABLE_PTR:%p META_ID:%d", \
+                  TABLE_PTR, META_ID); \
+    } \
+}
+
+/************************************
+* Custom parameter data definition
+*************************************/
+typedef struct {
+    /* CAM_CUSTOM_PARM_EXAMPLE illustrates how to add a custom parameter */
+    INCLUDE(CAM_CUSTOM_PARM_EXAMPLE,            int32_t,           1);
+} custom_parm_data_t;
+
+/************************************
+* Custom Parameter buffer definition
+*************************************/
+/* The anonymous union overlays the SET-path 'is_valid' flags and the
+ * GET-path 'is_reqd' flags — a buffer is used for one direction at a time. */
+typedef struct {
+    union {
+        /* Hash table of 'is valid' flags */
+        uint8_t         is_valid[CAM_CUSTOM_PARM_MAX];
+
+        /* Hash table of 'is required' flags for the GET PARAM */
+        uint8_t         is_reqd[CAM_CUSTOM_PARM_MAX];
+    };
+    custom_parm_data_t data;
+} custom_parm_buffer_t;
+
+
+/* Storage for every parameter/metadata entry, generated via INCLUDE().
+ * Entry IDs come from the cam_intf parameter/metadata ID enums; the
+ * enclosing metadata_buffer_t tracks per-entry validity flags. */
+typedef struct {
+/**************************************************************************************
+ *  ID from (cam_intf_metadata_type_t)                DATATYPE                     COUNT
+ **************************************************************************************/
+    /* common between HAL1 and HAL3 */
+    INCLUDE(CAM_INTF_META_HISTOGRAM,                    cam_hist_stats_t,               1);
+    INCLUDE(CAM_INTF_META_FACE_DETECTION,               cam_face_detection_data_t,      1);
+    INCLUDE(CAM_INTF_META_FACE_RECOG,                   cam_face_recog_data_t,          1);
+    INCLUDE(CAM_INTF_META_FACE_BLINK,                   cam_face_blink_data_t,          1);
+    INCLUDE(CAM_INTF_META_FACE_GAZE,                    cam_face_gaze_data_t,           1);
+    INCLUDE(CAM_INTF_META_FACE_SMILE,                   cam_face_smile_data_t,          1);
+    INCLUDE(CAM_INTF_META_FACE_LANDMARK,                cam_face_landmarks_data_t,      1);
+    INCLUDE(CAM_INTF_META_FACE_CONTOUR,                 cam_face_contour_data_t,        1);
+    INCLUDE(CAM_INTF_META_AUTOFOCUS_DATA,               cam_auto_focus_data_t,          1);
+    INCLUDE(CAM_INTF_META_CDS_DATA,                     cam_cds_data_t,                 1);
+    INCLUDE(CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,           uint32_t,                       1);
+
+    /* Specific to HAl1 */
+    INCLUDE(CAM_INTF_META_CROP_DATA,                    cam_crop_data_t,                1);
+    INCLUDE(CAM_INTF_META_PREP_SNAPSHOT_DONE,           int32_t,                        1);
+    INCLUDE(CAM_INTF_META_GOOD_FRAME_IDX_RANGE,         cam_frame_idx_range_t,          1);
+    INCLUDE(CAM_INTF_META_ASD_HDR_SCENE_DATA,           cam_asd_hdr_scene_data_t,       1);
+    INCLUDE(CAM_INTF_META_ASD_SCENE_INFO,               cam_asd_decision_t,             1);
+    INCLUDE(CAM_INTF_META_CURRENT_SCENE,                cam_scene_mode_type,            1);
+    INCLUDE(CAM_INTF_META_AWB_INFO,                     cam_awb_params_t,               1);
+    INCLUDE(CAM_INTF_META_FOCUS_POSITION,               cam_focus_pos_info_t,           1);
+    INCLUDE(CAM_INTF_META_CHROMATIX_LITE_ISP,           cam_chromatix_lite_isp_t,       1);
+    INCLUDE(CAM_INTF_META_CHROMATIX_LITE_PP,            cam_chromatix_lite_pp_t,        1);
+    INCLUDE(CAM_INTF_META_CHROMATIX_LITE_AE,            cam_chromatix_lite_ae_stats_t,  1);
+    INCLUDE(CAM_INTF_META_CHROMATIX_LITE_AWB,           cam_chromatix_lite_awb_stats_t, 1);
+    INCLUDE(CAM_INTF_META_CHROMATIX_LITE_AF,            cam_chromatix_lite_af_stats_t,  1);
+    INCLUDE(CAM_INTF_META_CHROMATIX_LITE_ASD,           cam_chromatix_lite_asd_stats_t, 1);
+    INCLUDE(CAM_INTF_BUF_DIVERT_INFO,                   cam_buf_divert_info_t,          1);
+
+    /* Specific to HAL3 */
+    INCLUDE(CAM_INTF_META_FRAME_NUMBER_VALID,           int32_t,                     1);
+    INCLUDE(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID,    int32_t,                     1);
+    INCLUDE(CAM_INTF_META_FRAME_DROPPED,                cam_stream_ID_t,             1);
+    INCLUDE(CAM_INTF_META_FRAME_NUMBER,                 uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_URGENT_FRAME_NUMBER,          uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_COLOR_CORRECT_MODE,           uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_COLOR_CORRECT_TRANSFORM,      cam_color_correct_matrix_t,  1);
+    INCLUDE(CAM_INTF_META_COLOR_CORRECT_GAINS,          cam_color_correct_gains_t,   1);
+    INCLUDE(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, cam_color_correct_matrix_t,  1);
+    INCLUDE(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS,     cam_color_correct_gains_t,   1);
+    INCLUDE(CAM_INTF_META_AEC_ROI,                      cam_area_t,                  1);
+    INCLUDE(CAM_INTF_META_AEC_STATE,                    uint32_t,                    1);
+    INCLUDE(CAM_INTF_PARM_FOCUS_MODE,                   uint32_t,                    1);
+    INCLUDE(CAM_INTF_PARM_MANUAL_FOCUS_POS,             cam_manual_focus_parm_t,     1);
+    INCLUDE(CAM_INTF_META_AF_ROI,                       cam_area_t,                  1);
+    INCLUDE(CAM_INTF_META_AF_STATE,                     uint32_t,                    1);
+    INCLUDE(CAM_INTF_PARM_WHITE_BALANCE,                int32_t,                     1);
+    INCLUDE(CAM_INTF_META_AWB_REGIONS,                  cam_area_t,                  1);
+    INCLUDE(CAM_INTF_META_AWB_STATE,                    uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_BLACK_LEVEL_LOCK,             uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_MODE,                         uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_EDGE_MODE,                    cam_edge_application_t,      1);
+    INCLUDE(CAM_INTF_META_FLASH_POWER,                  uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_FLASH_FIRING_TIME,            int64_t,                     1);
+    INCLUDE(CAM_INTF_META_FLASH_MODE,                   uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_FLASH_STATE,                  int32_t,                     1);
+    INCLUDE(CAM_INTF_META_HOTPIXEL_MODE,                uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_LENS_APERTURE,                float,                       1);
+    INCLUDE(CAM_INTF_META_LENS_FILTERDENSITY,           float,                       1);
+    INCLUDE(CAM_INTF_META_LENS_FOCAL_LENGTH,            float,                       1);
+    INCLUDE(CAM_INTF_META_LENS_FOCUS_DISTANCE,          float,                       1);
+    INCLUDE(CAM_INTF_META_LENS_FOCUS_RANGE,             float,                       2);
+    INCLUDE(CAM_INTF_META_LENS_STATE,                   cam_af_lens_state_t,         1);
+    INCLUDE(CAM_INTF_META_LENS_OPT_STAB_MODE,           uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_VIDEO_STAB_MODE,              uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_LENS_FOCUS_STATE,             uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_NOISE_REDUCTION_MODE,         uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_NOISE_REDUCTION_STRENGTH,     uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_SCALER_CROP_REGION,           cam_crop_region_t,           1);
+    INCLUDE(CAM_INTF_META_SCENE_FLICKER,                uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_SENSOR_EXPOSURE_TIME,         int64_t,                     1);
+    INCLUDE(CAM_INTF_META_SENSOR_FRAME_DURATION,        int64_t,                     1);
+    INCLUDE(CAM_INTF_META_SENSOR_SENSITIVITY,           int32_t,                     1);
+    INCLUDE(CAM_INTF_META_ISP_SENSITIVITY ,             int32_t,                     1);
+    INCLUDE(CAM_INTF_META_SENSOR_TIMESTAMP,             int64_t,                     1);
+    INCLUDE(CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW,  int64_t,                     1);
+    INCLUDE(CAM_INTF_META_SHADING_MODE,                 uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_STATS_FACEDETECT_MODE,        uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_STATS_HISTOGRAM_MODE,         uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,     uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_STATS_SHARPNESS_MAP,          cam_sharpness_map_t,         3);
+    INCLUDE(CAM_INTF_META_TONEMAP_CURVES,               cam_rgb_tonemap_curves,      1);
+    INCLUDE(CAM_INTF_META_LENS_SHADING_MAP,             cam_lens_shading_map_t,      1);
+    INCLUDE(CAM_INTF_META_AEC_INFO,                     cam_3a_params_t,             1);
+    INCLUDE(CAM_INTF_META_SENSOR_INFO,                  cam_sensor_params_t,         1);
+    INCLUDE(CAM_INTF_META_EXIF_DEBUG_AE,                cam_ae_exif_debug_t,         1);
+    INCLUDE(CAM_INTF_META_EXIF_DEBUG_AWB,               cam_awb_exif_debug_t,        1);
+    INCLUDE(CAM_INTF_META_EXIF_DEBUG_AF,                cam_af_exif_debug_t,         1);
+    INCLUDE(CAM_INTF_META_EXIF_DEBUG_ASD,               cam_asd_exif_debug_t,        1);
+    INCLUDE(CAM_INTF_META_EXIF_DEBUG_STATS,             cam_stats_buffer_exif_debug_t,   1);
+    INCLUDE(CAM_INTF_META_EXIF_DEBUG_BESTATS,           cam_bestats_buffer_exif_debug_t, 1);
+    INCLUDE(CAM_INTF_META_EXIF_DEBUG_BHIST,             cam_bhist_buffer_exif_debug_t,   1);
+    INCLUDE(CAM_INTF_META_EXIF_DEBUG_3A_TUNING,         cam_q3a_tuning_info_t,       1);
+    INCLUDE(CAM_INTF_META_ASD_SCENE_CAPTURE_TYPE,       cam_auto_scene_t,            1);
+    INCLUDE(CAM_INTF_PARM_EFFECT,                       uint32_t,                    1);
+    /* Defining as int32_t so that this array is 4 byte aligned */
+    INCLUDE(CAM_INTF_META_PRIVATE_DATA,                 int32_t,
+            MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / 4);
+
+    /* Following are Params only and not metadata currently */
+    INCLUDE(CAM_INTF_PARM_HAL_VERSION,                  int32_t,                     1);
+    /* Shared between HAL1 and HAL3 */
+    INCLUDE(CAM_INTF_PARM_ANTIBANDING,                  uint32_t,                    1);
+    INCLUDE(CAM_INTF_PARM_EXPOSURE_COMPENSATION,        int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_EV_STEP,                      cam_rational_type_t,         1);
+    INCLUDE(CAM_INTF_PARM_AEC_LOCK,                     uint32_t,                    1);
+    INCLUDE(CAM_INTF_PARM_FPS_RANGE,                    cam_fps_range_t,             1);
+    INCLUDE(CAM_INTF_PARM_AWB_LOCK,                     uint32_t,                    1);
+    INCLUDE(CAM_INTF_PARM_BESTSHOT_MODE,                uint32_t,                    1);
+    INCLUDE(CAM_INTF_PARM_DIS_ENABLE,                   int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_LED_MODE,                     int32_t,                     1);
+    INCLUDE(CAM_INTF_META_LED_MODE_OVERRIDE,            uint32_t,                    1);
+
+    /* dual camera specific params */
+    INCLUDE(CAM_INTF_PARM_RELATED_SENSORS_CALIBRATION,  cam_related_system_calibration_data_t, 1);
+    INCLUDE(CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,        cam_focal_length_ratio_t, 1);
+    INCLUDE(CAM_INTF_META_SNAP_CROP_INFO_SENSOR,        cam_stream_crop_info_t,   1);
+    INCLUDE(CAM_INTF_META_SNAP_CROP_INFO_CAMIF,         cam_stream_crop_info_t,   1);
+    INCLUDE(CAM_INTF_META_SNAP_CROP_INFO_ISP,           cam_stream_crop_info_t,   1);
+    INCLUDE(CAM_INTF_META_SNAP_CROP_INFO_CPP,           cam_stream_crop_info_t,   1);
+    INCLUDE(CAM_INTF_META_DCRF,                         cam_dcrf_result_t,        1);
+
+    /* HAL1 specific */
+    /* read only */
+    INCLUDE(CAM_INTF_PARM_QUERY_FLASH4SNAP,             int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_EXPOSURE,                     int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_SHARPNESS,                    int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_CONTRAST,                     int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_SATURATION,                   int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_BRIGHTNESS,                   int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_ISO,                          cam_intf_parm_manual_3a_t,   1);
+    INCLUDE(CAM_INTF_PARM_EXPOSURE_TIME,                cam_intf_parm_manual_3a_t,   1);
+    INCLUDE(CAM_INTF_PARM_ZOOM,                         int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_ROLLOFF,                      int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_MODE,                         int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_AEC_ALGO_TYPE,                int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_FOCUS_ALGO_TYPE,              int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_AEC_ROI,                      cam_set_aec_roi_t,           1);
+    INCLUDE(CAM_INTF_PARM_AF_ROI,                       cam_roi_info_t,              1);
+    INCLUDE(CAM_INTF_PARM_SCE_FACTOR,                   int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_FD,                           cam_fd_set_parm_t,           1);
+    INCLUDE(CAM_INTF_PARM_MCE,                          int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_HFR,                          int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_REDEYE_REDUCTION,             int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_WAVELET_DENOISE,              cam_denoise_param_t,         1);
+    INCLUDE(CAM_INTF_PARM_TEMPORAL_DENOISE,             cam_denoise_param_t,         1);
+    INCLUDE(CAM_INTF_PARM_HISTOGRAM,                    int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_ASD_ENABLE,                   int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_RECORDING_HINT,               int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_HDR,                          cam_exp_bracketing_t,        1);
+    INCLUDE(CAM_INTF_PARM_FRAMESKIP,                    int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_ZSL_MODE,                     int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_HDR_NEED_1X,                  int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_LOCK_CAF,                     int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_VIDEO_HDR,                    int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_SENSOR_HDR,                   cam_sensor_hdr_type_t,       1);
+    INCLUDE(CAM_INTF_PARM_VT,                           int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_SET_AUTOFOCUSTUNING,          tune_actuator_t,             1);
+    INCLUDE(CAM_INTF_PARM_SET_VFE_COMMAND,              tune_cmd_t,                  1);
+    INCLUDE(CAM_INTF_PARM_SET_PP_COMMAND,               tune_cmd_t,                  1);
+    INCLUDE(CAM_INTF_PARM_MAX_DIMENSION,                cam_dimension_t,             1);
+    INCLUDE(CAM_INTF_PARM_RAW_DIMENSION,                cam_dimension_t,             1);
+    INCLUDE(CAM_INTF_PARM_TINTLESS,                     int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_WB_MANUAL,                    cam_manual_wb_parm_t,        1);
+    INCLUDE(CAM_INTF_PARM_CDS_MODE,                     int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_EZTUNE_CMD,                   cam_eztune_cmd_data_t,       1);
+    INCLUDE(CAM_INTF_PARM_INT_EVT,                      cam_int_evt_params_t,        1);
+    INCLUDE(CAM_INTF_PARM_RDI_MODE,                     int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_BURST_NUM,                    uint32_t,                    1);
+    INCLUDE(CAM_INTF_PARM_RETRO_BURST_NUM,              uint32_t,                    1);
+    INCLUDE(CAM_INTF_PARM_BURST_LED_ON_PERIOD,          uint32_t,                    1);
+    INCLUDE(CAM_INTF_PARM_LONGSHOT_ENABLE,              int8_t,                      1);
+    INCLUDE(CAM_INTF_PARM_TONE_MAP_MODE,                uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_TOUCH_AE_RESULT,              int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_DUAL_LED_CALIBRATION,         int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_ADV_CAPTURE_MODE,             uint8_t,                     1);
+
+    /* HAL3 specific */
+    INCLUDE(CAM_INTF_META_STREAM_INFO,                  cam_stream_size_info_t,      1);
+    INCLUDE(CAM_INTF_META_AEC_MODE,                     uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,       cam_trigger_t,               1);
+    INCLUDE(CAM_INTF_META_AF_TRIGGER,                   cam_trigger_t,               1);
+    INCLUDE(CAM_INTF_META_CAPTURE_INTENT,               uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_DEMOSAIC,                     int32_t,                     1);
+    INCLUDE(CAM_INTF_META_SHARPNESS_STRENGTH,           int32_t,                     1);
+    INCLUDE(CAM_INTF_META_GEOMETRIC_MODE,               uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_GEOMETRIC_STRENGTH,           uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_LENS_SHADING_MAP_MODE,        uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_SHADING_STRENGTH,             uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_TONEMAP_MODE,                 uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_STREAM_ID,                    cam_stream_ID_t,             1);
+    INCLUDE(CAM_INTF_PARM_STATS_DEBUG_MASK,             uint32_t,                    1);
+    INCLUDE(CAM_INTF_PARM_STATS_AF_PAAF,                uint32_t,                    1);
+    INCLUDE(CAM_INTF_PARM_FOCUS_BRACKETING,             cam_af_bracketing_t,         1);
+    INCLUDE(CAM_INTF_PARM_FLASH_BRACKETING,             cam_flash_bracketing_t,      1);
+    INCLUDE(CAM_INTF_META_JPEG_GPS_COORDINATES,         double,                      3);
+    INCLUDE(CAM_INTF_META_JPEG_GPS_PROC_METHODS,        uint8_t,                     GPS_PROCESSING_METHOD_SIZE);
+    INCLUDE(CAM_INTF_META_JPEG_GPS_TIMESTAMP,           int64_t,                     1);
+    INCLUDE(CAM_INTF_META_JPEG_ORIENTATION,             int32_t,                     1);
+    INCLUDE(CAM_INTF_META_JPEG_QUALITY,                 uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_JPEG_THUMB_QUALITY,           uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_JPEG_THUMB_SIZE,              cam_dimension_t,             1);
+    INCLUDE(CAM_INTF_META_TEST_PATTERN_DATA,            cam_test_pattern_data_t,     1);
+    INCLUDE(CAM_INTF_META_PROFILE_TONE_CURVE,           cam_profile_tone_curve,      1);
+    INCLUDE(CAM_INTF_META_OTP_WB_GRGB,                  float,                       1);
+    INCLUDE(CAM_INTF_META_IMG_HYST_INFO,                cam_img_hysterisis_info_t,   1);
+    INCLUDE(CAM_INTF_META_CAC_INFO,                     cam_cac_info_t,              1);
+    INCLUDE(CAM_INTF_PARM_CAC,                          cam_aberration_mode_t,       1);
+    INCLUDE(CAM_INTF_META_NEUTRAL_COL_POINT,            cam_neutral_col_point_t,     1);
+    INCLUDE(CAM_INTF_PARM_ROTATION,                     cam_rotation_info_t,         1);
+    INCLUDE(CAM_INTF_PARM_HW_DATA_OVERWRITE,            cam_hw_data_overwrite_t,     1);
+    INCLUDE(CAM_INTF_META_IMGLIB,                       cam_intf_meta_imglib_t,      1);
+    INCLUDE(CAM_INTF_PARM_CAPTURE_FRAME_CONFIG,         cam_capture_frame_config_t,  1);
+    INCLUDE(CAM_INTF_PARM_CUSTOM,                       custom_parm_buffer_t,        1);
+    INCLUDE(CAM_INTF_PARM_FLIP,                         int32_t,                     1);
+    INCLUDE(CAM_INTF_META_USE_AV_TIMER,                 uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,    float,                       1);
+    INCLUDE(CAM_INTF_META_LDAF_EXIF,                    uint32_t,                    2);
+    INCLUDE(CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN,   cam_black_level_metadata_t,  1);
+    INCLUDE(CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN,  cam_black_level_metadata_t,  1);
+    INCLUDE(CAM_INTF_META_LOW_LIGHT,                    cam_low_light_mode_t,        1);
+    INCLUDE(CAM_INTF_META_IMG_DYN_FEAT,                 cam_dyn_img_data_t,          1);
+    INCLUDE(CAM_INTF_PARM_MANUAL_CAPTURE_TYPE,          cam_manual_capture_type,     1);
+    INCLUDE(CAM_INTF_AF_STATE_TRANSITION,               uint8_t,                     1);
+    INCLUDE(CAM_INTF_PARM_INITIAL_EXPOSURE_INDEX,       uint32_t,                    1);
+    INCLUDE(CAM_INTF_PARM_INSTANT_AEC,                  uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_REPROCESS_FLAGS,              uint8_t,                     1);
+    INCLUDE(CAM_INTF_PARM_JPEG_ENCODE_CROP,             cam_stream_crop_info_t,      1);
+} metadata_data_t;
+
+/* Update clear_metadata_buffer() function when a new is_xxx_valid is added to
+ * or removed from this structure */
+/* Per-entry validity flags plus the entry payloads, followed by several
+ * standalone payloads (tuning, mobicat, stats-debug) each gated by its
+ * own is_xxx_valid flag. The anonymous union overlays the SET-path
+ * 'is_valid' flags and the GET-path 'is_reqd' flags. */
+typedef struct {
+    union{
+        /* Hash table of 'is valid' flags */
+        uint8_t         is_valid[CAM_INTF_PARM_MAX];
+
+        /* Hash table of 'is required' flags for the GET PARAM */
+        uint8_t         is_reqd[CAM_INTF_PARM_MAX];
+    };
+    metadata_data_t data;
+    /*Tuning Data */
+    uint8_t is_tuning_params_valid;
+    tuning_params_t tuning_params;
+
+    /* Mobicat Params */
+    uint8_t is_mobicat_aec_params_valid;
+    cam_3a_params_t mobicat_aec_params;
+
+    /* Stats 3A Debug Params */
+    uint8_t is_statsdebug_ae_params_valid;
+    cam_ae_exif_debug_t statsdebug_ae_data;
+
+    uint8_t is_statsdebug_awb_params_valid;
+    cam_awb_exif_debug_t statsdebug_awb_data;
+
+    uint8_t is_statsdebug_af_params_valid;
+    cam_af_exif_debug_t statsdebug_af_data;
+
+    uint8_t is_statsdebug_asd_params_valid;
+    cam_asd_exif_debug_t statsdebug_asd_data;
+
+    uint8_t is_statsdebug_stats_params_valid;
+    cam_stats_buffer_exif_debug_t statsdebug_stats_buffer_data;
+
+    uint8_t is_statsdebug_bestats_params_valid;
+    cam_bestats_buffer_exif_debug_t statsdebug_bestats_buffer_data;
+
+    uint8_t is_statsdebug_bhist_params_valid;
+    cam_bhist_buffer_exif_debug_t statsdebug_bhist_data;
+
+    uint8_t is_statsdebug_3a_tuning_params_valid;
+    cam_q3a_tuning_info_t statsdebug_3a_tuning_data;
+
+} metadata_buffer_t;
+
+/* Parameter batches reuse the metadata buffer layout. */
+typedef metadata_buffer_t parm_buffer_t;
+
+#ifdef  __cplusplus
+extern "C" {
+#endif
+
+/* Resets every validity flag in 'meta' — the per-entry is_valid table and
+ * all standalone is_xxx_valid flags — without touching the payload data.
+ * No-op when 'meta' is NULL.
+ * Update this inline function when a new is_xxx_valid is added to
+ * or removed from metadata_buffer_t */
+static inline void clear_metadata_buffer(metadata_buffer_t *meta)
+{
+    if (meta) {
+      /* sizeof ties the cleared length to the field itself, so this stays
+       * correct even if the array's dimension constant changes */
+      memset(meta->is_valid, 0, sizeof(meta->is_valid));
+      meta->is_tuning_params_valid = 0;
+      meta->is_mobicat_aec_params_valid = 0;
+      meta->is_statsdebug_ae_params_valid = 0;
+      meta->is_statsdebug_awb_params_valid = 0;
+      meta->is_statsdebug_af_params_valid = 0;
+      meta->is_statsdebug_asd_params_valid = 0;
+      meta->is_statsdebug_stats_params_valid = 0;
+      meta->is_statsdebug_bestats_params_valid = 0;
+      meta->is_statsdebug_bhist_params_valid = 0;
+      meta->is_statsdebug_3a_tuning_params_valid = 0;
+    }
+}
+
+#ifdef  __cplusplus
+}
+#endif
+
+#endif /* __QCAMERA_INTF_H__ */
diff --git a/msmcobalt/QCamera2/stack/common/cam_list.h b/msmcobalt/QCamera2/stack/common/cam_list.h
new file mode 100644
index 0000000..a165262
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/common/cam_list.h
@@ -0,0 +1,85 @@
+/* Copyright (c) 2012, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+/* This file is a mirrored copy of /vendor/qcom/proprietary/mm-camera/common;
+ * please do not modify it directly here. */
+
+#ifndef __CAMLIST_H
+#define __CAMLIST_H
+
+#include <stddef.h>
+// System dependency
+#include <stdlib.h>
+
+/* Recovers a pointer to the enclosing structure of type 'type' from a
+ * pointer 'ptr' to its 'member' field (same idiom as the kernel's
+ * container_of; uses a GCC/Clang statement expression). */
+#define member_of(ptr, type, member) ({ \
+  const typeof(((type *)0)->member) *__mptr = (ptr); \
+  (type *)((char *)__mptr - offsetof(type,member));})
+
+/* Intrusive circular doubly-linked list node; embed it in the owning
+ * struct and recover the owner with member_of(). */
+struct cam_list {
+  struct cam_list *next, *prev;
+};
+
+/* Makes *ptr an empty (self-referencing) circular list. */
+static inline void cam_list_init(struct cam_list *ptr)
+{
+  ptr->next = ptr->prev = ptr;
+}
+
+/* Appends 'item' at the tail of the circular list headed by 'head'
+ * (i.e. immediately before the head node). */
+static inline void cam_list_add_tail_node(struct cam_list *item,
+  struct cam_list *head)
+{
+  struct cam_list *tail = head->prev;
+
+  item->prev = tail;
+  item->next = head;
+  tail->next = item;
+  head->prev = item;
+}
+
+/* Links 'item' into the list immediately before 'node'. */
+static inline void cam_list_insert_before_node(struct cam_list *item,
+  struct cam_list *node)
+{
+  struct cam_list *before = node->prev;
+
+  item->prev = before;
+  item->next = node;
+  before->next = item;
+  node->prev = item;
+}
+
+/* Unlinks 'ptr' from its list and resets it to a singleton so it can be
+ * safely deleted again or re-inserted. */
+static inline void cam_list_del_node(struct cam_list *ptr)
+{
+  /* bridge the neighbors over ptr, then self-loop ptr */
+  ptr->prev->next = ptr->next;
+  ptr->next->prev = ptr->prev;
+  ptr->next = ptr;
+  ptr->prev = ptr;
+}
+
+#endif /* __CAMLIST_H */
diff --git a/msmcobalt/QCamera2/stack/common/cam_queue.h b/msmcobalt/QCamera2/stack/common/cam_queue.h
new file mode 100644
index 0000000..fbb5f63
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/common/cam_queue.h
@@ -0,0 +1,134 @@
+/* Copyright (c) 2012, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <pthread.h>
+
+// Camera dependencies
+#include "cam_list.h"
+
+/* Queue element: intrusive list linkage plus an opaque payload pointer. */
+typedef struct {
+    struct cam_list list;
+    void *data;
+} cam_node_t;
+
+/* Mutex-protected FIFO built on cam_list; 'head' is a sentinel node that
+ * never carries data. */
+typedef struct {
+    cam_node_t head; /* dummy head */
+    uint32_t size;
+    pthread_mutex_t lock;
+} cam_queue_t;
+
+/* Initialize an empty queue and its mutex. Always returns 0. */
+static inline int32_t cam_queue_init(cam_queue_t *queue)
+{
+    pthread_mutex_init(&queue->lock, NULL);
+    cam_list_init(&queue->head.list);
+    queue->size = 0;
+    return 0;
+}
+
+/* Append 'data' at the queue tail. Returns 0 on success, -1 if the
+ * internal node allocation fails. The payload is not copied; note that
+ * cam_queue_flush() will free() it, so it must be heap-allocated if the
+ * queue may be flushed while non-empty. */
+static inline int32_t cam_queue_enq(cam_queue_t *queue, void *data)
+{
+    cam_node_t *node =
+        (cam_node_t *)malloc(sizeof(cam_node_t));
+    if (NULL == node) {
+        return -1;
+    }
+
+    memset(node, 0, sizeof(cam_node_t));
+    node->data = data;
+
+    pthread_mutex_lock(&queue->lock);
+    cam_list_add_tail_node(&node->list, &queue->head.list);
+    queue->size++;
+    pthread_mutex_unlock(&queue->lock);
+
+    return 0;
+}
+
+/* Remove and return the payload at the queue head, or NULL when the
+ * queue is empty. The internal node is freed here; ownership of the
+ * returned payload passes to the caller. */
+static inline void *cam_queue_deq(cam_queue_t *queue)
+{
+    cam_node_t *node = NULL;
+    void *data = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    pthread_mutex_lock(&queue->lock);
+    head = &queue->head.list;
+    pos = head->next;
+    if (pos != head) {
+        node = member_of(pos, cam_node_t, list);
+        cam_list_del_node(&node->list);
+        queue->size--;
+    }
+    pthread_mutex_unlock(&queue->lock);
+
+    /* free() is done outside the lock to keep the critical section short */
+    if (NULL != node) {
+        data = node->data;
+        free(node);
+    }
+
+    return data;
+}
+
+/* Drain the queue, freeing every node AND its payload. Assumes payloads
+ * are flat heap allocations with no interior pointers (see TODO below).
+ * Always returns 0. */
+static inline int32_t cam_queue_flush(cam_queue_t *queue)
+{
+    cam_node_t *node = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    pthread_mutex_lock(&queue->lock);
+    head = &queue->head.list;
+    pos = head->next;
+
+    /* advance 'pos' before deleting the node it points into */
+    while(pos != head) {
+        node = member_of(pos, cam_node_t, list);
+        pos = pos->next;
+        cam_list_del_node(&node->list);
+        queue->size--;
+
+        /* TODO later to consider ptr inside data */
+        /* for now we only assume there is no ptr inside data
+         * so we free data directly */
+        if (NULL != node->data) {
+            free(node->data);
+        }
+        free(node);
+
+    }
+    queue->size = 0;
+    pthread_mutex_unlock(&queue->lock);
+    return 0;
+}
+
+/* Flush any remaining entries and destroy the queue mutex.
+ * The queue must not be used afterwards. Always returns 0. */
+static inline int32_t cam_queue_deinit(cam_queue_t *queue)
+{
+    cam_queue_flush(queue);
+    pthread_mutex_destroy(&queue->lock);
+    return 0;
+}
diff --git a/msmcobalt/QCamera2/stack/common/cam_semaphore.h b/msmcobalt/QCamera2/stack/common/cam_semaphore.h
new file mode 100644
index 0000000..a35634c
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/common/cam_semaphore.h
@@ -0,0 +1,88 @@
+/* Copyright (c) 2012, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_SEMAPHORE_H__
+#define __QCAMERA_SEMAPHORE_H__
+
+// System dependencies
+#include <pthread.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* Implement semaphore with mutex and conditional variable.
+ * Reason being, POSIX semaphore on Android are not used or
+ * well tested.
+ */
+
+/* Counting semaphore: 'val' is the count, guarded by 'mutex' and
+ * signalled through 'cond'. */
+typedef struct {
+    int val;
+    pthread_mutex_t mutex;
+    pthread_cond_t cond;
+} cam_semaphore_t;
+
+/* Initialize semaphore 's' with an initial count of 'n'. */
+static inline void cam_sem_init(cam_semaphore_t *s, int n)
+{
+    pthread_mutex_init(&(s->mutex), NULL);
+    pthread_cond_init(&(s->cond), NULL);
+    s->val = n;
+}
+
+/* Increment the count and wake one waiter, if any. */
+static inline void cam_sem_post(cam_semaphore_t *s)
+{
+    pthread_mutex_lock(&(s->mutex));
+    s->val++;
+    pthread_cond_signal(&(s->cond));
+    pthread_mutex_unlock(&(s->mutex));
+}
+
+/* Block until the count is positive, then decrement it. Returns the
+ * last pthread_cond_wait() result (0 on normal wakeup). NOTE(review):
+ * if pthread_cond_wait() ever returned an error the loop would keep
+ * waiting while val == 0 — confirm that is the intended behavior. */
+static inline int cam_sem_wait(cam_semaphore_t *s)
+{
+    int rc = 0;
+    pthread_mutex_lock(&(s->mutex));
+    while (s->val == 0)
+        rc = pthread_cond_wait(&(s->cond), &(s->mutex));
+    s->val--;
+    pthread_mutex_unlock(&(s->mutex));
+    return rc;
+}
+
+/* Destroy the semaphore's mutex and condition variable and zero the
+ * count. No threads may be waiting on it when this is called. */
+static inline void cam_sem_destroy(cam_semaphore_t *s)
+{
+    pthread_mutex_destroy(&(s->mutex));
+    pthread_cond_destroy(&(s->cond));
+    s->val = 0;
+}
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* __QCAMERA_SEMAPHORE_H__ */
diff --git a/msmcobalt/QCamera2/stack/common/cam_types.h b/msmcobalt/QCamera2/stack/common/cam_types.h
new file mode 100644
index 0000000..0bd0fa5
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/common/cam_types.h
@@ -0,0 +1,2760 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_TYPES_H__
+#define __QCAMERA_TYPES_H__
+
+// System dependencies
+#include <stdint.h>
+#include <media/msmb_camera.h>
+
+#define CAM_MAX_NUM_BUFS_PER_STREAM 64
+#define MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES 8096
+#define AWB_DEBUG_DATA_SIZE               (45000)
+#define AEC_DEBUG_DATA_SIZE               (5000)
+#define AF_DEBUG_DATA_SIZE                (50000)
+#define ASD_DEBUG_DATA_SIZE               (100)
+#define STATS_BUFFER_DEBUG_DATA_SIZE      (75000)
+#define BESTATS_BUFFER_DEBUG_DATA_SIZE    (150000)
+#define BHIST_STATS_DEBUG_DATA_SIZE       (70000)
+#define TUNING_INFO_DEBUG_DATA_SIZE       (4)
+
+#define CEILING64(X) (((X) + 0x0003F) & 0xFFFFFFC0)
+#define CEILING32(X) (((X) + 0x0001F) & 0xFFFFFFE0)
+#define CEILING16(X) (((X) + 0x000F) & 0xFFF0)
+#define CEILING4(X)  (((X) + 0x0003) & 0xFFFC)
+#define CEILING2(X)  (((X) + 0x0001) & 0xFFFE)
+
+#define MAX_ZOOMS_CNT 91
+#define MAX_SIZES_CNT 40
+#define MAX_EXP_BRACKETING_LENGTH 32
+#define MAX_ROI 10
+#define MAX_STREAM_NUM_IN_BUNDLE 8
+#define MAX_NUM_STREAMS          8
+#define CHROMATIX_SIZE 60000
+#define COMMONCHROMATIX_SIZE 45000
+#define CPPCHROMATIX_SIZE 36000
+#define SWPOSTPROCCHROMATIX_SIZE 36000
+#define AFTUNE_SIZE  32768
+#define A3CHROMATIX_SIZE 30000
+#define MAX_SCALE_SIZES_CNT 8
+#define MAX_SAMP_DECISION_CNT     64
+#define SENSOR_PHYSICAL_SIZE_CNT  2
+#define EXPOSURE_TIME_RANGE_CNT   2
+#define BLACK_LEVEL_PATTERN_CNT   4
+#define FORWARD_MATRIX_COLS       3
+#define FORWARD_MATRIX_ROWS       3
+#define COLOR_TRANSFORM_COLS      3
+#define COLOR_TRANSFORM_ROWS      3
+#define CAL_TRANSFORM_COLS        3
+#define CAL_TRANSFORM_ROWS        3
+
+#define MAX_ISP_DATA_SIZE (20*1024)
+#define MAX_PP_DATA_SIZE  16384
+#define MAX_AE_STATS_DATA_SIZE  1000
+#define MAX_AWB_STATS_DATA_SIZE 1000
+#define MAX_AF_STATS_DATA_SIZE  1000
+#define MAX_ASD_STATS_DATA_SIZE 1000
+
+#define MAX_CAPTURE_BATCH_NUM 32
+
+#define TUNING_DATA_VERSION        6
+#define TUNING_SENSOR_DATA_MAX     0x10000 /*(need value from sensor team)*/
+#define TUNING_VFE_DATA_MAX        0x10000 /*(need value from vfe team)*/
+#define TUNING_CPP_DATA_MAX        0x10000 /*(need value from pproc team)*/
+#define TUNING_CAC_DATA_MAX        0x10000 /*(need value from imglib team)*/
+#define TUNING_DATA_MAX            (TUNING_SENSOR_DATA_MAX + \
+                                   TUNING_VFE_DATA_MAX + TUNING_CPP_DATA_MAX + \
+                                   TUNING_CAC_DATA_MAX)
+
+#define TUNING_SENSOR_DATA_OFFSET  0
+#define TUNING_VFE_DATA_OFFSET     TUNING_SENSOR_DATA_MAX
+#define TUNING_CPP_DATA_OFFSET     (TUNING_SENSOR_DATA_MAX + TUNING_VFE_DATA_MAX)
+#define TUNING_CAC_DATA_OFFSET     (TUNING_SENSOR_DATA_MAX + \
+                                   TUNING_VFE_DATA_MAX + TUNING_CPP_DATA_MAX)
+#define MAX_STATS_DATA_SIZE 4000
+
+#define MAX_AF_BRACKETING_VALUES 5
+#define MAX_TEST_PATTERN_CNT     8
+
+#define GPS_PROCESSING_METHOD_SIZE 33
+#define EXIF_IMAGE_DESCRIPTION_SIZE 100
+
+#define MAX_INFLIGHT_REQUESTS  6
+#define MAX_INFLIGHT_BLOB      3
+#define MIN_INFLIGHT_REQUESTS  3
+#define MAX_INFLIGHT_REPROCESS_REQUESTS 1
+#define MAX_INFLIGHT_HFR_REQUESTS (48)
+#define MIN_INFLIGHT_HFR_REQUESTS (40)
+
+#define QCAMERA_DUMP_FRM_LOCATION "/data/misc/camera/"
+#define QCAMERA_MAX_FILEPATH_LENGTH 64
+
+#define LIKELY(x)       __builtin_expect((x), true)
+#define UNLIKELY(x)     __builtin_expect((x), false)
+
+#define RELCAM_CALIB_ROT_MATRIX_MAX 9
+#define RELCAM_CALIB_SURFACE_PARMS_MAX 32
+#define RELCAM_CALIB_RESERVED_MAX 50
+
+#define MAX_NUM_CAMERA_PER_BUNDLE    2 /* Max number of cameras per bundle */
+#define EXTRA_FRAME_SYNC_BUFFERS     4 /* Extra frame sync buffers in dc mode*/
+#define MM_CAMERA_FRAME_SYNC_NODES   EXTRA_FRAME_SYNC_BUFFERS
+
+#define MAX_REPROCESS_STALL 2
+
+#define QCAMERA_MAX_FILEPATH_LENGTH 64
+
+#define MAX_EEPROM_VERSION_INFO_LEN 32
+
+#define MAX_OPTICAL_BLACK_REGIONS 5
+
+/*reprocess pipeline stages are pproc and jpeg */
+#define MAX_REPROCESS_PIPELINE_STAGES 2
+
+/* Defines the number of rows in the color correction matrix (CCM) */
+#define AWB_NUM_CCM_ROWS (3)
+
+/* Defines the number of columns in the color correction matrix (CCM) */
+#define AWB_NUM_CCM_COLS (3)
+
+typedef uint64_t cam_feature_mask_t;
+
+typedef enum {
+    CAM_HAL_V1 = 1,
+    CAM_HAL_V3 = 3
+} cam_hal_version_t;
+
+ /* Generic status/result codes returned by camera stack operations. */
+ typedef enum {
+    CAM_STATUS_INVALID_PARM  = -4, /* Invalid parameter provided */
+    CAM_STATUS_NOT_SUPPORTED = -3, /* Parameter/operation not supported */
+    CAM_STATUS_BUSY          = -2, /* operation busy */
+    CAM_STATUS_FAILED        = -1, /* Failure in doing operation */
+    CAM_STATUS_SUCCESS       =  0, /* Operation succeeded */
+    CAM_STATUS_ACCEPTED      =  1, /* Parameter accepted */
+    CAM_STATUS_MAX           =  2,
+} cam_status_t;
+
+typedef enum {
+    /* back main camera */
+    CAM_POSITION_BACK,
+    /* front main camera */
+    CAM_POSITION_FRONT,
+    /* back aux camera */
+    CAM_POSITION_BACK_AUX,
+    /* front aux camera */
+    CAM_POSITION_FRONT_AUX
+} cam_position_t;
+
+typedef enum {
+    CAM_FLICKER_NONE,
+    CAM_FLICKER_50_HZ,
+    CAM_FLICKER_60_HZ
+} cam_flicker_t;
+
+typedef enum {
+    CAM_FORMAT_JPEG = 0,
+    CAM_FORMAT_YUV_420_NV12 = 1,
+    CAM_FORMAT_YUV_420_NV21,
+    CAM_FORMAT_YUV_420_NV21_ADRENO,
+    CAM_FORMAT_YUV_420_YV12,
+    CAM_FORMAT_YUV_422_NV16,
+    CAM_FORMAT_YUV_422_NV61,
+    CAM_FORMAT_YUV_420_NV12_VENUS,
+    /* Note: For all raw formats, each scanline needs to be 16 bytes aligned */
+
+    /* Packed YUV/YVU raw format, 16 bpp: 8 bits Y and 8 bits UV.
+     * U and V are interleaved with Y: YUYV or YVYV */
+    CAM_FORMAT_YUV_RAW_8BIT_YUYV,
+    CAM_FORMAT_YUV_RAW_8BIT_YVYU,
+    CAM_FORMAT_YUV_RAW_8BIT_UYVY, //10
+    CAM_FORMAT_YUV_RAW_8BIT_VYUY,
+
+    /* QCOM RAW formats where data is packed into 64bit word.
+     * 8BPP: 1 64-bit word contains 8 pixels p0 - p7, where p0 is
+     *       stored at LSB.
+     * 10BPP: 1 64-bit word contains 6 pixels p0 - p5, where most
+     *       significant 4 bits are set to 0. P0 is stored at LSB.
+     * 12BPP: 1 64-bit word contains 5 pixels p0 - p4, where most
+     *       significant 4 bits are set to 0. P0 is stored at LSB. */
+    CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG,
+    CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GRBG,
+    CAM_FORMAT_BAYER_QCOM_RAW_8BPP_RGGB,
+    CAM_FORMAT_BAYER_QCOM_RAW_8BPP_BGGR,
+    CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG,
+    CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GRBG,
+    CAM_FORMAT_BAYER_QCOM_RAW_10BPP_RGGB,
+    CAM_FORMAT_BAYER_QCOM_RAW_10BPP_BGGR,
+    CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG,  //20
+    CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GRBG,
+    CAM_FORMAT_BAYER_QCOM_RAW_12BPP_RGGB,
+    CAM_FORMAT_BAYER_QCOM_RAW_12BPP_BGGR,
+    /* MIPI RAW formats based on the MIPI CSI-2 specification.
+     * 8BPP: Each pixel occupies one byte, starting at LSB.
+     *       Output width of image has no restrictions.
+     * 10BPP: Four pixels are held in every 5 bytes. The output
+     *       width of image must be a multiple of 4 pixels.
+     * 12BPP: Two pixels are held in every 3 bytes. The output
+     *       width of image must be a multiple of 2 pixels. */
+    CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG,
+    CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GRBG,
+    CAM_FORMAT_BAYER_MIPI_RAW_8BPP_RGGB,
+    CAM_FORMAT_BAYER_MIPI_RAW_8BPP_BGGR,
+    CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG,
+    CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GRBG,
+    CAM_FORMAT_BAYER_MIPI_RAW_10BPP_RGGB, //30
+    CAM_FORMAT_BAYER_MIPI_RAW_10BPP_BGGR,
+    CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG,
+    CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GRBG,
+    CAM_FORMAT_BAYER_MIPI_RAW_12BPP_RGGB,
+    CAM_FORMAT_BAYER_MIPI_RAW_12BPP_BGGR,
+    /* Ideal raw formats where image data has gone through black
+     * correction, lens rolloff, demux/channel gain, bad pixel
+     * correction, and ABF.
+     * Ideal raw formats could output any of QCOM_RAW and MIPI_RAW
+     * formats, plus plain8 8bbp, plain16 800, plain16 10bpp, and
+     * plain 16 12bpp */
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GBRG, //40
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_RGGB, //50
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GBRG, //60
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_RGGB, //70
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_BGGR,
+
+    /* generic 8-bit raw */
+    CAM_FORMAT_JPEG_RAW_8BIT,
+    CAM_FORMAT_META_RAW_8BIT,
+
+    /* QCOM RAW formats where data is packed into 64bit word.
+     * 14BPP: 1 64-bit word contains 4 pixels p0 - p3, where most
+     *       significant 4 bits are set to 0. P0 is stored at LSB.
+     */
+    CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GBRG,
+    CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GRBG,
+    CAM_FORMAT_BAYER_QCOM_RAW_14BPP_RGGB, //80
+    CAM_FORMAT_BAYER_QCOM_RAW_14BPP_BGGR,
+    /* MIPI RAW formats based on the MIPI CSI-2 specification.
+     * 14 BPP:  1st byte: P0 [13:6]
+     *          2nd byte: P1 [13:6]
+     *          3rd byte: P2 [13:6]
+     *          4th byte: P3 [13:6]
+     *          5th byte: P0 [5:0]
+     *          7th byte: P1 [5:0]
+     *          8th byte: P2 [5:0]
+     *          9th byte: P3 [5:0]
+     */
+    CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GBRG,
+    CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GRBG,
+    CAM_FORMAT_BAYER_MIPI_RAW_14BPP_RGGB,
+    CAM_FORMAT_BAYER_MIPI_RAW_14BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GBRG, //90
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_BGGR,
+    /* 14BPP: 1st byte: P0 [8:0]
+     *        2nd byte: P0 [13:9]
+     *        3rd byte: P1 [8:0]
+     *        4th byte: P1 [13:9]
+     */
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_BGGR,
+
+    CAM_FORMAT_YUV_444_NV24,
+    CAM_FORMAT_YUV_444_NV42,
+
+    /* Y plane only, used for FD, 8BPP */
+    CAM_FORMAT_Y_ONLY, //100
+
+    /* UBWC format */
+    CAM_FORMAT_YUV_420_NV12_UBWC,
+
+    CAM_FORMAT_YUV_420_NV21_VENUS,
+
+    /* RGB formats */
+    CAM_FORMAT_8888_ARGB,
+
+    /* Y plane only */
+    CAM_FORMAT_Y_ONLY_10_BPP,
+    CAM_FORMAT_Y_ONLY_12_BPP,
+    CAM_FORMAT_Y_ONLY_14_BPP,
+    CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GREY,
+    CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GREY,
+    CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GREY,
+    CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GREY,
+    CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GREY,
+    CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GREY,
+    CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GREY,
+    CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GREY,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GREY,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GREY,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GREY,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GREY,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GREY,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GREY,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GREY,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GREY,
+
+    CAM_FORMAT_MAX
+} cam_format_t;
+
+typedef enum {
+    CAM_STREAM_TYPE_DEFAULT,       /* default stream type */
+    CAM_STREAM_TYPE_PREVIEW,       /* preview */
+    CAM_STREAM_TYPE_POSTVIEW,      /* postview */
+    CAM_STREAM_TYPE_SNAPSHOT,      /* snapshot */
+    CAM_STREAM_TYPE_VIDEO,         /* video */
+    CAM_STREAM_TYPE_CALLBACK,      /* app requested callback */
+    CAM_STREAM_TYPE_IMPL_DEFINED, /* opaque format: could be display, video enc, ZSL YUV */
+    CAM_STREAM_TYPE_METADATA,      /* meta data */
+    CAM_STREAM_TYPE_RAW,           /* raw dump from camif */
+    CAM_STREAM_TYPE_OFFLINE_PROC,  /* offline process */
+    CAM_STREAM_TYPE_PARM,         /* mct internal stream */
+    CAM_STREAM_TYPE_ANALYSIS,     /* analysis stream */
+    CAM_STREAM_TYPE_MAX,
+} cam_stream_type_t;
+
+typedef enum {
+    CAM_PAD_NONE = 1,
+    CAM_PAD_TO_2 = 2,
+    CAM_PAD_TO_4 = 4,
+    CAM_PAD_TO_WORD = CAM_PAD_TO_4,
+    CAM_PAD_TO_8 = 8,
+    CAM_PAD_TO_16 = 16,
+    CAM_PAD_TO_32 = 32,
+    CAM_PAD_TO_64 = 64,
+    CAM_PAD_TO_128 = 128,
+    CAM_PAD_TO_256 = 256,
+    CAM_PAD_TO_512 = 512,
+    CAM_PAD_TO_1K = 1024,
+    CAM_PAD_TO_2K = 2048,
+    CAM_PAD_TO_4K = 4096,
+    CAM_PAD_TO_8K = 8192
+} cam_pad_format_t;
+
+typedef enum {
+    /* followings are per camera */
+    CAM_MAPPING_BUF_TYPE_CAPABILITY,  /* mapping camera capability buffer */
+    CAM_MAPPING_BUF_TYPE_PARM_BUF,    /* mapping parameters buffer */
+    /* this buffer is needed for the payload to be sent with bundling related cameras cmd */
+    CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF, /* mapping sync buffer.*/
+
+    /* followings are per stream */
+    CAM_MAPPING_BUF_TYPE_STREAM_BUF,        /* mapping stream buffers */
+    CAM_MAPPING_BUF_TYPE_STREAM_INFO,       /* mapping stream information buffer */
+    CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF, /* mapping offline process input buffer */
+    CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF,  /* mapping offline meta buffer */
+    CAM_MAPPING_BUF_TYPE_MISC_BUF,          /* mapping offline miscellaneous buffer */
+    CAM_MAPPING_BUF_TYPE_STREAM_USER_BUF,   /* mapping user ptr stream buffers */
+    CAM_MAPPING_BUF_TYPE_MAX
+} cam_mapping_buf_type;
+
+typedef enum {
+    CAM_STREAM_BUF_TYPE_MPLANE,  /* Multiplanar Buffer type */
+    CAM_STREAM_BUF_TYPE_USERPTR, /* User specific structure pointer*/
+    CAM_STREAM_BUF_TYPE_MAX
+} cam_stream_buf_type;
+
+/* Descriptor for mapping one buffer (fd + size) into the camera stack. */
+typedef struct {
+    cam_mapping_buf_type type;
+    uint32_t stream_id;   /* stream id: valid if STREAM_BUF */
+    uint32_t frame_idx;   /* frame index: valid if type is STREAM_BUF */
+    int32_t plane_idx;    /* plane index. valid if type is STREAM_BUF.
+                           * -1 means all planes share the same fd;
+                           * otherwise, each plane has its own fd */
+    uint32_t cookie;      /* could be job_id(uint32_t) to identify mapping job */
+    int32_t fd;           /* origin fd */
+    size_t size;          /* size of the buffer */
+    void *buffer;         /* Buffer pointer */
+} cam_buf_map_type;
+
+typedef struct {
+    uint32_t length;
+    cam_buf_map_type buf_maps[CAM_MAX_NUM_BUFS_PER_STREAM];
+} cam_buf_map_type_list;
+
+/* Descriptor for unmapping a previously mapped buffer. */
+typedef struct {
+    cam_mapping_buf_type type;
+    uint32_t stream_id;   /* stream id: valid if STREAM_BUF */
+    uint32_t frame_idx;   /* frame index: valid if STREAM_BUF or HIST_BUF */
+    int32_t plane_idx;    /* plane index. valid if type is STREAM_BUF.
+                           * -1 means all planes share the same fd;
+                           * otherwise, each plane has its own fd */
+    uint32_t cookie;      /* could be job_id(uint32_t) to identify unmapping job */
+} cam_buf_unmap_type;
+
+typedef struct {
+    uint32_t length;
+    cam_buf_unmap_type buf_unmaps[CAM_MAX_NUM_BUFS_PER_STREAM];
+} cam_buf_unmap_type_list;
+
+typedef enum {
+    CAM_MAPPING_TYPE_FD_MAPPING,
+    CAM_MAPPING_TYPE_FD_UNMAPPING,
+    CAM_MAPPING_TYPE_FD_BUNDLED_MAPPING,
+    CAM_MAPPING_TYPE_FD_BUNDLED_UNMAPPING,
+    CAM_MAPPING_TYPE_MAX
+} cam_mapping_type;
+
+typedef struct {
+    cam_mapping_type msg_type;
+    union {
+        cam_buf_map_type buf_map;
+        cam_buf_unmap_type buf_unmap;
+        cam_buf_map_type_list buf_map_list;
+        cam_buf_unmap_type_list buf_unmap_list;
+    } payload;
+} cam_sock_packet_t;
+typedef cam_sock_packet_t cam_reg_buf_t;
+
+typedef enum {
+    CAM_MODE_2D = (1<<0),
+    CAM_MODE_3D = (1<<1)
+} cam_mode_t;
+
+typedef struct {
+    uint32_t len;
+    uint32_t y_offset;
+    uint32_t cbcr_offset;
+} cam_sp_len_offset_t;
+
+typedef struct{
+    uint32_t len;
+    uint32_t offset;
+    int32_t offset_x;
+    int32_t offset_y;
+    int32_t stride;
+    int32_t stride_in_bytes;
+    int32_t scanline;
+    int32_t width;    /* width without padding */
+    int32_t height;   /* height without padding */
+    int32_t meta_stride;   /*Meta stride*/
+    int32_t meta_scanline; /*Meta Scanline*/
+    int32_t meta_len;   /*Meta plane length including 4k padding*/
+} cam_mp_len_offset_t;
+
+typedef struct {
+    uint32_t offset_x;
+    uint32_t offset_y;
+} cam_offset_info_t;
+
+typedef struct {
+    uint32_t width_padding;
+    uint32_t height_padding;
+    uint32_t plane_padding;
+    uint32_t min_stride;
+    uint32_t min_scanline;
+    cam_offset_info_t offset_info;
+} cam_padding_info_t;
+
+typedef struct {
+    uint32_t num_planes;    /*Number of planes in planar buffer*/
+    union {
+        cam_sp_len_offset_t sp;
+        cam_mp_len_offset_t mp[VIDEO_MAX_PLANES];
+    };
+    uint32_t frame_len;
+} cam_frame_len_offset_t;
+
+typedef struct {
+    uint8_t frame_buf_cnt;  /*Total plane frames present in 1 batch*/
+    uint32_t size;          /*Size of 1 batch buffer. Kernel structure size*/
+    long frameInterval;     /*frame interval between each frame*/
+} cam_stream_user_buf_info_t;
+
+typedef struct {
+    int32_t width;
+    int32_t height;
+} cam_dimension_t;
+
+typedef struct {
+    cam_frame_len_offset_t plane_info;
+} cam_stream_buf_plane_info_t;
+
+typedef struct {
+    float min_fps;
+    float max_fps;
+    float video_min_fps;
+    float video_max_fps;
+} cam_fps_range_t;
+
+typedef struct {
+    int32_t min_sensitivity;
+    int32_t max_sensitivity;
+} cam_sensitivity_range_t;
+
+typedef enum {
+    CAM_HFR_MODE_OFF,
+    CAM_HFR_MODE_60FPS,
+    CAM_HFR_MODE_90FPS,
+    CAM_HFR_MODE_120FPS,
+    CAM_HFR_MODE_150FPS,
+    CAM_HFR_MODE_180FPS,
+    CAM_HFR_MODE_210FPS,
+    CAM_HFR_MODE_240FPS,
+    CAM_HFR_MODE_480FPS,
+    CAM_HFR_MODE_MAX
+} cam_hfr_mode_t;
+
+typedef struct {
+    cam_hfr_mode_t mode;
+    uint8_t dim_cnt;                                        /* hfr sizes table count */
+    cam_dimension_t dim[MAX_SIZES_CNT];                     /* hfr sizes table */
+    uint8_t livesnapshot_sizes_tbl_cnt;                     /* livesnapshot sizes table count */
+    cam_dimension_t livesnapshot_sizes_tbl[MAX_SIZES_CNT];  /* livesnapshot sizes table */
+} cam_hfr_info_t;
+
+typedef enum {
+    CAM_WB_MODE_AUTO,
+    CAM_WB_MODE_CUSTOM,
+    CAM_WB_MODE_INCANDESCENT,
+    CAM_WB_MODE_FLUORESCENT,
+    CAM_WB_MODE_WARM_FLUORESCENT,
+    CAM_WB_MODE_DAYLIGHT,
+    CAM_WB_MODE_CLOUDY_DAYLIGHT,
+    CAM_WB_MODE_TWILIGHT,
+    CAM_WB_MODE_SHADE,
+    CAM_WB_MODE_MANUAL,
+    CAM_WB_MODE_OFF,
+    CAM_WB_MODE_MAX
+} cam_wb_mode_type;
+
+typedef enum {
+    CAM_ANTIBANDING_MODE_OFF,
+    CAM_ANTIBANDING_MODE_60HZ,
+    CAM_ANTIBANDING_MODE_50HZ,
+    CAM_ANTIBANDING_MODE_AUTO,
+    CAM_ANTIBANDING_MODE_AUTO_50HZ,
+    CAM_ANTIBANDING_MODE_AUTO_60HZ,
+    CAM_ANTIBANDING_MODE_MAX,
+} cam_antibanding_mode_type;
+
+/* Enum Type for different ISO Mode supported */
+typedef enum {
+    CAM_ISO_MODE_AUTO,
+    CAM_ISO_MODE_DEBLUR,
+    CAM_ISO_MODE_100,
+    CAM_ISO_MODE_200,
+    CAM_ISO_MODE_400,
+    CAM_ISO_MODE_800,
+    CAM_ISO_MODE_1600,
+    CAM_ISO_MODE_3200,
+    CAM_ISO_MODE_MAX
+} cam_iso_mode_type;
+
+typedef enum {
+    CAM_AEC_MODE_FRAME_AVERAGE,
+    CAM_AEC_MODE_CENTER_WEIGHTED,
+    CAM_AEC_MODE_SPOT_METERING,
+    CAM_AEC_MODE_SMART_METERING,
+    CAM_AEC_MODE_USER_METERING,
+    CAM_AEC_MODE_SPOT_METERING_ADV,
+    CAM_AEC_MODE_CENTER_WEIGHTED_ADV,
+    CAM_AEC_MODE_MAX
+} cam_auto_exposure_mode_type;
+
+/* enum to select AEC convergence type */
+typedef enum {
+    /* Normal AEC convergence */
+    CAM_AEC_NORMAL_CONVERGENCE = 0,
+    /* Aggressive AEC convergence */
+    CAM_AEC_AGGRESSIVE_CONVERGENCE,
+    /* Fast AEC convergence */
+    CAM_AEC_FAST_CONVERGENCE,
+    CAM_AEC_CONVERGENCE_MAX
+} cam_aec_convergence_type;
+
+typedef enum {
+    CAM_AE_MODE_OFF,
+    CAM_AE_MODE_ON,
+    CAM_AE_MODE_MAX
+} cam_ae_mode_type;
+
+typedef enum {
+    CAM_FOCUS_ALGO_AUTO,
+    CAM_FOCUS_ALGO_SPOT,
+    CAM_FOCUS_ALGO_CENTER_WEIGHTED,
+    CAM_FOCUS_ALGO_AVERAGE,
+    CAM_FOCUS_ALGO_MAX
+} cam_focus_algorithm_type;
+
+/* Auto focus mode */
+typedef enum {
+    CAM_FOCUS_MODE_OFF,
+    CAM_FOCUS_MODE_AUTO,
+    CAM_FOCUS_MODE_INFINITY,
+    CAM_FOCUS_MODE_MACRO,
+    CAM_FOCUS_MODE_FIXED,
+    CAM_FOCUS_MODE_EDOF,
+    CAM_FOCUS_MODE_CONTINOUS_VIDEO,
+    CAM_FOCUS_MODE_CONTINOUS_PICTURE,
+    CAM_FOCUS_MODE_MANUAL,
+    CAM_FOCUS_MODE_MAX
+} cam_focus_mode_type;
+
+typedef enum {
+    CAM_MANUAL_FOCUS_MODE_INDEX,
+    CAM_MANUAL_FOCUS_MODE_DAC_CODE,
+    CAM_MANUAL_FOCUS_MODE_RATIO,
+    CAM_MANUAL_FOCUS_MODE_DIOPTER,
+    CAM_MANUAL_FOCUS_MODE_MAX
+} cam_manual_focus_mode_type;
+
+typedef struct {
+    cam_manual_focus_mode_type flag;
+    union{
+        int32_t af_manual_lens_position_index;
+        int32_t af_manual_lens_position_dac;
+        int32_t af_manual_lens_position_ratio;
+        float af_manual_diopter;
+    };
+} cam_manual_focus_parm_t;
+
+typedef enum {
+    CAM_MANUAL_WB_MODE_CCT,
+    CAM_MANUAL_WB_MODE_GAIN,
+    CAM_MANUAL_WB_MODE_MAX
+} cam_manual_wb_mode_type;
+
+typedef struct {
+    float r_gain;
+    float g_gain;
+    float b_gain;
+} cam_awb_gain_t;
+
+typedef struct {
+    cam_manual_wb_mode_type type;
+    union{
+        int32_t cct;
+        cam_awb_gain_t gains;
+    };
+} cam_manual_wb_parm_t;
+
+typedef enum {
+    CAM_SCENE_MODE_OFF,
+    CAM_SCENE_MODE_AUTO,
+    CAM_SCENE_MODE_LANDSCAPE,
+    CAM_SCENE_MODE_SNOW,
+    CAM_SCENE_MODE_BEACH,
+    CAM_SCENE_MODE_SUNSET,
+    CAM_SCENE_MODE_NIGHT,
+    CAM_SCENE_MODE_PORTRAIT,
+    CAM_SCENE_MODE_BACKLIGHT,
+    CAM_SCENE_MODE_SPORTS,
+    CAM_SCENE_MODE_ANTISHAKE,
+    CAM_SCENE_MODE_FLOWERS,
+    CAM_SCENE_MODE_CANDLELIGHT,
+    CAM_SCENE_MODE_FIREWORKS,
+    CAM_SCENE_MODE_PARTY,
+    CAM_SCENE_MODE_NIGHT_PORTRAIT,
+    CAM_SCENE_MODE_THEATRE,
+    CAM_SCENE_MODE_ACTION,
+    CAM_SCENE_MODE_AR,
+    CAM_SCENE_MODE_FACE_PRIORITY,
+    CAM_SCENE_MODE_BARCODE,
+    CAM_SCENE_MODE_HDR,
+    CAM_SCENE_MODE_AQUA,
+    CAM_SCENE_MODE_MAX
+} cam_scene_mode_type;
+
+typedef enum {
+    CAM_EFFECT_MODE_OFF,
+    CAM_EFFECT_MODE_MONO,
+    CAM_EFFECT_MODE_NEGATIVE,
+    CAM_EFFECT_MODE_SOLARIZE,
+    CAM_EFFECT_MODE_SEPIA,
+    CAM_EFFECT_MODE_POSTERIZE,
+    CAM_EFFECT_MODE_WHITEBOARD,
+    CAM_EFFECT_MODE_BLACKBOARD,
+    CAM_EFFECT_MODE_AQUA,
+    CAM_EFFECT_MODE_EMBOSS,
+    CAM_EFFECT_MODE_SKETCH,
+    CAM_EFFECT_MODE_NEON,
+    CAM_EFFECT_MODE_BEAUTY,
+    CAM_EFFECT_MODE_MAX
+} cam_effect_mode_type;
+
+typedef enum {
+    CAM_FLASH_MODE_OFF,
+    CAM_FLASH_MODE_AUTO,
+    CAM_FLASH_MODE_ON,
+    CAM_FLASH_MODE_TORCH,
+    CAM_FLASH_MODE_SINGLE,
+    CAM_FLASH_MODE_MAX
+} cam_flash_mode_t;
+
+// Flash States
+typedef enum {
+    CAM_FLASH_STATE_UNAVAILABLE,
+    CAM_FLASH_STATE_CHARGING,
+    CAM_FLASH_STATE_READY,
+    CAM_FLASH_STATE_FIRED,
+    CAM_FLASH_STATE_PARTIAL,
+    CAM_FLASH_STATE_MAX
+} cam_flash_state_t;
+
+typedef enum {
+    CAM_FLASH_FIRING_LEVEL_0,
+    CAM_FLASH_FIRING_LEVEL_1,
+    CAM_FLASH_FIRING_LEVEL_2,
+    CAM_FLASH_FIRING_LEVEL_3,
+    CAM_FLASH_FIRING_LEVEL_4,
+    CAM_FLASH_FIRING_LEVEL_5,
+    CAM_FLASH_FIRING_LEVEL_6,
+    CAM_FLASH_FIRING_LEVEL_7,
+    CAM_FLASH_FIRING_LEVEL_8,
+    CAM_FLASH_FIRING_LEVEL_9,
+    CAM_FLASH_FIRING_LEVEL_10,
+    CAM_FLASH_FIRING_LEVEL_MAX
+} cam_flash_firing_level_t;
+
+
+typedef enum {
+    CAM_AEC_TRIGGER_IDLE,
+    CAM_AEC_TRIGGER_START
+} cam_aec_trigger_type_t;
+
+typedef enum {
+    CAM_AF_TRIGGER_IDLE,
+    CAM_AF_TRIGGER_START,
+    CAM_AF_TRIGGER_CANCEL
+} cam_af_trigger_type_t;
+
+typedef enum {
+    CAM_AE_STATE_INACTIVE,
+    CAM_AE_STATE_SEARCHING,
+    CAM_AE_STATE_CONVERGED,
+    CAM_AE_STATE_LOCKED,
+    CAM_AE_STATE_FLASH_REQUIRED,
+    CAM_AE_STATE_PRECAPTURE
+} cam_ae_state_t;
+
+typedef enum {
+    CAM_NOISE_REDUCTION_MODE_OFF,
+    CAM_NOISE_REDUCTION_MODE_FAST,
+    CAM_NOISE_REDUCTION_MODE_HIGH_QUALITY,
+    CAM_NOISE_REDUCTION_MODE_MINIMAL,
+    CAM_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG
+} cam_noise_reduction_mode_t;
+
+typedef enum {
+    CAM_EDGE_MODE_OFF,
+    CAM_EDGE_MODE_FAST,
+    CAM_EDGE_MODE_HIGH_QUALITY,
+    CAM_EDGE_MODE_ZERO_SHUTTER_LAG,
+} cam_edge_mode_t;
+
+typedef struct {
+   uint8_t edge_mode;
+   int32_t sharpness;
+} cam_edge_application_t;
+
+typedef enum {
+    CAM_BLACK_LEVEL_LOCK_OFF,
+    CAM_BLACK_LEVEL_LOCK_ON,
+} cam_black_level_lock_t;
+
+typedef enum {
+    CAM_HOTPIXEL_MODE_OFF,
+    CAM_HOTPIXEL_MODE_FAST,
+    CAM_HOTPIXEL_MODE_HIGH_QUALITY,
+} cam_hotpixel_mode_t;
+
+typedef enum {
+    CAM_LENS_SHADING_MAP_MODE_OFF,
+    CAM_LENS_SHADING_MAP_MODE_ON,
+} cam_lens_shading_map_mode_t;
+
+typedef enum {
+    CAM_LENS_SHADING_MODE_OFF,
+    CAM_LENS_SHADING_MODE_FAST,
+    CAM_LENS_SHADING_MODE_HIGH_QUALITY,
+} cam_lens_shading_mode_t;
+
+typedef enum {
+    CAM_FACE_DETECT_MODE_OFF,
+    CAM_FACE_DETECT_MODE_SIMPLE,
+    CAM_FACE_DETECT_MODE_FULL,
+} cam_face_detect_mode_t;
+
+typedef enum {
+    CAM_TONEMAP_MODE_CONTRAST_CURVE,
+    CAM_TONEMAP_MODE_FAST,
+    CAM_TONEMAP_MODE_HIGH_QUALITY,
+} cam_tonemap_mode_t;
+
+typedef enum {
+    CAM_CDS_MODE_OFF,
+    CAM_CDS_MODE_ON,
+    CAM_CDS_MODE_AUTO,
+    CAM_CDS_MODE_LOCK,
+    CAM_CDS_MODE_MAX
+} cam_cds_mode_type_t;
+
+typedef enum {
+    CAM_SENSOR_HDR_OFF,
+    CAM_SENSOR_HDR_IN_SENSOR = 1,
+    CAM_SENSOR_HDR_ZIGZAG,
+    CAM_SENSOR_HDR_MAX,
+} cam_sensor_hdr_type_t;
+
+typedef struct  {
+    int32_t left;
+    int32_t top;
+    int32_t width;
+    int32_t height;
+} cam_rect_t;
+
+typedef struct  {
+    cam_rect_t rect;
+    int32_t weight; /* weight of the area, valid for focusing/metering areas */
+} cam_area_t;
+
+typedef enum {
+    CAM_STREAMING_MODE_CONTINUOUS, /* continuous streaming */
+    CAM_STREAMING_MODE_BURST,      /* burst streaming */
+    CAM_STREAMING_MODE_BATCH,      /* stream frames in batches */
+    CAM_STREAMING_MODE_MAX
+} cam_streaming_mode_t;
+
+typedef enum {
+    IS_TYPE_NONE,
+    IS_TYPE_DIS,
+    IS_TYPE_GA_DIS,
+    IS_TYPE_EIS_1_0,
+    IS_TYPE_EIS_2_0,
+    IS_TYPE_EIS_3_0,
+    IS_TYPE_MAX
+} cam_is_type_t;
+
+typedef enum {
+    DIS_DISABLE,
+    DIS_ENABLE
+} cam_dis_mode_t;
+
+typedef enum {
+  NON_SECURE,
+  SECURE
+} cam_stream_secure_t;
+
+#define CAM_REPROCESS_MASK_TYPE_WNR (1<<0)
+
+/* event from server */
+typedef enum {
+    CAM_EVENT_TYPE_MAP_UNMAP_DONE  = (1<<0),
+    CAM_EVENT_TYPE_AUTO_FOCUS_DONE = (1<<1),
+    CAM_EVENT_TYPE_ZOOM_DONE       = (1<<2),
+    CAM_EVENT_TYPE_DAEMON_DIED     = (1<<3),
+    CAM_EVENT_TYPE_INT_TAKE_JPEG   = (1<<4),
+    CAM_EVENT_TYPE_INT_TAKE_RAW    = (1<<5),
+    CAM_EVENT_TYPE_DAEMON_PULL_REQ = (1<<6),
+    CAM_EVENT_TYPE_CAC_DONE        = (1<<7),
+    CAM_EVENT_TYPE_MAX
+} cam_event_type_t;
+
+typedef enum {
+    CAM_EXP_BRACKETING_OFF,
+    CAM_EXP_BRACKETING_ON
+} cam_bracket_mode;
+
+typedef enum {
+    CAM_LOW_LIGHT_OFF = 0,
+    CAM_LOW_LIGHT_ON,
+} cam_low_light_mode_t;
+
+typedef struct {
+    cam_bracket_mode mode;
+    char values[MAX_EXP_BRACKETING_LENGTH];  /* user defined values */
+} cam_exp_bracketing_t;
+
+typedef struct {
+  uint32_t num_frames;
+  cam_exp_bracketing_t exp_val;
+} cam_hdr_bracketing_info_t;
+
+typedef struct {
+    cam_bracket_mode mode;
+    int32_t values;  /* user defined values */
+} cam_capture_bracketing_t;
+
+typedef enum {
+    CAM_SETTINGS_TYPE_OFF,
+    CAM_SETTINGS_TYPE_ON,
+    CAM_SETTINGS_TYPE_AUTO
+} cam_manual_setting_mode;
+
+typedef struct {
+    cam_manual_setting_mode exp_mode;
+    int64_t exp_time;
+    cam_manual_setting_mode iso_mode;
+    int32_t iso_value;
+} cam_capture_manual_3A_t;
+
+typedef enum {
+    CAM_CAPTURE_NORMAL,
+    CAM_CAPTURE_FLASH,
+    CAM_CAPTURE_BRACKETING,
+    CAM_CAPTURE_LOW_LIGHT,
+    CAM_CAPTURE_RESET,
+    CAM_CAPTURE_MANUAL_3A,
+    CAM_CAPTURE_MAX
+} cam_capture_type;
+
+typedef struct {
+    int32_t num_frames;     /*Num of frames requested on this quality*/
+    cam_capture_type type;  /*type of the capture request*/
+
+    /* union to store values of capture type */
+    union {
+        cam_flash_mode_t flash_mode;
+        cam_capture_bracketing_t hdr_mode;
+        cam_low_light_mode_t low_light_mode;
+        cam_capture_manual_3A_t manual_3A_mode;
+    };
+} cam_capture_settings_t;
+
+typedef struct {
+    uint32_t num_batch;  /*Number of frames batch requested*/
+    cam_capture_settings_t configs[MAX_CAPTURE_BATCH_NUM];
+} cam_capture_frame_config_t;
+
+typedef struct {
+    uint8_t stepsize;
+    uint8_t direction;
+    int32_t num_steps;
+    uint8_t ttype;
+} tune_actuator_t;
+
+typedef struct {
+    uint8_t module;
+    uint8_t type;
+    int32_t value;
+} tune_cmd_t;
+
+typedef enum {
+    CAM_AEC_ROI_OFF,
+    CAM_AEC_ROI_ON
+} cam_aec_roi_ctrl_t;
+
+typedef enum {
+    CAM_AEC_ROI_BY_INDEX,
+    CAM_AEC_ROI_BY_COORDINATE,
+} cam_aec_roi_type_t;
+
+typedef struct {
+    uint32_t x;
+    uint32_t y;
+} cam_coordinate_type_t;
+
+typedef struct {
+    int32_t numerator;
+    int32_t denominator;
+} cam_rational_type_t;
+
+typedef struct {
+    cam_aec_roi_ctrl_t aec_roi_enable;
+    cam_aec_roi_type_t aec_roi_type;
+    union {
+        cam_coordinate_type_t coordinate[MAX_ROI];
+        uint32_t aec_roi_idx[MAX_ROI];
+    } cam_aec_roi_position;
+} cam_set_aec_roi_t;
+
+typedef struct {
+    uint32_t frm_id;
+    uint8_t num_roi;
+    cam_rect_t roi[MAX_ROI];
+    int32_t weight[MAX_ROI];
+    uint8_t is_multiwindow;
+} cam_roi_info_t;
+
+typedef enum {
+    CAM_WAVELET_DENOISE_YCBCR_PLANE,
+    CAM_WAVELET_DENOISE_CBCR_ONLY,
+    CAM_WAVELET_DENOISE_STREAMLINE_YCBCR,
+    CAM_WAVELET_DENOISE_STREAMLINED_CBCR
+} cam_denoise_process_type_t;
+
+typedef struct {
+    uint8_t denoise_enable;
+    cam_denoise_process_type_t process_plates;
+} cam_denoise_param_t;
+
+#define CAM_FACE_PROCESS_MASK_DETECTION     (1U<<0)
+#define CAM_FACE_PROCESS_MASK_RECOGNITION   (1U<<1)
+#define CAM_FACE_PROCESS_MASK_FOCUS         (1U<<2)
+#define CAM_FACE_PROCESS_MASK_BLINK         (1U<<3)
+#define CAM_FACE_PROCESS_MASK_SMILE         (1U<<4)
+#define CAM_FACE_PROCESS_MASK_GAZE          (1U<<5)
+
+typedef struct {
+    uint32_t fd_mode;          /* mask of face process */
+    uint32_t num_fd;
+} cam_fd_set_parm_t;
+
+typedef enum {
+    QCAMERA_FD_PREVIEW,
+    QCAMERA_FD_SNAPSHOT
+}qcamera_face_detect_type_t;
+
+typedef enum {
+    CAM_FACE_CT_POINT_EYE_L_PUPIL,
+    CAM_FACE_CT_POINT_EYE_L_IN,
+    CAM_FACE_CT_POINT_EYE_L_OUT,
+    CAM_FACE_CT_POINT_EYE_L_UP,
+    CAM_FACE_CT_POINT_EYE_L_DOWN,
+    CAM_FACE_CT_POINT_EYE_R_PUPIL,
+    CAM_FACE_CT_POINT_EYE_R_IN,
+    CAM_FACE_CT_POINT_EYE_R_OUT,
+    CAM_FACE_CT_POINT_EYE_R_UP,
+    CAM_FACE_CT_POINT_EYE_R_DOWN,
+    CAM_FACE_CT_POINT_EYE_MAX
+} cam_face_ct_point_eye_t;
+
+typedef enum {
+    CAM_FACE_CT_POINT_FOREHEAD,
+    CAM_FACE_CT_POINT_FOREHEAD_MAX
+} cam_face_ct_point_forh_t;
+
+typedef enum {
+    CAM_FACE_CT_POINT_NOSE,
+    CAM_FACE_CT_POINT_NOSE_TIP,
+    CAM_FACE_CT_POINT_NOSE_L,
+    CAM_FACE_CT_POINT_NOSE_R,
+    CAM_FACE_CT_POINT_NOSE_L_0,
+    CAM_FACE_CT_POINT_NOSE_R_0,
+    CAM_FACE_CT_POINT_NOSE_L_1,
+    CAM_FACE_CT_POINT_NOSE_R_1,
+    CAM_FACE_CT_POINT_NOSE_MAX
+} cam_face_ct_point_nose_t;
+
+typedef enum {
+    CAM_FACE_CT_POINT_MOUTH_L,
+    CAM_FACE_CT_POINT_MOUTH_R,
+    CAM_FACE_CT_POINT_MOUTH_UP,
+    CAM_FACE_CT_POINT_MOUTH_DOWN,
+    CAM_FACE_CT_POINT_MOUTH_MAX
+} cam_face_ct_point_mouth_t;
+
+typedef enum {
+    CAM_FACE_CT_POINT_LIP_UP,
+    CAM_FACE_CT_POINT_LIP_DOWN,
+    CAM_FACE_CT_POINT_LIP_MAX
+} cam_face_ct_point_lip_t;
+
+typedef enum {
+    CAM_FACE_CT_POINT_BROW_L_UP,
+    CAM_FACE_CT_POINT_BROW_L_DOWN,
+    CAM_FACE_CT_POINT_BROW_L_IN,
+    CAM_FACE_CT_POINT_BROW_L_OUT,
+    CAM_FACE_CT_POINT_BROW_R_UP,
+    CAM_FACE_CT_POINT_BROW_R_DOWN,
+    CAM_FACE_CT_POINT_BROW_R_IN,
+    CAM_FACE_CT_POINT_BROW_R_OUT,
+    CAM_FACE_CT_POINT_BROW_MAX
+} cam_face_ct_point_brow_t;
+
+typedef enum {
+    CAM_FACE_CT_POINT_CHIN,
+    CAM_FACE_CT_POINT_CHIN_L,
+    CAM_FACE_CT_POINT_CHIN_R,
+    CAM_FACE_CT_POINT_CHIN_MAX
+} cam_face_ct_point_chin_t;
+
+typedef enum {
+    CAM_FACE_CT_POINT_EAR_L_DOWN,
+    CAM_FACE_CT_POINT_EAR_R_DOWN,
+    CAM_FACE_CT_POINT_EAR_L_UP,
+    CAM_FACE_CT_POINT_EAR_R_UP,
+    CAM_FACE_CT_POINT_EAR_MAX
+} cam_face_ct_point_ear_t;
+
+typedef struct {
+  uint8_t is_eye_valid;
+  cam_coordinate_type_t contour_eye_pt[CAM_FACE_CT_POINT_EYE_MAX];
+  uint8_t is_forehead_valid;
+  cam_coordinate_type_t contour_forh_pt[CAM_FACE_CT_POINT_FOREHEAD_MAX];
+  uint8_t is_nose_valid;
+  cam_coordinate_type_t contour_nose_pt[CAM_FACE_CT_POINT_NOSE_MAX];
+  uint8_t is_mouth_valid;
+  cam_coordinate_type_t contour_mouth_pt[CAM_FACE_CT_POINT_MOUTH_MAX];
+  uint8_t is_lip_valid;
+  cam_coordinate_type_t contour_lip_pt[CAM_FACE_CT_POINT_LIP_MAX];
+  uint8_t is_brow_valid;
+  cam_coordinate_type_t contour_brow_pt[CAM_FACE_CT_POINT_BROW_MAX];
+  uint8_t is_chin_valid;
+  cam_coordinate_type_t contour_chin_pt[CAM_FACE_CT_POINT_CHIN_MAX];
+  uint8_t is_ear_valid;
+  cam_coordinate_type_t contour_ear_pt[CAM_FACE_CT_POINT_EAR_MAX];
+} cam_face_contour_info_t;
+
+typedef struct {
+    cam_face_contour_info_t face_contour[MAX_ROI];
+} cam_face_contour_data_t;
+
+typedef struct {
+    cam_coordinate_type_t left_eye_center;  /* coordinate of center of left eye */
+    cam_coordinate_type_t right_eye_center; /* coordinate of center of right eye */
+    cam_coordinate_type_t mouth_center;     /* coordinate of center of mouth */
+} cam_face_landmarks_info_t;
+
+typedef struct {
+    cam_face_landmarks_info_t face_landmarks[MAX_ROI];
+} cam_face_landmarks_data_t;
+
+typedef struct {
+    uint8_t smile_degree;      /* smile degree (0, -100) */
+    uint8_t smile_confidence;  /* smile confidence (0, 100) */
+} cam_face_smile_info_t;
+
+typedef struct {
+    cam_face_smile_info_t smile[MAX_ROI];
+} cam_face_smile_data_t;
+
+typedef struct {
+    int8_t gaze_angle;         /* -90 -45 0 45 90 for head left to right tilt */
+    int32_t updown_dir;        /* up down direction (-180, 179) */
+    int32_t leftright_dir;     /* left right direction (-180, 179) */
+    int32_t roll_dir;          /* roll direction (-180, 179) */
+    int8_t left_right_gaze;    /* left right gaze degree (-50, 50) */
+    int8_t top_bottom_gaze;    /* up down gaze degree (-50, 50) */
+} cam_face_gaze_info_t;
+
+typedef struct {
+    cam_face_gaze_info_t gaze[MAX_ROI];
+} cam_face_gaze_data_t;
+
+typedef struct {
+    uint8_t blink_detected;    /* if blink is detected */
+    uint8_t left_blink;        /* left eye blink degree (0, -100) */
+    uint8_t right_blink;       /* right eye blink degree (0, -100) */
+} cam_face_blink_info_t;
+
+typedef struct {
+    cam_face_blink_info_t blink[MAX_ROI];
+} cam_face_blink_data_t;
+
+typedef struct {
+    uint8_t face_recognised;   /* if face is recognised */
+    uint32_t unique_id;   /* if face is recognised */
+} cam_face_recog_info_t;
+
+typedef struct {
+    cam_face_recog_info_t face_rec[MAX_ROI];
+} cam_face_recog_data_t;
+
+typedef struct {
+    int32_t face_id;            /* unique id for face tracking within view unless view changes */
+    int8_t score;              /* score of confidence (0, -100) */
+    cam_rect_t face_boundary;  /* boundary of face detected */
+} cam_face_detection_info_t;
+
+typedef struct {
+    uint32_t frame_id;                         /* frame index of which faces are detected */
+    uint8_t num_faces_detected;                /* number of faces detected */
+    cam_face_detection_info_t faces[MAX_ROI];  /* detailed information of faces detected */
+    qcamera_face_detect_type_t fd_type;        /* face detect for preview or snapshot frame*/
+    cam_dimension_t fd_frame_dim;              /* frame dims on which fd is applied */
+    uint8_t update_flag;                       /* flag to inform whether HAL needs to send cb
+                                                * to app or not */
+} cam_face_detection_data_t;
+
+// definition of composite face detection data
+typedef struct {
+    cam_face_detection_data_t detection_data;
+
+    int8_t recog_valid;
+    cam_face_recog_data_t recog_data;
+
+    int8_t blink_valid;
+    cam_face_blink_data_t blink_data;
+
+    int8_t gaze_valid;
+    cam_face_gaze_data_t gaze_data;
+
+    int8_t smile_valid;
+    cam_face_smile_data_t smile_data;
+
+    int8_t landmark_valid;
+    cam_face_landmarks_data_t landmark_data;
+
+    int8_t contour_valid;
+    cam_face_contour_data_t contour_data;
+} cam_faces_data_t;
+
+#define CAM_HISTOGRAM_STATS_SIZE 256
+typedef struct {
+    uint32_t max_hist_value;
+    uint32_t hist_buf[CAM_HISTOGRAM_STATS_SIZE]; /* buf holding histogram stats data */
+} cam_histogram_data_t;
+
+typedef struct {
+    cam_histogram_data_t r_stats;
+    cam_histogram_data_t b_stats;
+    cam_histogram_data_t gr_stats;
+    cam_histogram_data_t gb_stats;
+} cam_bayer_hist_stats_t;
+
+typedef enum {
+    CAM_HISTOGRAM_TYPE_BAYER,
+    CAM_HISTOGRAM_TYPE_YUV
+} cam_histogram_type_t;
+
+typedef struct {
+    cam_histogram_type_t type;
+    union {
+        cam_bayer_hist_stats_t bayer_stats;
+        cam_histogram_data_t yuv_stats;
+    };
+} cam_hist_stats_t;
+
+enum cam_focus_distance_index{
+  CAM_FOCUS_DISTANCE_NEAR_INDEX,  /* 0 */
+  CAM_FOCUS_DISTANCE_OPTIMAL_INDEX,
+  CAM_FOCUS_DISTANCE_FAR_INDEX,
+  CAM_FOCUS_DISTANCE_MAX_INDEX
+};
+
+typedef struct {
+  float focus_distance[CAM_FOCUS_DISTANCE_MAX_INDEX];
+} cam_focus_distances_info_t;
+
+typedef struct {
+    uint32_t scale;
+    float diopter;
+} cam_focus_pos_info_t;
+
+typedef struct {
+    float focalLengthRatio;
+} cam_focal_length_ratio_t;
+
+typedef struct {
+    uint8_t needFlush;
+    uint32_t focused_frame_idx;
+} cam_af_flush_info_t;
+
+/* Different autofocus cycle when calling do_autoFocus
+ * CAM_AF_COMPLETE_EXISTING_SWEEP: Complete existing sweep
+ * if one is ongoing, and lock.
+ * CAM_AF_DO_ONE_FULL_SWEEP: Do one full sweep, regardless
+ * of the current state, and lock.
+ * CAM_AF_START_CONTINUOUS_SWEEP: Start continuous sweep.
+ * After do_autoFocus, HAL receives an event: CAM_AF_FOCUSED,
+ * or CAM_AF_NOT_FOCUSED.
+ * cancel_autoFocus stops any lens movement.
+ * Each do_autoFocus call only produces 1 FOCUSED/NOT_FOCUSED
+ * event, not both.
+ */
+typedef enum {
+    CAM_AF_COMPLETE_EXISTING_SWEEP,
+    CAM_AF_DO_ONE_FULL_SWEEP,
+    CAM_AF_START_CONTINUOUS_SWEEP
+} cam_autofocus_cycle_t;
+
+typedef enum {
+    CAM_AF_SCANNING,
+    CAM_AF_FOCUSED,
+    CAM_AF_NOT_FOCUSED,
+    CAM_CAF_SCANNING,
+    CAM_CAF_FOCUSED,
+    CAM_CAF_NOT_FOCUSED,
+    CAM_AF_INACTIVE
+} cam_autofocus_state_t;
+
+//Don't change the order of the AF states below. It should match
+//with the corresponding enum in frameworks (camera3.h and
+//CameraMetadata.java)
+typedef enum {
+    CAM_AF_STATE_INACTIVE,
+    CAM_AF_STATE_PASSIVE_SCAN,
+    CAM_AF_STATE_PASSIVE_FOCUSED,
+    CAM_AF_STATE_ACTIVE_SCAN,
+    CAM_AF_STATE_FOCUSED_LOCKED,
+    CAM_AF_STATE_NOT_FOCUSED_LOCKED,
+    CAM_AF_STATE_PASSIVE_UNFOCUSED
+} cam_af_state_t;
+
+typedef struct {
+    cam_af_state_t focus_state;           /* state of focus */
+    cam_focus_distances_info_t focus_dist;       /* focus distance */
+    cam_focus_mode_type focus_mode;        /* focus mode from backend */
+    int32_t focus_pos;
+    cam_af_flush_info_t flush_info;
+} cam_auto_focus_data_t;
+
+typedef struct {
+    uint32_t stream_id;
+    cam_rect_t crop;
+    cam_rect_t roi_map;
+} cam_stream_crop_info_t;
+
+typedef struct {
+    uint8_t num_of_streams;
+    cam_stream_crop_info_t crop_info[MAX_NUM_STREAMS];
+} cam_crop_data_t;
+
+typedef struct {
+    uint32_t stream_id;
+    uint32_t cds_enable;
+} cam_stream_cds_info_t;
+
+typedef struct {
+    uint8_t session_cds_enable;
+    uint8_t num_of_streams;
+    cam_stream_cds_info_t cds_info[MAX_NUM_STREAMS];
+} cam_cds_data_t;
+
+typedef enum {
+    DO_NOT_NEED_FUTURE_FRAME,
+    NEED_FUTURE_FRAME,
+} cam_prep_snapshot_state_t;
+
+typedef enum {
+    CC_RED_GAIN,
+    CC_GREEN_RED_GAIN,
+    CC_GREEN_BLUE_GAIN,
+    CC_BLUE_GAIN,
+    CC_GAIN_MAX
+} cam_cc_gains_type_t;
+
+typedef struct {
+    float gains[CC_GAIN_MAX];
+} cam_color_correct_gains_t;
+
+typedef struct {
+    // If LED is ON and Burst Num > 1, this is first LED ON frame
+    uint32_t min_frame_idx;
+    // If LED is ON and Burst Num > 1, this is first LED Off frame after ON
+    uint32_t max_frame_idx;
+    // Used only when LED Is ON and burst num > 1
+    uint32_t num_led_on_frames;
+    // Skip count after LED is turned OFF
+    uint32_t frame_skip_count;
+    // Batch id for each picture request
+    uint32_t config_batch_idx;
+} cam_frame_idx_range_t;
+
+typedef enum {
+  S_NORMAL = 0,
+  S_SCENERY,
+  S_PORTRAIT,
+  S_PORTRAIT_BACKLIGHT,
+  S_SCENERY_BACKLIGHT,
+  S_BACKLIGHT,
+  S_HDR,
+  S_MAX_DEFAULT,
+  S_CUSTOM0 = S_MAX_DEFAULT,
+  S_CUSTOM1,
+  S_CUSTOM2,
+  S_CUSTOM3,
+  S_CUSTOM4,
+  S_CUSTOM5,
+  S_CUSTOM6,
+  S_CUSTOM7,
+  S_CUSTOM8,
+  S_CUSTOM9,
+  S_MAX,
+} cam_auto_scene_t;
+
+typedef struct {
+  uint32_t is_hdr_scene;
+  float    hdr_confidence;
+} cam_asd_hdr_scene_data_t;
+
+typedef struct {
+  uint32_t          detected;
+  float             confidence;
+  uint32_t          auto_compensation;
+} cam_asd_scene_info_t;
+
+typedef struct {
+  cam_auto_scene_t      detected_scene;
+  uint8_t               max_n_scenes;
+  cam_asd_scene_info_t  scene_info[S_MAX];
+} cam_asd_decision_t;
+
+
+typedef struct {
+   uint32_t meta_frame_id;
+} cam_meta_valid_t;
+
+typedef enum {
+    CAM_SENSOR_RAW,
+    CAM_SENSOR_YUV,
+    CAM_SENSOR_MONO
+} cam_sensor_t;
+
+typedef struct {
+    cam_flash_mode_t flash_mode;
+    float            aperture_value;
+    cam_flash_state_t        flash_state;
+    float            focal_length;
+    float            f_number;
+    int32_t          sensing_method;
+    float            crop_factor;
+    cam_sensor_t sens_type;
+} cam_sensor_params_t;
+
+typedef enum {
+    CAM_METERING_MODE_UNKNOWN = 0,
+    CAM_METERING_MODE_AVERAGE = 1,
+    CAM_METERING_MODE_CENTER_WEIGHTED_AVERAGE = 2,
+    CAM_METERING_MODE_SPOT = 3,
+    CAM_METERING_MODE_MULTI_SPOT = 4,
+    CAM_METERING_MODE_PATTERN = 5,
+    CAM_METERING_MODE_PARTIAL = 6,
+    CAM_METERING_MODE_OTHER = 255,
+} cam_metering_mode_t;
+
+typedef struct {
+    float exp_time;
+    int32_t iso_value;
+    uint32_t flash_needed;
+    uint32_t settled;
+    cam_wb_mode_type wb_mode;
+    uint32_t metering_mode;
+    uint32_t exposure_program;
+    uint32_t exposure_mode;
+    uint32_t scenetype;
+    float brightness;
+    float est_snap_exp_time;
+    int32_t est_snap_iso_value;
+    uint32_t est_snap_luma;
+    uint32_t est_snap_target;
+} cam_3a_params_t;
+
+typedef struct {
+    uint64_t sw_version_number;
+    int32_t aec_debug_data_size;
+    char aec_private_debug_data[AEC_DEBUG_DATA_SIZE];
+} cam_ae_exif_debug_t;
+
+typedef struct {
+    int8_t awb_ccm_enable;
+    int8_t hard_awb_ccm_flag;
+    int8_t ccm_update_flag;
+    float  ccm[AWB_NUM_CCM_ROWS][AWB_NUM_CCM_COLS];
+    float  ccm_offset[AWB_NUM_CCM_ROWS];
+} cam_awb_ccm_update_t;
+
+typedef struct {
+    int32_t cct_value;
+    cam_awb_gain_t rgb_gains;
+    cam_awb_ccm_update_t ccm_update;
+} cam_awb_params_t;
+
+typedef struct {
+    int32_t awb_debug_data_size;
+    char awb_private_debug_data[AWB_DEBUG_DATA_SIZE];
+} cam_awb_exif_debug_t;
+
+/* AF debug data for exif*/
+typedef struct {
+    int32_t af_debug_data_size;
+    char af_private_debug_data[AF_DEBUG_DATA_SIZE];
+} cam_af_exif_debug_t;
+
+typedef struct {
+    int32_t asd_debug_data_size;
+    char asd_private_debug_data[ASD_DEBUG_DATA_SIZE];
+} cam_asd_exif_debug_t;
+
+typedef struct {
+    int32_t bg_stats_buffer_size;
+    int32_t bg_config_buffer_size;
+    char stats_buffer_private_debug_data[STATS_BUFFER_DEBUG_DATA_SIZE];
+} cam_stats_buffer_exif_debug_t;
+
+typedef struct {
+    int32_t be_stats_buffer_size;
+    int32_t be_config_buffer_size;
+    char bestats_buffer_private_debug_data[BESTATS_BUFFER_DEBUG_DATA_SIZE];
+} cam_bestats_buffer_exif_debug_t;
+
+typedef struct {
+    int32_t bhist_stats_buffer_size;
+    char bhist_private_debug_data[BHIST_STATS_DEBUG_DATA_SIZE];
+} cam_bhist_buffer_exif_debug_t;
+
+/* 3A version*/
+typedef struct {
+    uint16_t major_version;
+    uint16_t minor_version;
+    uint16_t patch_version;
+    uint16_t new_feature_des;
+} cam_q3a_version_t;
+
+typedef struct {
+    int32_t tuning_info_buffer_size;
+    char tuning_info_private_debug_data[TUNING_INFO_DEBUG_DATA_SIZE];
+} cam_q3a_tuning_info_t;
+
+typedef struct {
+    uint32_t tuning_data_version;
+    size_t tuning_sensor_data_size;
+    size_t tuning_vfe_data_size;
+    size_t tuning_cpp_data_size;
+    size_t tuning_cac_data_size;
+    size_t tuning_cac_data_size2;
+    size_t tuning_mod3_data_size;
+    uint8_t  data[TUNING_DATA_MAX];
+}tuning_params_t;
+
+typedef struct {
+    int32_t event_type;
+    cam_dimension_t dim;
+    size_t size;
+    char path[QCAMERA_MAX_FILEPATH_LENGTH];
+    cam_format_t picture_format;
+} cam_int_evt_params_t;
+
+typedef struct {
+  uint8_t private_isp_data[MAX_ISP_DATA_SIZE];
+} cam_chromatix_lite_isp_t;
+
+typedef struct {
+  uint8_t private_pp_data[MAX_PP_DATA_SIZE];
+} cam_chromatix_lite_pp_t;
+
+typedef struct {
+  uint8_t private_stats_data[MAX_AE_STATS_DATA_SIZE];
+} cam_chromatix_lite_ae_stats_t;
+
+typedef struct {
+  uint8_t private_stats_data[MAX_AWB_STATS_DATA_SIZE];
+} cam_chromatix_lite_awb_stats_t;
+
+typedef struct {
+  uint8_t private_stats_data[MAX_AF_STATS_DATA_SIZE];
+} cam_chromatix_lite_af_stats_t;
+
+typedef struct {
+  uint8_t private_stats_data[MAX_ASD_STATS_DATA_SIZE];
+} cam_chromatix_lite_asd_stats_t;
+
+typedef struct {
+   uint32_t min_buffers;
+   uint32_t max_buffers;
+} cam_buffer_info_t;
+
+typedef enum {
+    /* Standalone camera (won't be linked) */
+    CAM_TYPE_STANDALONE=0,
+    /* Main camera of the related cam subsystem which controls
+       HW sync at sensor level*/
+    CAM_TYPE_MAIN,
+    /* Aux camera of the related cam subsystem */
+    CAM_TYPE_AUX
+} cam_sync_type_t;
+
+typedef struct {
+    cam_dimension_t stream_sizes[MAX_NUM_STREAMS];
+    uint32_t num_streams;
+    cam_stream_type_t type[MAX_NUM_STREAMS];
+    cam_feature_mask_t postprocess_mask[MAX_NUM_STREAMS];
+    cam_buffer_info_t buffer_info;
+    cam_is_type_t is_type[MAX_NUM_STREAMS];
+    cam_hfr_mode_t hfr_mode;
+    cam_format_t format[MAX_NUM_STREAMS];
+    uint32_t buf_alignment;
+    uint32_t min_stride;
+    uint32_t min_scanline;
+    uint8_t batch_size;
+    cam_sync_type_t sync_type;
+} cam_stream_size_info_t;
+
+
+typedef enum {
+    CAM_INTF_OVERWRITE_MINI_CHROMATIX_OFFLINE,
+    CAM_INTF_OVERWRITE_ISP_HW_DATA_OFFLINE,
+    CAM_INTF_OVERWRITE_MINI_CHROMATIX_ONLINE,
+    CAM_INTF_OVERWRITE_ISP_HW_DATA_ONLINE,
+    CAM_INTF_OVERWRITE_MAX,
+} cam_intf_overwrite_type_t;
+
+typedef struct {
+  uint8_t lds_enabled;
+  float rnr_sampling_factor;
+} cam_img_hysterisis_info_t;
+
+typedef struct {
+  /* dynamic feature enablement */
+  uint64_t dyn_feature_mask;
+  /* input frame count for stacking */
+  uint32_t input_count;
+  /* reserved for future use */
+  uint8_t  reserved[32];
+} cam_dyn_img_data_t;
+
+typedef struct {
+  cam_intf_overwrite_type_t overwrite_type;
+  char isp_hw_data_list[4096];     /* fixed upper-bound buffer; filled in by customer */
+  char chromatix_data_overwrite[4096]; /* fixed upper-bound buffer; filled in by customer */
+} cam_hw_data_overwrite_t;
+
+typedef struct {
+    uint32_t num_streams;
+    uint32_t streamID[MAX_NUM_STREAMS];
+} cam_stream_ID_t;
+
+/*CAC Message posted during pipeline*/
+typedef struct {
+    uint32_t frame_id;
+    int32_t buf_idx;
+} cam_cac_info_t;
+
+typedef struct
+{
+  uint32_t id;            /* Frame ID */
+  uint64_t timestamp;    /* Time stamp */
+  uint32_t distance_in_mm; /* Distance of object in ROI's in millimeters */
+  uint32_t confidence;     /* Confidence on distance, from 0 (no confidence) to 1024 (max) */
+  uint32_t status;        /* Status of DCRF library execution call */
+  cam_rect_t focused_roi; /* ROI's for which distance is estimated */
+  uint32_t focused_x;     /* Focus location X inside ROI with distance estimation */
+  uint32_t focused_y;     /* Focus location Y inside ROI with distance estimation */
+} cam_dcrf_result_t;
+
+typedef struct {
+    uint32_t frame_id;
+    uint32_t num_streams;
+    uint32_t stream_id[MAX_NUM_STREAMS];
+} cam_buf_divert_info_t;
+
+typedef  struct {
+    uint8_t is_stats_valid;               /* if histogram data is valid */
+    cam_hist_stats_t stats_data;          /* histogram data */
+
+    uint8_t is_faces_valid;               /* if face detection data is valid */
+    cam_face_detection_data_t faces_data; /* face detection result */
+
+    uint8_t is_focus_valid;               /* if focus data is valid */
+    cam_auto_focus_data_t focus_data;     /* focus data */
+
+    uint8_t is_crop_valid;                /* if crop data is valid */
+    cam_crop_data_t crop_data;            /* crop data */
+
+    uint8_t is_prep_snapshot_done_valid;  /* if prep snapshot done is valid */
+    cam_prep_snapshot_state_t prep_snapshot_done_state;  /* prepare snapshot done state */
+
+    uint8_t is_cac_valid;                 /* if cac info is valid */
+    cam_cac_info_t cac_info;              /* cac info */
+
+    /* Hysterisis data from Img modules */
+    uint8_t is_hyst_info_valid;           /* if hyst info is valid */
+    cam_img_hysterisis_info_t img_hyst_info; /* hyst info */
+
+    /* if good frame idx range is valid */
+    uint8_t is_good_frame_idx_range_valid;
+    /* good frame idx range, make sure:
+     * 1. good_frame_idx_range.min_frame_idx > current_frame_idx
+     * 2. good_frame_idx_range.min_frame_idx - current_frame_idx < 100 */
+    cam_frame_idx_range_t good_frame_idx_range;
+
+    cam_asd_decision_t cam_asd_info;
+
+    char private_metadata[MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES];
+
+    /* AE parameters */
+    uint8_t is_3a_params_valid;
+    cam_3a_params_t cam_3a_params;
+
+    /* AE exif debug parameters */
+    uint8_t is_ae_exif_debug_valid;
+    cam_ae_exif_debug_t ae_exif_debug_params;
+
+    /* AWB exif debug parameters */
+    uint8_t is_awb_exif_debug_valid;
+    cam_awb_exif_debug_t awb_exif_debug_params;
+
+    /* AF exif debug parameters */
+    uint8_t is_af_exif_debug_valid;
+    cam_af_exif_debug_t af_exif_debug_params;
+
+    /* ASD exif debug parameters */
+    uint8_t is_asd_exif_debug_valid;
+    cam_asd_exif_debug_t asd_exif_debug_params;
+
+    /* Stats buffer exif debug parameters */
+    uint8_t is_stats_buffer_exif_debug_valid;
+    cam_stats_buffer_exif_debug_t stats_buffer_exif_debug_params;
+
+    /* BE stats buffer exif debug parameters */
+    uint8_t is_bestats_buffer_exif_debug_valid;
+    cam_bestats_buffer_exif_debug_t bestats_buffer_exif_debug_params;
+
+    /* Bhist exif debug parameters. */
+    uint8_t is_bhist_exif_debug_valid;
+    cam_bhist_buffer_exif_debug_t bhist_exif_debug_params;
+
+    /* AWB parameters */
+    uint8_t is_awb_params_valid;
+    cam_awb_params_t awb_params;
+
+    /* sensor parameters */
+    uint8_t is_sensor_params_valid;
+    cam_sensor_params_t sensor_params;
+
+    /* Meta valid params */
+    uint8_t is_meta_valid;
+    cam_meta_valid_t meta_valid_params;
+
+    /*Tuning Data*/
+    uint8_t is_tuning_params_valid;
+    tuning_params_t tuning_params;
+
+    uint8_t is_chromatix_lite_isp_valid;
+    cam_chromatix_lite_isp_t chromatix_lite_isp_data;
+
+    uint8_t is_chromatix_lite_pp_valid;
+    cam_chromatix_lite_pp_t chromatix_lite_pp_data;
+
+    uint8_t is_chromatix_lite_ae_stats_valid;
+    cam_chromatix_lite_ae_stats_t chromatix_lite_ae_stats_data;
+
+    uint8_t is_chromatix_lite_awb_stats_valid;
+    cam_chromatix_lite_awb_stats_t chromatix_lite_awb_stats_data;
+
+    uint8_t is_chromatix_lite_af_stats_valid;
+    cam_chromatix_lite_af_stats_t chromatix_lite_af_stats_data;
+
+    uint8_t is_dcrf_result_valid;
+    cam_dcrf_result_t dcrf_result;
+
+    /* Dynamic feature enablement from postprocessing modules */
+    uint8_t is_dyn_img_data_valid;
+    cam_dyn_img_data_t dyn_img_data;
+
+} cam_metadata_info_t;
+
+typedef enum {
+    CAM_INTF_PARM_HAL_VERSION = 0x1,
+
+    /* Overall mode of 3A control routines. We need to have this parameter
+     * because not all android.control.* have an OFF option, for example,
+     * AE_FPS_Range, aePrecaptureTrigger */
+    CAM_INTF_META_MODE,
+    /* Whether AE is currently updating the sensor exposure and sensitivity
+     * fields */
+    CAM_INTF_META_AEC_MODE,
+    CAM_INTF_PARM_WHITE_BALANCE,
+    CAM_INTF_PARM_FOCUS_MODE,
+
+    /* common between HAL1 and HAL3 */
+    CAM_INTF_PARM_ANTIBANDING,
+    CAM_INTF_PARM_EXPOSURE_COMPENSATION,
+    CAM_INTF_PARM_EV_STEP,
+    CAM_INTF_PARM_AEC_LOCK,
+    CAM_INTF_PARM_FPS_RANGE, /* 10 */
+    CAM_INTF_PARM_AWB_LOCK,
+    CAM_INTF_PARM_EFFECT,
+    CAM_INTF_PARM_BESTSHOT_MODE,
+    CAM_INTF_PARM_DIS_ENABLE,
+    CAM_INTF_PARM_LED_MODE,
+    CAM_INTF_META_HISTOGRAM,
+    CAM_INTF_META_FACE_DETECTION,
+    /* Whether optical image stabilization is enabled. */
+    CAM_INTF_META_LENS_OPT_STAB_MODE,
+    /* specific to HAl1 */
+    CAM_INTF_META_AUTOFOCUS_DATA,
+    CAM_INTF_PARM_QUERY_FLASH4SNAP, /* 20 */
+    CAM_INTF_PARM_EXPOSURE,
+    CAM_INTF_PARM_SHARPNESS,
+    CAM_INTF_PARM_CONTRAST,
+    CAM_INTF_PARM_SATURATION,
+    CAM_INTF_PARM_BRIGHTNESS,
+    CAM_INTF_PARM_ISO,
+    CAM_INTF_PARM_ZOOM,
+    CAM_INTF_PARM_ROLLOFF,
+    CAM_INTF_PARM_MODE,             /* camera mode */
+    CAM_INTF_PARM_AEC_ALGO_TYPE, /* 30 */ /* auto exposure algorithm */
+    CAM_INTF_PARM_FOCUS_ALGO_TYPE,  /* focus algorithm */
+    CAM_INTF_PARM_AEC_ROI,
+    CAM_INTF_PARM_AF_ROI,
+    CAM_INTF_PARM_SCE_FACTOR,
+    CAM_INTF_PARM_FD,
+    CAM_INTF_PARM_MCE,
+    CAM_INTF_PARM_HFR,
+    CAM_INTF_PARM_REDEYE_REDUCTION,
+    CAM_INTF_PARM_WAVELET_DENOISE,
+    CAM_INTF_PARM_TEMPORAL_DENOISE, /* 40 */
+    CAM_INTF_PARM_HISTOGRAM,
+    CAM_INTF_PARM_ASD_ENABLE,
+    CAM_INTF_PARM_RECORDING_HINT,
+    CAM_INTF_PARM_HDR,
+    CAM_INTF_PARM_MAX_DIMENSION,
+    CAM_INTF_PARM_RAW_DIMENSION,
+    CAM_INTF_PARM_FRAMESKIP,
+    CAM_INTF_PARM_ZSL_MODE,  /* indicating if it's running in ZSL mode */
+    CAM_INTF_PARM_BURST_NUM,
+    CAM_INTF_PARM_RETRO_BURST_NUM, /* 50 */
+    CAM_INTF_PARM_BURST_LED_ON_PERIOD,
+    CAM_INTF_PARM_HDR_NEED_1X, /* if HDR needs 1x output */
+    CAM_INTF_PARM_LOCK_CAF,
+    CAM_INTF_PARM_VIDEO_HDR,
+    CAM_INTF_PARM_SENSOR_HDR,
+    CAM_INTF_PARM_ROTATION,
+    CAM_INTF_PARM_SCALE,
+    CAM_INTF_PARM_VT, /* indicating if it's a Video Call Application */
+    CAM_INTF_META_CROP_DATA,
+    CAM_INTF_META_PREP_SNAPSHOT_DONE, /* 60 */
+    CAM_INTF_META_GOOD_FRAME_IDX_RANGE,
+    CAM_INTF_META_ASD_HDR_SCENE_DATA,
+    CAM_INTF_META_ASD_SCENE_INFO,
+    CAM_INTF_META_CURRENT_SCENE,
+    CAM_INTF_META_AEC_INFO,
+    CAM_INTF_META_SENSOR_INFO,
+    CAM_INTF_META_CHROMATIX_LITE_ISP,
+    CAM_INTF_META_CHROMATIX_LITE_PP,
+    CAM_INTF_META_CHROMATIX_LITE_AE,
+    CAM_INTF_META_CHROMATIX_LITE_AWB, /* 70 */
+    CAM_INTF_META_CHROMATIX_LITE_AF,
+    CAM_INTF_META_CHROMATIX_LITE_ASD,
+    CAM_INTF_META_EXIF_DEBUG_AE,
+    CAM_INTF_META_EXIF_DEBUG_AWB,
+    CAM_INTF_META_EXIF_DEBUG_AF,
+    CAM_INTF_META_EXIF_DEBUG_ASD,
+    CAM_INTF_META_EXIF_DEBUG_STATS,
+    CAM_INTF_META_EXIF_DEBUG_BESTATS,
+    CAM_INTF_META_EXIF_DEBUG_BHIST,
+    CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
+    CAM_INTF_PARM_GET_CHROMATIX,
+    CAM_INTF_PARM_SET_RELOAD_CHROMATIX,
+    CAM_INTF_PARM_SET_AUTOFOCUSTUNING, /* 80 */
+    CAM_INTF_PARM_GET_AFTUNE,
+    CAM_INTF_PARM_SET_RELOAD_AFTUNE,
+    CAM_INTF_PARM_SET_VFE_COMMAND,
+    CAM_INTF_PARM_SET_PP_COMMAND,
+    CAM_INTF_PARM_TINTLESS,
+    CAM_INTF_PARM_LONGSHOT_ENABLE,
+    CAM_INTF_PARM_RDI_MODE,
+    CAM_INTF_PARM_CDS_MODE,
+    CAM_INTF_PARM_TONE_MAP_MODE,
+    CAM_INTF_PARM_CAPTURE_FRAME_CONFIG, /* 90 */
+    CAM_INTF_PARM_DUAL_LED_CALIBRATION,
+    CAM_INTF_PARM_ADV_CAPTURE_MODE,
+
+    /* stream based parameters */
+    CAM_INTF_PARM_DO_REPROCESS,
+    CAM_INTF_PARM_SET_BUNDLE,
+    CAM_INTF_PARM_STREAM_FLIP,
+    CAM_INTF_PARM_GET_OUTPUT_CROP,
+
+    CAM_INTF_PARM_EZTUNE_CMD,
+    CAM_INTF_PARM_INT_EVT,
+
+    /* specific to HAL3 */
+    /* Whether the metadata maps to a valid frame number */
+    CAM_INTF_META_FRAME_NUMBER_VALID,
+    /* Whether the urgent metadata maps to a valid frame number */
+    CAM_INTF_META_URGENT_FRAME_NUMBER_VALID,
+    /* Whether the stream buffer corresponding this frame is dropped or not */
+    CAM_INTF_META_FRAME_DROPPED, /* 100 */
+    /* COLOR CORRECTION.*/
+    CAM_INTF_META_COLOR_CORRECT_MODE,
+    /* A transform matrix to chromatically adapt pixels in the CIE XYZ (1931)
+     * color space from the scene illuminant to the sRGB-standard D65-illuminant. */
+    CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
+    /*Color channel gains in the Bayer raw domain in the order [RGeGoB]*/
+    CAM_INTF_META_COLOR_CORRECT_GAINS,
+    /*The best fit color transform matrix calculated by the stats*/
+    CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM,
+    /*The best fit color channels gains calculated by the stats*/
+    CAM_INTF_META_PRED_COLOR_CORRECT_GAINS,
+    /* CONTROL */
+    /* A frame counter set by the framework. Must be maintained unchanged in
+     * output frame. */
+    CAM_INTF_META_FRAME_NUMBER,
+    /* A frame counter set by the framework. Must be maintained unchanged in
+     * output frame. */
+    CAM_INTF_META_URGENT_FRAME_NUMBER,
+    /*Number of streams and size of streams in current configuration*/
+    CAM_INTF_META_STREAM_INFO,
+    /* List of areas to use for metering */
+    CAM_INTF_META_AEC_ROI,
+    /* Whether the HAL must trigger precapture metering.*/
+    CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, /* 110 */
+    /* The ID sent with the latest CAMERA2_TRIGGER_PRECAPTURE_METERING call */
+    /* Current state of AE algorithm */
+    CAM_INTF_META_AEC_STATE,
+    /* List of areas to use for focus estimation */
+    CAM_INTF_META_AF_ROI,
+    /* Whether the HAL must trigger autofocus. */
+    CAM_INTF_META_AF_TRIGGER,
+    /* Current state of AF algorithm */
+    CAM_INTF_META_AF_STATE,
+    /* List of areas to use for illuminant estimation */
+    CAM_INTF_META_AWB_REGIONS,
+    /* Current state of AWB algorithm */
+    CAM_INTF_META_AWB_STATE,
+    /*Whether black level compensation is frozen or free to vary*/
+    CAM_INTF_META_BLACK_LEVEL_LOCK,
+    /* Information to 3A routines about the purpose of this capture, to help
+     * decide optimal 3A strategy */
+    CAM_INTF_META_CAPTURE_INTENT,
+    /* DEMOSAIC */
+    /* Controls the quality of the demosaicing processing */
+    CAM_INTF_META_DEMOSAIC,
+    /* EDGE */
+    /* Operation mode for edge enhancement */
+    CAM_INTF_META_EDGE_MODE, /* 120 */
+    /* Control the amount of edge enhancement applied to the images.*/
+    /* 1-10; 10 is maximum sharpening */
+    CAM_INTF_META_SHARPNESS_STRENGTH,
+    /* FLASH */
+    /* Power for flash firing/torch, 10 is max power; 0 is no flash. Linear */
+    CAM_INTF_META_FLASH_POWER,
+    /* Firing time of flash relative to start of exposure, in nanoseconds*/
+    CAM_INTF_META_FLASH_FIRING_TIME,
+    /* Current state of the flash unit */
+    CAM_INTF_META_FLASH_STATE,
+    /* GEOMETRIC */
+    /* Operating mode of geometric correction */
+    CAM_INTF_META_GEOMETRIC_MODE,
+    /* Control the amount of shading correction applied to the images */
+    CAM_INTF_META_GEOMETRIC_STRENGTH,
+    /* HOT PIXEL */
+    /* Set operational mode for hot pixel correction */
+    CAM_INTF_META_HOTPIXEL_MODE,
+    /* LENS */
+    /* Size of the lens aperture */
+    CAM_INTF_META_LENS_APERTURE,
+    /* State of lens neutral density filter(s) */
+    CAM_INTF_META_LENS_FILTERDENSITY,
+    /* Lens optical zoom setting */
+    CAM_INTF_META_LENS_FOCAL_LENGTH, /* 130 */
+    /* Distance to plane of sharpest focus, measured from frontmost surface
+     * of the lens */
+    CAM_INTF_META_LENS_FOCUS_DISTANCE,
+    /* The range of scene distances that are in sharp focus (depth of field) */
+    CAM_INTF_META_LENS_FOCUS_RANGE,
+    /*Whether the hal needs to output the lens shading map*/
+    CAM_INTF_META_LENS_SHADING_MAP_MODE,
+    /* Current lens status */
+    CAM_INTF_META_LENS_STATE,
+    /* NOISE REDUCTION */
+    /* Mode of operation for the noise reduction algorithm */
+    CAM_INTF_META_NOISE_REDUCTION_MODE,
+   /* Control the amount of noise reduction applied to the images.
+    * 1-10; 10 is max noise reduction */
+    CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
+    /* SCALER */
+    /* Top-left corner and width of the output region to select from the active
+     * pixel array */
+    CAM_INTF_META_SCALER_CROP_REGION,
+    /* The estimated scene illumination lighting frequency */
+    CAM_INTF_META_SCENE_FLICKER,
+    /* SENSOR */
+    /* Duration each pixel is exposed to light, in nanoseconds */
+    CAM_INTF_META_SENSOR_EXPOSURE_TIME,
+    /* Duration from start of frame exposure to start of next frame exposure,
+     * in nanoseconds */
+    CAM_INTF_META_SENSOR_FRAME_DURATION, /* 140 */
+    /* Gain applied to image data. Must be implemented through analog gain only
+     * if set to values below 'maximum analog sensitivity'. */
+    CAM_INTF_META_SENSOR_SENSITIVITY,
+    /* Time at start of exposure of first row */
+    CAM_INTF_META_SENSOR_TIMESTAMP,
+    /* Duration b/w start of first row exposure and the start of last
+       row exposure in nanoseconds */
+    CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW,
+    /* SHADING */
+    /* Quality of lens shading correction applied to the image data */
+    CAM_INTF_META_SHADING_MODE,
+    /* Control the amount of shading correction applied to the images.
+     * unitless: 1-10; 10 is full shading compensation */
+    CAM_INTF_META_SHADING_STRENGTH,
+    /* STATISTICS */
+    /* State of the face detector unit */
+    CAM_INTF_META_STATS_FACEDETECT_MODE,
+    /* Operating mode for histogram generation */
+    CAM_INTF_META_STATS_HISTOGRAM_MODE,
+    /* Operating mode for sharpness map generation */
+    CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
+    /* A 3-channel sharpness map, based on the raw sensor data,
+     * If only a monochrome sharpness map is supported, all channels
+     * should have the same data
+     */
+    CAM_INTF_META_STATS_SHARPNESS_MAP,
+
+    /* TONEMAP */
+    /* Tone map mode */
+    CAM_INTF_META_TONEMAP_MODE, /* 150 */
+    /* Table mapping RGB input values to output values */
+    CAM_INTF_META_TONEMAP_CURVES,
+
+    CAM_INTF_META_FLASH_MODE,
+    /* 2D array of gain factors for each color channel that was used to
+     * compensate for lens shading for this frame */
+    CAM_INTF_META_LENS_SHADING_MAP,
+    CAM_INTF_META_PRIVATE_DATA,
+    CAM_INTF_PARM_STATS_DEBUG_MASK,
+    CAM_INTF_PARM_STATS_AF_PAAF,
+    /* Indicates streams ID of all the requested buffers */
+    CAM_INTF_META_STREAM_ID,
+    CAM_INTF_PARM_FOCUS_BRACKETING,
+    CAM_INTF_PARM_FLASH_BRACKETING,
+    CAM_INTF_PARM_GET_IMG_PROP, /* 160 */
+    CAM_INTF_META_JPEG_GPS_COORDINATES,
+    CAM_INTF_META_JPEG_GPS_PROC_METHODS,
+    CAM_INTF_META_JPEG_GPS_TIMESTAMP,
+    CAM_INTF_META_JPEG_ORIENTATION,
+    CAM_INTF_META_JPEG_QUALITY,
+    CAM_INTF_META_JPEG_THUMB_QUALITY,
+    CAM_INTF_META_JPEG_THUMB_SIZE,
+
+    CAM_INTF_META_TEST_PATTERN_DATA,
+    /* DNG file support */
+    CAM_INTF_META_PROFILE_TONE_CURVE,
+    CAM_INTF_META_NEUTRAL_COL_POINT, /* 170 */
+
+    /* CAC */
+    CAM_INTF_META_CAC_INFO,
+    CAM_INTF_PARM_CAC,
+    CAM_INTF_META_IMG_HYST_INFO,
+
+    /* trigger for all modules to read the debug/log level properties */
+    CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
+
+    /* OTP : WB gr/gb */
+    CAM_INTF_META_OTP_WB_GRGB,
+    /* LED override for EZTUNE */
+    CAM_INTF_META_LED_MODE_OVERRIDE,
+    /* auto lens position info */
+    CAM_INTF_META_FOCUS_POSITION,
+    /* Manual exposure time */
+    CAM_INTF_PARM_EXPOSURE_TIME,
+    /* AWB meta data info */
+    CAM_INTF_META_AWB_INFO,
+    /* Manual lens position info */
+    CAM_INTF_PARM_MANUAL_FOCUS_POS, /* 180 */
+    /* Manual White balance gains */
+    CAM_INTF_PARM_WB_MANUAL,
+    /* Offline Data Overwrite */
+    CAM_INTF_PARM_HW_DATA_OVERWRITE,
+    /* IMG LIB reprocess debug section */
+    CAM_INTF_META_IMGLIB, /* cam_intf_meta_imglib_t */
+    /* OEM specific parameters */
+    CAM_INTF_PARM_CUSTOM,
+    /* parameters added for related cameras */
+    /* fetch calibration info for related cam subsystem */
+    CAM_INTF_PARM_RELATED_SENSORS_CALIBRATION,
+    /* focal length ratio info */
+    CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
+    /* crop for binning & FOV adjust */
+    CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
+    /* crop for trimming edge pixels */
+    CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
+    /* crop for FOV adjust and zoom */
+    CAM_INTF_META_SNAP_CROP_INFO_ISP,
+    /* crop for image-stabilization and zoom */
+    CAM_INTF_META_SNAP_CROP_INFO_CPP, /* 190 */
+    /* parameter for enabling DCRF */
+    CAM_INTF_PARM_DCRF,
+    /* metadata tag for DCRF info */
+    CAM_INTF_META_DCRF,
+    /* FLIP mode parameter*/
+    CAM_INTF_PARM_FLIP,
+    /*Frame divert info from ISP*/
+    CAM_INTF_BUF_DIVERT_INFO,
+    /* Use AV timer */
+    CAM_INTF_META_USE_AV_TIMER,
+    CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
+    /* Special event to request stream frames*/
+    CAM_INTF_PARM_REQUEST_FRAMES,
+    /* Special event to request operational mode*/
+    CAM_INTF_PARM_REQUEST_OPS_MODE,
+    CAM_INTF_META_LDAF_EXIF,
+    /*Black level parameters*/
+    CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN,
+    CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, /* 200 */
+    CAM_INTF_META_CDS_DATA,
+    /*3A low light level information*/
+    CAM_INTF_META_LOW_LIGHT,
+    /* dynamic feature detection */
+    CAM_INTF_META_IMG_DYN_FEAT, /* 203 */
+    /*Parameter entry to communicate manual
+    capture type*/
+    CAM_INTF_PARM_MANUAL_CAPTURE_TYPE,
+    /*AF state change detected by AF module*/
+    CAM_INTF_AF_STATE_TRANSITION,
+    /* face recognition */
+    CAM_INTF_META_FACE_RECOG,
+    /* face blink detection */
+    CAM_INTF_META_FACE_BLINK,
+    /* face gaze detection */
+    CAM_INTF_META_FACE_GAZE,
+    /* face smile detection */
+    CAM_INTF_META_FACE_SMILE,
+    /* face landmark detection */
+    CAM_INTF_META_FACE_LANDMARK, /* 210 */
+    /* face contour detection */
+    CAM_INTF_META_FACE_CONTOUR,
+    /* Whether EIS is enabled */
+    CAM_INTF_META_VIDEO_STAB_MODE,
+    /* Touch exposure compensation (EV) status */
+    CAM_INTF_META_TOUCH_AE_RESULT,
+    /* Param for updating initial exposure index value*/
+    CAM_INTF_PARM_INITIAL_EXPOSURE_INDEX,
+    /* Gain applied post raw capture.
+       ISP digital gain */
+    CAM_INTF_META_ISP_SENSITIVITY,
+    /* Param for enabling instant aec*/
+    CAM_INTF_PARM_INSTANT_AEC,
+    /* Param for tracking previous reprocessing activity */
+    CAM_INTF_META_REPROCESS_FLAGS,
+    /* Param of cropping information for JPEG encoder */
+    CAM_INTF_PARM_JPEG_ENCODE_CROP,
+    CAM_INTF_PARM_MAX
+} cam_intf_parm_type_t;
+
+typedef struct {
+    uint32_t forced;
+    union {
+      uint32_t force_linecount_value;
+      float    force_gain_value;
+      float    force_snap_exp_value;
+      float    force_exp_value;
+      uint32_t force_snap_linecount_value;
+      float    force_snap_gain_value;
+    } u;
+} cam_ez_force_params_t;
+
+typedef struct {
+    float cam_black_level[4];
+} cam_black_level_metadata_t;
+
+typedef enum {
+    CAM_EZTUNE_CMD_STATUS,
+    CAM_EZTUNE_CMD_AEC_ENABLE,
+    CAM_EZTUNE_CMD_AWB_ENABLE,
+    CAM_EZTUNE_CMD_AF_ENABLE,
+    CAM_EZTUNE_CMD_AEC_FORCE_LINECOUNT,
+    CAM_EZTUNE_CMD_AEC_FORCE_GAIN,
+    CAM_EZTUNE_CMD_AEC_FORCE_EXP,
+    CAM_EZTUNE_CMD_AEC_FORCE_SNAP_LC,
+    CAM_EZTUNE_CMD_AEC_FORCE_SNAP_GAIN,
+    CAM_EZTUNE_CMD_AEC_FORCE_SNAP_EXP,
+    CAM_EZTUNE_CMD_AWB_MODE,
+    CAM_EZTUNE_CMD_AWB_FORCE_DUAL_LED_IDX,
+} cam_eztune_cmd_type_t;
+
+typedef struct {
+  cam_eztune_cmd_type_t   cmd;
+  union {
+    int32_t running;
+    int32_t aec_enable;
+    int32_t awb_enable;
+    int32_t af_enable;
+    cam_ez_force_params_t ez_force_param;
+    int32_t awb_mode;
+    int32_t ez_force_dual_led_idx;
+  } u;
+} cam_eztune_cmd_data_t;
+
+
+/*****************************************************************************
+ *                 Code for HAL3 data types                                  *
+ ****************************************************************************/
+typedef enum {
+    CAM_INTF_METADATA_MAX
+} cam_intf_metadata_type_t;
+
+typedef enum {
+    CAM_INTENT_CUSTOM,
+    CAM_INTENT_PREVIEW,
+    CAM_INTENT_STILL_CAPTURE,
+    CAM_INTENT_VIDEO_RECORD,
+    CAM_INTENT_VIDEO_SNAPSHOT,
+    CAM_INTENT_ZERO_SHUTTER_LAG,
+    CAM_INTENT_MAX,
+} cam_intent_t;
+
+typedef enum {
+    /* Full application control of pipeline. All 3A routines are disabled,
+     * no other settings in android.control.* have any effect */
+    CAM_CONTROL_OFF,
+    /* Use settings for each individual 3A routine. Manual control of capture
+     * parameters is disabled. All controls in android.control.* besides sceneMode
+     * take effect */
+    CAM_CONTROL_AUTO,
+    /* Use specific scene mode. Enabling this disables control.aeMode,
+     * control.awbMode and control.afMode controls; the HAL must ignore those
+     * settings while USE_SCENE_MODE is active (except for FACE_PRIORITY scene mode).
+     * Other control entries are still active. This setting can only be used if
+     * availableSceneModes != UNSUPPORTED. TODO: Should we remove this and handle this
+     * in HAL ?*/
+    CAM_CONTROL_USE_SCENE_MODE,
+    CAM_CONTROL_MAX
+} cam_control_mode_t;
+
+typedef enum {
+    /* Use the android.colorCorrection.transform matrix to do color conversion */
+    CAM_COLOR_CORRECTION_TRANSFORM_MATRIX,
+    /* Must not slow down frame rate relative to raw bayer output */
+    CAM_COLOR_CORRECTION_FAST,
+    /* Frame rate may be reduced by high quality */
+    CAM_COLOR_CORRECTION_HIGH_QUALITY,
+} cam_color_correct_mode_t;
+
+typedef enum {
+    CAM_COLOR_CORRECTION_ABERRATION_OFF,
+    CAM_COLOR_CORRECTION_ABERRATION_FAST,
+    CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY,
+    CAM_COLOR_CORRECTION_ABERRATION_MAX
+} cam_aberration_mode_t;
+
+#define CC_MATRIX_ROWS 3
+#define CC_MATRIX_COLS 3
+
+typedef struct {
+    /* 3x3 float matrix in row-major order. each element is in range of (0, 1) */
+    cam_rational_type_t transform_matrix[CC_MATRIX_ROWS][CC_MATRIX_COLS];
+} cam_color_correct_matrix_t;
+
+#define CAM_FOCAL_LENGTHS_MAX     1
+#define CAM_APERTURES_MAX         1
+#define CAM_FILTER_DENSITIES_MAX  1
+#define CAM_MAX_MAP_HEIGHT        6
+#define CAM_MAX_MAP_WIDTH         6
+#define CAM_MAX_SHADING_MAP_WIDTH 17
+#define CAM_MAX_SHADING_MAP_HEIGHT 13
+#define CAM_MAX_TONEMAP_CURVE_SIZE    512
+#define CAM_MAX_FLASH_BRACKETING    5
+
+typedef struct {
+    /* A 1D array of pairs of floats.
+     * Mapping a 0-1 input range to a 0-1 output range.
+     * The input range must be monotonically increasing with N,
+     * and values between entries should be linearly interpolated.
+     * For example, if the array is: [0.0, 0.0, 0.3, 0.5, 1.0, 1.0],
+     * then the input->output mapping for a few sample points would be:
+     * 0 -> 0, 0.15 -> 0.25, 0.3 -> 0.5, 0.5 -> 0.64 */
+    float tonemap_points[CAM_MAX_TONEMAP_CURVE_SIZE][2];
+} cam_tonemap_curve_t;
+
+typedef struct {
+   size_t tonemap_points_cnt;
+   cam_tonemap_curve_t curves[3];
+} cam_rgb_tonemap_curves;
+
+typedef struct {
+   size_t tonemap_points_cnt;
+   cam_tonemap_curve_t curve;
+} cam_profile_tone_curve;
+
+#define NEUTRAL_COL_POINTS 3
+
+typedef struct {
+    cam_rational_type_t neutral_col_point[NEUTRAL_COL_POINTS];
+} cam_neutral_col_point_t;
+
+typedef enum {
+    OFF,
+    FAST,
+    QUALITY,
+} cam_quality_preference_t;
+
+typedef enum {
+    CAM_FLASH_CTRL_OFF,
+    CAM_FLASH_CTRL_SINGLE,
+    CAM_FLASH_CTRL_TORCH
+} cam_flash_ctrl_t;
+
+typedef struct {
+    uint8_t ae_mode;
+    uint8_t awb_mode;
+    uint8_t af_mode;
+} cam_scene_mode_overrides_t;
+
+typedef struct {
+    int32_t left;
+    int32_t top;
+    int32_t width;
+    int32_t height;
+} cam_crop_region_t;
+
+typedef struct {
+    /* Estimated sharpness for each region of the input image.
+     * Normalized to be between 0 and maxSharpnessMapValue.
+     * Higher values mean sharper (better focused) */
+    int32_t sharpness[CAM_MAX_MAP_WIDTH][CAM_MAX_MAP_HEIGHT];
+} cam_sharpness_map_t;
+
+typedef struct {
+   float lens_shading[4*CAM_MAX_SHADING_MAP_HEIGHT*CAM_MAX_SHADING_MAP_WIDTH];
+} cam_lens_shading_map_t;
+
+typedef struct {
+    int32_t min_value;
+    int32_t max_value;
+    int32_t def_value;
+    int32_t step;
+} cam_control_range_t;
+
+#define CAM_QCOM_FEATURE_NONE            (cam_feature_mask_t)0UL
+#define CAM_QCOM_FEATURE_FACE_DETECTION ((cam_feature_mask_t)1UL<<0)
+#define CAM_QCOM_FEATURE_DENOISE2D      ((cam_feature_mask_t)1UL<<1)
+#define CAM_QCOM_FEATURE_CROP           ((cam_feature_mask_t)1UL<<2)
+#define CAM_QCOM_FEATURE_ROTATION       ((cam_feature_mask_t)1UL<<3)
+#define CAM_QCOM_FEATURE_FLIP           ((cam_feature_mask_t)1UL<<4)
+#define CAM_QCOM_FEATURE_HDR            ((cam_feature_mask_t)1UL<<5)
+#define CAM_QCOM_FEATURE_REGISTER_FACE  ((cam_feature_mask_t)1UL<<6)
+#define CAM_QCOM_FEATURE_SHARPNESS      ((cam_feature_mask_t)1UL<<7)
+#define CAM_QCOM_FEATURE_VIDEO_HDR      ((cam_feature_mask_t)1UL<<8)
+#define CAM_QCOM_FEATURE_CAC            ((cam_feature_mask_t)1UL<<9)
+#define CAM_QCOM_FEATURE_SCALE          ((cam_feature_mask_t)1UL<<10)
+#define CAM_QCOM_FEATURE_EFFECT         ((cam_feature_mask_t)1UL<<11)
+#define CAM_QCOM_FEATURE_UBIFOCUS       ((cam_feature_mask_t)1UL<<12)
+#define CAM_QCOM_FEATURE_CHROMA_FLASH   ((cam_feature_mask_t)1UL<<13)
+#define CAM_QCOM_FEATURE_OPTIZOOM       ((cam_feature_mask_t)1UL<<14)
+#define CAM_QCOM_FEATURE_SENSOR_HDR     ((cam_feature_mask_t)1UL<<15)
+#define CAM_QCOM_FEATURE_REFOCUS        ((cam_feature_mask_t)1UL<<16)
+#define CAM_QCOM_FEATURE_CPP_TNR        ((cam_feature_mask_t)1UL<<17)
+#define CAM_QCOM_FEATURE_RAW_PROCESSING ((cam_feature_mask_t)1UL<<18)
+#define CAM_QCOM_FEATURE_TRUEPORTRAIT   ((cam_feature_mask_t)1UL<<19)
+#define CAM_QCOM_FEATURE_LLVD           ((cam_feature_mask_t)1UL<<20)
+#define CAM_QCOM_FEATURE_DIS20          ((cam_feature_mask_t)1UL<<21)
+#define CAM_QCOM_FEATURE_STILLMORE      ((cam_feature_mask_t)1UL<<22)
+#define CAM_QCOM_FEATURE_DCRF           ((cam_feature_mask_t)1UL<<23)
+#define CAM_QCOM_FEATURE_CDS            ((cam_feature_mask_t)1UL<<24)
+#define CAM_QCOM_FEATURE_EZTUNE         ((cam_feature_mask_t)1UL<<25)
+#define CAM_QCOM_FEATURE_DSDN           ((cam_feature_mask_t)1UL<<26) //Special CDS in CPP block
+#define CAM_QCOM_FEATURE_SW2D           ((cam_feature_mask_t)1UL<<27)
+#define CAM_OEM_FEATURE_1               ((cam_feature_mask_t)1UL<<28)
+#define CAM_OEM_FEATURE_2               ((cam_feature_mask_t)1UL<<29)
+#define CAM_QTI_FEATURE_SW_TNR          ((cam_feature_mask_t)1UL<<30)
+#define CAM_QCOM_FEATURE_METADATA_PROCESSING ((cam_feature_mask_t)1UL<<31)
+#define CAM_QCOM_FEATURE_PAAF           (((cam_feature_mask_t)1UL)<<32)
+#define CAM_QCOM_FEATURE_PP_SUPERSET    (CAM_QCOM_FEATURE_DENOISE2D|CAM_QCOM_FEATURE_CROP|\
+                                         CAM_QCOM_FEATURE_ROTATION|CAM_QCOM_FEATURE_SHARPNESS|\
+                                         CAM_QCOM_FEATURE_SCALE|CAM_QCOM_FEATURE_CAC|\
+                                         CAM_QCOM_FEATURE_EZTUNE|CAM_QCOM_FEATURE_CPP_TNR|\
+                                         CAM_QCOM_FEATURE_LLVD)
+
+#define CAM_QCOM_FEATURE_PP_PASS_1      CAM_QCOM_FEATURE_PP_SUPERSET
+#define CAM_QCOM_FEATURE_PP_PASS_2      (CAM_QCOM_FEATURE_SCALE | CAM_QCOM_FEATURE_CROP)
+
+// Counter-clockwise
+typedef enum {
+    ROTATE_0 = 1<<0,
+    ROTATE_90 = 1<<1,
+    ROTATE_180 = 1<<2,
+    ROTATE_270 = 1<<3,
+} cam_rotation_t;
+
+typedef struct {
+   cam_rotation_t rotation;         /* jpeg rotation */
+   cam_rotation_t device_rotation;  /* device rotation */
+   uint32_t streamId;
+} cam_rotation_info_t;
+
+typedef enum {
+    FLIP_NONE = 0, /* 00b */
+    FLIP_H = 1,    /* 01b */
+    FLIP_V = 2,    /* 10b */
+    FLIP_V_H = 3,  /* 11b */
+} cam_flip_t;
+
+typedef struct {
+    uint32_t bundle_id;                            /* bundle id */
+    uint8_t num_of_streams;                        /* number of streams in the bundle */
+    uint32_t stream_ids[MAX_STREAM_NUM_IN_BUNDLE]; /* array of stream ids to be bundled */
+} cam_bundle_config_t;
+
+typedef enum {
+    CAM_ONLINE_REPROCESS_TYPE,    /* online reprocess, frames from running streams */
+    CAM_OFFLINE_REPROCESS_TYPE,   /* offline reprocess, frames from external source */
+} cam_reprocess_type_enum_t;
+
+typedef struct {
+    uint8_t burst_count;
+    uint8_t min_burst_count;
+    uint8_t max_burst_count;
+} cam_still_more_t;
+
+typedef struct {
+    uint8_t burst_count;
+    uint8_t output_count;
+    uint8_t flash_bracketing[CAM_MAX_FLASH_BRACKETING];
+    uint8_t metadata_index;
+} cam_chroma_flash_t;
+
+typedef enum {
+    CAM_HDR_MODE_SINGLEFRAME,    /* Single frame HDR mode which does only tone mapping */
+    CAM_HDR_MODE_MULTIFRAME,     /* Multi frame HDR mode which needs two frames with 0.5x and 2x exposure respectively */
+} cam_hdr_mode_enum_t;
+
+typedef struct {
+    uint32_t hdr_enable;
+    uint32_t hdr_need_1x; /* when CAM_QCOM_FEATURE_HDR enabled, indicate if 1x is needed for output */
+    cam_hdr_mode_enum_t hdr_mode;
+} cam_hdr_param_t;
+
+typedef struct {
+    int32_t output_width;
+    int32_t output_height;
+} cam_scale_param_t;
+
+typedef struct {
+    uint8_t enable;
+    uint8_t burst_count;
+    uint8_t focus_steps[MAX_AF_BRACKETING_VALUES];
+    uint8_t output_count;
+    uint32_t meta_max_size;
+} cam_af_bracketing_t;
+
+typedef struct {
+    uint8_t enable;
+    uint8_t burst_count;
+} cam_flash_bracketing_t;
+
+typedef struct {
+    uint8_t enable;
+    uint8_t burst_count;
+    uint8_t zoom_threshold;
+} cam_opti_zoom_t;
+
+typedef struct {
+    size_t meta_max_size;
+} cam_true_portrait_t;
+
+typedef enum {
+    CAM_FLASH_OFF,
+    CAM_FLASH_ON
+} cam_flash_value_t;
+
+typedef struct {
+    cam_sensor_t sens_type;
+    cam_format_t native_format;
+} cam_sensor_type_t;
+
+typedef struct {
+    uint32_t result;
+    uint32_t header_size;
+    uint32_t width;
+    uint32_t height;
+    uint8_t data[0];
+} cam_misc_buf_t;
+
+typedef struct {
+    uint32_t misc_buffer_index;
+} cam_misc_buf_param_t;
+
+typedef struct {
+    /* reprocess feature mask */
+    cam_feature_mask_t feature_mask;
+
+    /* individual setting for features to be reprocessed */
+    cam_denoise_param_t denoise2d;
+    cam_rect_t input_crop;
+    cam_rotation_t rotation;
+    uint32_t flip;
+    int32_t sharpness;
+    int32_t effect;
+    cam_hdr_param_t hdr_param;
+    cam_scale_param_t scale_param;
+
+    uint8_t zoom_level;
+    cam_flash_value_t flash_value;
+    cam_misc_buf_param_t misc_buf_param;
+    uint32_t burst_cnt;
+    uint8_t cur_reproc_count;
+    uint8_t total_reproc_count;
+} cam_pp_feature_config_t;
+
+typedef struct {
+    uint32_t input_stream_id;
+    /* input source stream type */
+    cam_stream_type_t input_stream_type;
+} cam_pp_online_src_config_t;
+
+typedef struct {
+    /* image format */
+    cam_format_t input_fmt;
+
+    /* image dimension */
+    cam_dimension_t input_dim;
+
+    /* buffer plane information, will be calc based on stream_type, fmt,
+       dim, and padding_info(from stream config). Info including:
+       offset_x, offset_y, stride, scanline, plane offset */
+    cam_stream_buf_plane_info_t input_buf_planes;
+
+    /* number of input reprocess buffers */
+    uint8_t num_of_bufs;
+
+    /* input source type */
+    cam_stream_type_t input_type;
+
+} cam_pp_offline_src_config_t;
+
+/* reprocess stream input configuration */
+typedef struct {
+    /* input source config */
+    cam_reprocess_type_enum_t pp_type;
+    union {
+        cam_pp_online_src_config_t online;
+        cam_pp_offline_src_config_t offline;
+    };
+
+    /* pp feature config */
+    cam_pp_feature_config_t pp_feature_config;
+} cam_stream_reproc_config_t;
+
+typedef struct {
+    uint8_t crop_enabled;
+    cam_rect_t input_crop;
+} cam_crop_param_t;
+
+typedef struct {
+    uint8_t trigger;
+    int32_t trigger_id;
+} cam_trigger_t;
+
+typedef struct {
+    cam_denoise_param_t denoise2d;
+    cam_crop_param_t crop;
+    uint32_t flip;     /* 0 means no flip */
+    uint32_t uv_upsample; /* 0 means no chroma upsampling */
+    int32_t sharpness; /* 0 means no sharpness */
+    int32_t effect;
+    cam_rotation_t rotation;
+    cam_rotation_t device_rotation;
+} cam_per_frame_pp_config_t;
+
+typedef enum {
+    CAM_OPT_STAB_OFF,
+    CAM_OPT_STAB_ON,
+    CAM_OPT_STAB_MAX
+} cam_optical_stab_modes_t;
+
+typedef enum {
+    CAM_FILTER_ARRANGEMENT_RGGB,
+    CAM_FILTER_ARRANGEMENT_GRBG,
+    CAM_FILTER_ARRANGEMENT_GBRG,
+    CAM_FILTER_ARRANGEMENT_BGGR,
+
+    /* Sensor is not Bayer; output has 3 16-bit values for each pixel,
+     * instead of just 1 16-bit value per pixel.*/
+    CAM_FILTER_ARRANGEMENT_RGB,
+    /* Sensor is YUV; SW do not have access to actual RAW,
+     * output is interleaved UYVY */
+    CAM_FILTER_ARRANGEMENT_UYVY,
+    CAM_FILTER_ARRANGEMENT_YUYV,
+    CAM_FILTER_ARRANGEMENT_Y
+} cam_color_filter_arrangement_t;
+
+typedef enum {
+  CAM_AF_LENS_STATE_STATIONARY,
+  CAM_AF_LENS_STATE_MOVING,
+} cam_af_lens_state_t;
+
+typedef enum {
+    CAM_AWB_STATE_INACTIVE,
+    CAM_AWB_STATE_SEARCHING,
+    CAM_AWB_STATE_CONVERGED,
+    CAM_AWB_STATE_LOCKED
+} cam_awb_state_t;
+
+typedef enum {
+    CAM_FOCUS_UNCALIBRATED,
+    CAM_FOCUS_APPROXIMATE,
+    CAM_FOCUS_CALIBRATED
+} cam_focus_calibration_t;
+
+typedef enum {
+    CAM_TEST_PATTERN_OFF,
+    CAM_TEST_PATTERN_SOLID_COLOR,
+    CAM_TEST_PATTERN_COLOR_BARS,
+    CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY,
+    CAM_TEST_PATTERN_PN9,
+    CAM_TEST_PATTERN_CUSTOM1 = 256
+} cam_test_pattern_mode_t;
+
+typedef struct {
+    cam_test_pattern_mode_t mode;
+    int32_t r;
+    int32_t gr;
+    int32_t gb;
+    int32_t b;
+} cam_test_pattern_data_t;
+
+typedef enum {
+    CAM_AWB_D50,
+    CAM_AWB_D65,
+    CAM_AWB_D75,
+    CAM_AWB_A,
+    CAM_AWB_CUSTOM_A,
+    CAM_AWB_WARM_FLO,
+    CAM_AWB_COLD_FLO,
+    CAM_AWB_CUSTOM_FLO,
+    CAM_AWB_NOON,
+    CAM_AWB_CUSTOM_DAYLIGHT,
+    CAM_AWB_INVALID_ALL_LIGHT,
+} cam_illuminat_t;
+
+typedef enum {
+    LEGACY_RAW,
+    MIPI_RAW,
+} cam_opaque_raw_format_t;
+
+typedef enum {
+    CAM_PERF_NORMAL = 0,
+    CAM_PERF_HIGH,
+    CAM_PERF_HIGH_PERFORMANCE,
+} cam_perf_mode_t;
+
+typedef struct {
+    float real_gain;
+    float lux_idx;
+    float exp_time;
+} cam_intf_aec_t;
+
+#define CAM_INTF_AEC_DATA_MAX   (10)
+
+typedef struct {
+    uint32_t frame_count;
+    cam_intf_aec_t aec_data[CAM_INTF_AEC_DATA_MAX];
+} cam_intf_meta_imglib_input_aec_t;
+
+typedef struct {
+    cam_intf_meta_imglib_input_aec_t meta_imglib_input_aec;
+} cam_intf_meta_imglib_t;
+
+typedef struct {
+    uint8_t previewOnly;
+    uint64_t value;
+} cam_intf_parm_manual_3a_t;
+
+typedef enum {
+    CAM_MANUAL_CAPTURE_TYPE_OFF, /*Manual capture mode disabled*/
+    CAM_MANUAL_CAPTURE_TYPE_1,   /*Normal ZSL capture with limited 3A settings*/
+    CAM_MANUAL_CAPTURE_TYPE_2,   /*Low light capture mode */
+    CAM_MANUAL_CAPTURE_TYPE_3,   /*Offline RAW processing */
+    CAM_MANUAL_CAPTURE_TYPE_4    /*Offline RAW processing with multiple RAW*/
+} cam_manual_capture_type;
+
+typedef enum {
+    CAM_ANALYSIS_INFO_FD_STILL,   /*Analysis requirements for STILL PREVIEW*/
+    CAM_ANALYSIS_INFO_FD_VIDEO,   /*Analysis requirements for VIDEO*/
+    CAM_ANALYSIS_INFO_PAAF,       /*Analysis requirements for PAAF*/
+    CAM_ANALYSIS_INFO_MAX,     /*Max number*/
+} cam_analysis_info_type;
+
+typedef struct {
+    /* Whether the information here is valid or not */
+    uint8_t valid;
+
+    /* Whether analysis supported by hw */
+    uint8_t hw_analysis_supported;
+
+    /* Analysis stream max supported size */
+    cam_dimension_t analysis_max_res;
+
+    /* Analysis stream padding info */
+    cam_padding_info_t analysis_padding_info;
+
+    /* Analysis format */
+    cam_format_t analysis_format;
+
+    /* Analysis recommended size */
+    cam_dimension_t analysis_recommended_res;
+} cam_analysis_info_t;
+
+/** mm_camera_event_t: structure for event
+*    @server_event_type : event type from server
+*    @status : status of an event, value could be
+*              CAM_STATUS_SUCCESS
+*              CAM_STATUS_FAILED
+**/
+typedef struct {
+    cam_event_type_t server_event_type;
+    uint32_t status;
+} cam_event_t;
+
+typedef struct {
+    /* Information for DDM */
+    cam_stream_crop_info_t   sensor_crop_info; /* sensor crop info */
+    cam_stream_crop_info_t   camif_crop_info; /* CAMIF crop info */
+    cam_stream_crop_info_t   isp_crop_info; /* ISP crop info */
+    cam_stream_crop_info_t   cpp_crop_info; /* CPP crop info */
+    cam_focal_length_ratio_t af_focal_length_ratio; /* AF focal length ratio */
+    int32_t                  pipeline_flip; /* current pipeline flip and rotational parameters */
+    cam_rotation_info_t      rotation_info; /* rotation information */
+} cam_ddm_info_t;
+
+/***********************************
+* ENUM definition for custom parameter type
+************************************/
+typedef enum {
+    CAM_CUSTOM_PARM_EXAMPLE,
+    CAM_CUSTOM_PARM_MAX,
+} cam_custom_parm_type;
+
+#endif /* __QCAMERA_TYPES_H__ */
diff --git a/msmcobalt/QCamera2/stack/common/mm_camera_interface.h b/msmcobalt/QCamera2/stack/common/mm_camera_interface.h
new file mode 100644
index 0000000..79283a1
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/common/mm_camera_interface.h
@@ -0,0 +1,918 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_CAMERA_INTERFACE_H__
+#define __MM_CAMERA_INTERFACE_H__
+
+// System dependencies
+#include <media/msmb_camera.h>
+
+// Camera dependencies
+#include "cam_intf.h"
+#include "cam_queue.h"
+
+#define MM_CAMERA_MAX_NUM_SENSORS MSM_MAX_CAMERA_SENSORS
+#define MM_CAMERA_MAX_NUM_FRAMES CAM_MAX_NUM_BUFS_PER_STREAM
+/* num of channels allowed in a camera obj */
+#define MM_CAMERA_CHANNEL_MAX 16
+
+#define PAD_TO_SIZE(size, padding) \
+        ((size + (typeof(size))(padding - 1)) & \
+        (typeof(size))(~(padding - 1)))
+
+#define CEIL_DIVISION(n, d) ((n+d-1)/d)
+
+/** CAM_DUMP_TO_FILE:
+ *  @filename: file name
+ *  @name:filename
+ *  @index: index of the file
+ *  @extn: file extension
+ *  @p_addr: address of the buffer
+ *  @len: buffer length
+ *
+ *  dump the image to the file
+ **/
+#define CAM_DUMP_TO_FILE(path, name, index, extn, p_addr, len) ({ \
+  size_t rc = 0; \
+  char filename[FILENAME_MAX]; \
+  if (index >= 0) \
+    snprintf(filename, FILENAME_MAX, "%s/%s%d.%s", path, name, index, extn); \
+  else \
+    snprintf(filename, FILENAME_MAX, "%s/%s.%s", path, name, extn); \
+  FILE *fp = fopen(filename, "w+"); \
+  if (fp) { \
+    rc = fwrite(p_addr, 1, len, fp); \
+    LOGE("written size %d", len); \
+    fclose(fp); \
+  } else { \
+    LOGE("open %s failed", filename); \
+  } \
+})
+
+/* Declaring Buffer structure */
+struct mm_camera_buf_def;
+
+/** mm_camera_plane_def_t : structure for frame plane info
+*    @num_planes : num of planes for the frame buffer, to be
+*               filled during mem allocation
+*    @planes : plane info for the frame buffer, to be filled
+*               during mem allocation
+**/
+typedef struct {
+    int8_t num_planes;
+    struct v4l2_plane planes[VIDEO_MAX_PLANES];
+} mm_camera_plane_buf_def_t;
+
+/** mm_camera_user_buf_def_t : structure for frame plane info
+*    @num_buffers : num of buffers in this user defined structure
+*    @bufs_used : actual number of buffer filled
+*    @buf_in_use : flag to notify buffer usage status.
+*    @plane_buf : Plane buffer array pointer.
+**/
+typedef struct {
+    uint8_t num_buffers;
+    uint8_t bufs_used;     /*Num of Buffer filled by Kernel*/
+    uint8_t buf_in_use;  /* Container buffer is freed to fill*/
+    int32_t buf_idx[MSM_CAMERA_MAX_USER_BUFF_CNT];
+    struct mm_camera_buf_def *plane_buf;
+} mm_camera_user_buf_def_t;
+
+/** mm_camera_buf_def_t: structure for stream frame buf
+*    @stream_id : stream handler to uniquely identify a stream
+*               object
+*    @buf_idx : index of the buf within the stream bufs, to be
+*               filled during mem allocation
+*    @timespec_ts : time stamp, to be filled when DQBUF is
+*                 called
+*    @frame_idx : frame sequence num, to be filled when DQBUF
+*    @plane_buf  : Frame plane definition
+*    @fd : file descriptor of the frame buffer, to be filled
+*        during mem allocation
+*    @buffer : pointer to the frame buffer, to be filled during
+*            mem allocation
+*    @frame_len : length of the whole frame, to be filled during
+*               mem allocation
+*    @mem_info : user specific pointer to additional mem info
+*    @flags:  v4l2_buffer flags, used to report error in data buffers
+**/
+typedef struct mm_camera_buf_def {
+    uint32_t stream_id;
+    cam_stream_type_t stream_type;
+    cam_stream_buf_type buf_type;
+    uint32_t buf_idx;
+    uint8_t is_uv_subsampled;
+    struct timespec ts;
+    uint32_t frame_idx;
+    union {
+        mm_camera_plane_buf_def_t planes_buf;
+        mm_camera_user_buf_def_t user_buf;
+    };
+    int fd;
+    void *buffer;
+    size_t frame_len;
+    void *mem_info;
+    uint32_t flags;
+} mm_camera_buf_def_t;
+
+/** mm_camera_super_buf_t: super buf structure for bundled
+*   stream frames
+*    @camera_handle : camera handler to uniquely identify
+*              a camera object
+*    @ch_id : channel handler to uniquely identify a channel
+*           object
+*    @num_bufs : number of buffers in the super buf, should not
+*              exceed MAX_STREAM_NUM_IN_BUNDLE
+*    @bufs : array of buffers in the bundle
+**/
+typedef struct {
+    uint32_t camera_handle;
+    uint32_t ch_id;
+    uint32_t num_bufs;
+    uint8_t bUnlockAEC;
+    uint8_t bReadyForPrepareSnapshot;
+    mm_camera_buf_def_t* bufs[MAX_STREAM_NUM_IN_BUNDLE];
+} mm_camera_super_buf_t;
+
+/** mm_camera_req_buf_type_t
+* Request type for super buf from channel
+**/
+typedef enum {
+    MM_CAMERA_REQ_SUPER_BUF,
+    MM_CAMERA_REQ_FRAME_SYNC_BUF
+} mm_camera_req_buf_type_t;
+
+/** mm_camera_req_buf_t: Attributes for super buf request
+*
+*    @type : type of super buf requested
+*    @num_buf_requested : num of super bufs requested
+*    @num_retro_buf_requested : number of retro bufs requested
+*    @primary_only : specifies if only primary camera frame for a dual
+*     camera is requested
+**/
+typedef struct {
+    mm_camera_req_buf_type_t type;
+    uint32_t num_buf_requested;
+    uint32_t num_retro_buf_requested;
+    uint8_t primary_only;
+} mm_camera_req_buf_t;
+
+typedef cam_event_t mm_camera_event_t;
+
+/** mm_camera_event_notify_t: function definition for event
+*   notify handling
+*    @camera_handle : camera handler
+*    @evt : pointer to an event struct
+*    @user_data: user data pointer
+**/
+typedef void (*mm_camera_event_notify_t)(uint32_t camera_handle,
+                                         mm_camera_event_t *evt,
+                                         void *user_data);
+
+/** mm_camera_buf_notify_t: function definition for frame notify
+*   handling
+*    @mm_camera_super_buf_t : received frame buffers
+*    @user_data: user data pointer
+**/
+typedef void (*mm_camera_buf_notify_t) (mm_camera_super_buf_t *bufs,
+                                        void *user_data);
+
+/** map_stream_buf_op_t: function definition for operation of
+*   mapping stream buffers via domain socket
+*    @frame_idx : buffer index within stream buffers
+*    @plane_idx    : plane index. If all planes share the same
+*                   fd, plane_idx = -1; otherwise, plane_idx is
+*                   the index to plane (0..num_of_planes)
+*    @fd : file descriptor of the stream buffer
+*    @size: size of the stream buffer
+*    @buffer: Pointer to buffer to register
+*    @userdata : user data pointer
+**/
+typedef int32_t (*map_stream_buf_op_t) (uint32_t frame_idx,
+                                        int32_t plane_idx,
+                                        int fd,
+                                        size_t size,
+                                        void *buffer,
+                                        cam_mapping_buf_type type,
+                                        void *userdata);
+
+typedef int32_t (*map_stream_bufs_op_t) (const cam_buf_map_type_list *buf_map_list,
+                                         void *userdata);
+
+/** unmap_stream_buf_op_t: function definition for operation of
+*                          unmapping stream buffers via domain
+*                          socket
+*    @frame_idx : buffer index within stream buffers
+*    @plane_idx : plane index. If all planes share the same
+*                 fd, plane_idx = -1; otherwise, plane_idx is
+*                 the index to plane (0..num_of_planes)
+*    @userdata : user data pointer
+**/
+typedef int32_t (*unmap_stream_buf_op_t) (uint32_t frame_idx,
+                                          int32_t plane_idx,
+                                          cam_mapping_buf_type type,
+                                          void *userdata);
+
+/** mm_camera_map_unmap_ops_tbl_t: virtual table
+*                      for mapping/unmapping stream buffers via
+*                      domain socket
+*    @map_ops : operation for mapping
+*    @unmap_ops : operation for unmapping
+*    @userdata: user data pointer
+**/
+typedef struct {
+    map_stream_buf_op_t map_ops;
+    map_stream_bufs_op_t bundled_map_ops;
+    unmap_stream_buf_op_t unmap_ops;
+    void *userdata;
+} mm_camera_map_unmap_ops_tbl_t;
+
+/** mm_camera_stream_mem_vtbl_t: virtual table for stream
+*                      memory allocation and deallocation
+*    @get_bufs : function definition for allocating
+*                stream buffers
+*    @put_bufs : function definition for deallocating
+*                stream buffers
+*    @user_data: user data pointer
+**/
+typedef struct {
+  void *user_data;
+  int32_t (*set_config_ops) (mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+          void *user_data);
+  int32_t (*get_bufs) (cam_frame_len_offset_t *offset,
+                       uint8_t *num_bufs,
+                       uint8_t **initial_reg_flag,
+                       mm_camera_buf_def_t **bufs,
+                       mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                       void *user_data);
+  int32_t (*put_bufs) (mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                       void *user_data);
+  int32_t (*invalidate_buf)(uint32_t index, void *user_data);
+  int32_t (*clean_invalidate_buf)(uint32_t index, void *user_data);
+} mm_camera_stream_mem_vtbl_t;
+
+/** mm_camera_stream_config_t: structure for stream
+*                              configuration
+*    @stream_info : pointer to a stream info structure
+*    @padding_info: padding info obtained from query capability
+*    @mem_tbl : memory operation table for
+*              allocating/deallocating stream buffers
+*    @stream_cb_sync : SYNC callback handling stream frame notify
+*    @stream_cb : ASYNC callback handling stream frame notify
+*    @userdata : user data pointer
+**/
+typedef struct {
+    cam_stream_info_t *stream_info;
+    cam_padding_info_t padding_info;
+    mm_camera_stream_mem_vtbl_t mem_vtbl;
+    mm_camera_buf_notify_t stream_cb_sync;
+    mm_camera_buf_notify_t stream_cb;
+    void *userdata;
+} mm_camera_stream_config_t;
+
+/** mm_camera_super_buf_notify_mode_t: enum for super buffer
+*                                      notification mode
+*    @MM_CAMERA_SUPER_BUF_NOTIFY_BURST :
+*       ZSL use case: get burst of frames
+*    @MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS :
+*       get continuous frames: when the super buf is ready
+*       dispatch it to HAL
+**/
+typedef enum {
+    MM_CAMERA_SUPER_BUF_NOTIFY_BURST = 0,
+    MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS,
+    MM_CAMERA_SUPER_BUF_NOTIFY_MAX
+} mm_camera_super_buf_notify_mode_t;
+
+/** mm_camera_super_buf_priority_t: enum for super buffer
+*                                   matching priority
+*    @MM_CAMERA_SUPER_BUF_PRIORITY_NORMAL :
+*       Save the frame no matter focused or not. Currently only
+*       this type is supported.
+*    @MM_CAMERA_SUPER_BUF_PRIORITY_FOCUS :
+*       only queue the frame that is focused. Will enable meta
+*       data header to carry focus info
+*    @MM_CAMERA_SUPER_BUF_PRIORITY_EXPOSURE_BRACKETING :
+*       after shutter, only queue matched exposure index
+**/
+typedef enum {
+    MM_CAMERA_SUPER_BUF_PRIORITY_NORMAL = 0,
+    MM_CAMERA_SUPER_BUF_PRIORITY_FOCUS,
+    MM_CAMERA_SUPER_BUF_PRIORITY_EXPOSURE_BRACKETING,
+    MM_CAMERA_SUPER_BUF_PRIORITY_LOW,/* Bundled metadata frame may not match*/
+    MM_CAMERA_SUPER_BUF_PRIORITY_MAX
+} mm_camera_super_buf_priority_t;
+
+/** mm_camera_advanced_capture_t: enum for advanced capture type.
+*    @MM_CAMERA_AF_BRACKETING :
+*       to enable AF Bracketing.
+*    @MM_CAMERA_AE_BRACKETING :
+*       to enable AE Bracketing.
+*    @MM_CAMERA_FLASH_BRACKETING :
+*       to enable Flash Bracketing.
+*    @MM_CAMERA_ZOOM_1X :
+*       to enable zoom 1x capture request
+**/
+typedef enum {
+   MM_CAMERA_AF_BRACKETING = 0,
+   MM_CAMERA_AE_BRACKETING,
+   MM_CAMERA_FLASH_BRACKETING,
+   MM_CAMERA_ZOOM_1X,
+   MM_CAMERA_FRAME_CAPTURE,
+} mm_camera_advanced_capture_t;
+
+/** mm_camera_stream_cb_type: enum for stream buffer callback type.
+*    @MM_CAMERA_STREAM_CB_TYPE_ASYNC :
+*       callback is async type. buffer process done in client thread context
+*    @MM_CAMERA_STREAM_CB_TYPE_SYNC :
+*       callback is sync type. buffer process done interface thread context
+**/
+typedef enum {
+    MM_CAMERA_STREAM_CB_TYPE_ASYNC,
+    MM_CAMERA_STREAM_CB_TYPE_SYNC,
+} mm_camera_stream_cb_type;
+
+
+/** mm_camera_channel_attr_t: structure for defining channel
+*                             attributes
+*    @notify_mode : notify mode: burst or continuous
+*    @water_mark : queue depth. Only valid for burst mode
+*    @look_back : look back how many frames from last buf.
+*                 Only valid for burst mode
+*    @post_frame_skip : after send first frame to HAL, how many
+*                     frames needing to be skipped for next
+*                     delivery. Only valid for burst mode
+*    @max_unmatched_frames : max number of unmatched frames in
+*                     queue
+*    @enable_frame_sync: Enables frame sync for dual camera
+*    @priority : save matched priority frames only
+*    @user_expected_frame_id : Number of frames, camera interface
+*                     will wait for getting the instant capture frame.
+**/
+typedef struct {
+    mm_camera_super_buf_notify_mode_t notify_mode;
+    uint8_t water_mark;
+    uint8_t look_back;
+    uint8_t post_frame_skip;
+    uint8_t max_unmatched_frames;
+    uint8_t enable_frame_sync;
+    mm_camera_super_buf_priority_t priority;
+    uint8_t user_expected_frame_id;
+} mm_camera_channel_attr_t;
+
+typedef struct {
+    /** query_capability: function definition for querying static
+     *                    camera capabilities
+     *    @camera_handle : camera handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     *  Note: would assume cam_capability_t is already mapped
+     **/
+    int32_t (*query_capability) (uint32_t camera_handle);
+
+    /** register_event_notify: function definition for registering
+     *                         for event notification
+     *    @camera_handle : camera handler
+     *    @evt_cb : callback for event notify
+     *    @user_data : user data pointer
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*register_event_notify) (uint32_t camera_handle,
+                                      mm_camera_event_notify_t evt_cb,
+                                      void *user_data);
+
+    /** close_camera: function definition for closing a camera
+     *    @camera_handle : camera handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*close_camera) (uint32_t camera_handle);
+
+    /** map_buf: function definition for mapping a camera buffer
+     *           via domain socket
+     *    @camera_handle : camera handler
+     *    @buf_type : type of mapping buffers, can be value of
+     *                CAM_MAPPING_BUF_TYPE_CAPABILITY
+     *                CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+     *                CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+     *    @fd : file descriptor of the stream buffer
+     *    @size :  size of the stream buffer
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*map_buf) (uint32_t camera_handle,
+                        uint8_t buf_type,
+                        int fd,
+                        size_t size,
+                        void *buffer);
+
+    /** map_bufs: function definition for mapping multiple camera buffers
+     *           via domain socket
+     *    @camera_handle : camera handler
+     *    @buf_map_list : list of buffers to map
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*map_bufs) (uint32_t camera_handle,
+                         const cam_buf_map_type_list *buf_map_list);
+
+    /** unmap_buf: function definition for unmapping a camera buffer
+     *           via domain socket
+     *    @camera_handle : camera handler
+     *    @buf_type : type of mapping buffers, can be value of
+     *                CAM_MAPPING_BUF_TYPE_CAPABILITY
+     *                CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+     *                CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*unmap_buf) (uint32_t camera_handle,
+                          uint8_t buf_type);
+
+    /** set_parms: function definition for setting camera
+     *             based parameters to server
+     *    @camera_handle : camera handler
+     *    @parms : batch for parameters to be set, stored in
+     *               parm_buffer_t
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     *  Note: would assume parm_buffer_t is already mapped, and
+     *       according parameter entries to be set are filled in the
+     *       buf before this call
+     **/
+    int32_t (*set_parms) (uint32_t camera_handle,
+                          parm_buffer_t *parms);
+
+    /** get_parms: function definition for querying camera
+     *             based parameters from server
+     *    @camera_handle : camera handler
+     *    @parms : batch for parameters to be queried, stored in
+     *               parm_buffer_t
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     *  Note: would assume parm_buffer_t is already mapped, and
+     *       according parameter entries to be queried are filled in
+     *       the buf before this call
+     **/
+    int32_t (*get_parms) (uint32_t camera_handle,
+                          parm_buffer_t *parms);
+
+    /** do_auto_focus: function definition for performing auto focus
+     *    @camera_handle : camera handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     *  Note: if this call success, we will always assume there will
+     *        be an auto_focus event following up.
+     **/
+    int32_t (*do_auto_focus) (uint32_t camera_handle);
+
+    /** cancel_auto_focus: function definition for cancelling
+     *                     previous auto focus request
+     *    @camera_handle : camera handler
+    *  Return value: 0 -- success
+    *                -1 -- failure
+     **/
+    int32_t (*cancel_auto_focus) (uint32_t camera_handle);
+
+    /** prepare_snapshot: function definition for preparing hardware
+     *                    for snapshot.
+     *    @camera_handle : camera handler
+     *    @do_af_flag    : flag indicating if AF needs to be done
+     *                     0 -- no AF needed
+     *                     1 -- AF needed
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*prepare_snapshot) (uint32_t camera_handle,
+                                 int32_t do_af_flag);
+
+    /** start_zsl_snapshot: function definition for starting
+     *                    zsl snapshot.
+     *    @camera_handle : camera handler
+     *    @ch_id         : channel id
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*start_zsl_snapshot) (uint32_t camera_handle, uint32_t ch_id);
+
+    /** stop_zsl_snapshot: function definition for stopping
+     *                    zsl snapshot.
+     *    @camera_handle : camera handler
+     *    @ch_id         : channel id
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*stop_zsl_snapshot) (uint32_t camera_handle, uint32_t ch_id);
+
+    /** add_channel: function definition for adding a channel
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @attr : pointer to channel attribute structure
+     *    @channel_cb : callback to handle bundled super buffer
+     *    @userdata : user data pointer
+     *  Return value: channel id, zero is invalid ch_id
+     * Note: attr, channel_cb, and userdata can be NULL if no
+     *       superbufCB is needed
+     **/
+    uint32_t (*add_channel) (uint32_t camera_handle,
+                             mm_camera_channel_attr_t *attr,
+                             mm_camera_buf_notify_t channel_cb,
+                             void *userdata);
+
+    /** delete_channel: function definition for deleting a channel
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*delete_channel) (uint32_t camera_handle,
+                               uint32_t ch_id);
+
+    /** get_bundle_info: function definition for querying bundle
+     *  info of the channel
+     *    @camera_handle : camera handler
+     *    @ch_id         : channel handler
+     *    @bundle_info   : bundle info to be filled in
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*get_bundle_info) (uint32_t camera_handle,
+                                uint32_t ch_id,
+                                cam_bundle_config_t *bundle_info);
+
+    /** add_stream: function definition for adding a stream
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *  Return value: stream_id. zero is invalid stream_id
+     **/
+    uint32_t (*add_stream) (uint32_t camera_handle,
+                            uint32_t ch_id);
+
+    /** delete_stream: function definition for deleting a stream
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @stream_id : stream handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*delete_stream) (uint32_t camera_handle,
+                              uint32_t ch_id,
+                              uint32_t stream_id);
+
+    /** link_stream: function definition for linking a stream
+     *    @camera_handle : camera handle
+     *    @ch_id : channel handle from which the stream originates
+     *    @stream_id : stream handle
+     *    @linked_ch_id: channel handle in which the stream will be linked
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*link_stream) (uint32_t camera_handle,
+          uint32_t ch_id,
+          uint32_t stream_id,
+          uint32_t linked_ch_id);
+
+    /** config_stream: function definition for configuring a stream
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @stream_id : stream handler
+     *    @config : pointer to a stream configuration structure
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*config_stream) (uint32_t camera_handle,
+                              uint32_t ch_id,
+                              uint32_t stream_id,
+                              mm_camera_stream_config_t *config);
+
+    /** map_stream_buf: function definition for mapping
+     *                 stream buffer via domain socket
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @stream_id : stream handler
+     *    @buf_type : type of mapping buffers, can be value of
+     *             CAM_MAPPING_BUF_TYPE_STREAM_BUF
+     *             CAM_MAPPING_BUF_TYPE_STREAM_INFO
+     *             CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+     *    @buf_idx : buffer index within the stream buffers
+     *    @plane_idx : plane index. If all planes share the same fd,
+     *               plane_idx = -1; otherwise, plane_idx is the
+     *               index to plane (0..num_of_planes)
+     *    @fd : file descriptor of the stream buffer
+     *    @size :  size of the stream buffer
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*map_stream_buf) (uint32_t camera_handle,
+                               uint32_t ch_id,
+                               uint32_t stream_id,
+                               uint8_t buf_type,
+                               uint32_t buf_idx,
+                               int32_t plane_idx,
+                               int fd,
+                               size_t size,
+                               void *buffer);
+
+    /** map_stream_bufs: function definition for mapping multiple
+     *                 stream buffers via domain socket
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @buf_map_list : list of buffers to map
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*map_stream_bufs) (uint32_t camera_handle,
+                                uint32_t ch_id,
+                                const cam_buf_map_type_list *buf_map_list);
+
+    /** unmap_stream_buf: function definition for unmapping
+     *                 stream buffer via domain socket
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @stream_id : stream handler
+     *    @buf_type : type of mapping buffers, can be value of
+     *             CAM_MAPPING_BUF_TYPE_STREAM_BUF
+     *             CAM_MAPPING_BUF_TYPE_STREAM_INFO
+     *             CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+     *    @buf_idx : buffer index within the stream buffers
+     *    @plane_idx : plane index. If all planes share the same fd,
+     *               plane_idx = -1; otherwise, plane_idx is the
+     *               index to plane (0..num_of_planes)
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*unmap_stream_buf) (uint32_t camera_handle,
+                                 uint32_t ch_id,
+                                 uint32_t stream_id,
+                                 uint8_t buf_type,
+                                 uint32_t buf_idx,
+                                 int32_t plane_idx);
+
+    /** set_stream_parms: function definition for setting stream
+     *                    specific parameters to server
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @stream_id : stream handler
+     *    @parms : batch for parameters to be set
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     *  Note: would assume parm buffer is already mapped, and
+     *       according parameter entries to be set are filled in the
+     *       buf before this call
+     **/
+    int32_t (*set_stream_parms) (uint32_t camera_handle,
+                                 uint32_t ch_id,
+                                 uint32_t s_id,
+                                 cam_stream_parm_buffer_t *parms);
+
+    /** get_stream_parms: function definition for querying stream
+     *                    specific parameters from server
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @stream_id : stream handler
+     *    @parms : batch for parameters to be queried
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     *  Note: would assume parm buffer is already mapped, and
+     *       according parameter entries to be queried are filled in
+     *       the buf before this call
+     **/
+    int32_t (*get_stream_parms) (uint32_t camera_handle,
+                                 uint32_t ch_id,
+                                 uint32_t s_id,
+                                 cam_stream_parm_buffer_t *parms);
+
+    /** start_channel: function definition for starting a channel
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     * This call will start all streams belonging to the channel
+     **/
+    int32_t (*start_channel) (uint32_t camera_handle,
+                              uint32_t ch_id);
+
+    /** stop_channel: function definition for stopping a channel
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     * This call will stop all streams belonging to the channel
+     **/
+    int32_t (*stop_channel) (uint32_t camera_handle,
+                             uint32_t ch_id);
+
+    /** qbuf: function definition for queuing a frame buffer back to
+     *        kernel for reuse
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @buf : a frame buffer to be queued back to kernel
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*qbuf) (uint32_t camera_handle,
+                     uint32_t ch_id,
+                     mm_camera_buf_def_t *buf);
+
+    /** get_queued_buf_count: function definition for querying queued buf count
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @stream_id : stream handler
+     *  Return value: queued buf count
+     **/
+    int32_t (*get_queued_buf_count) (uint32_t camera_handle,
+            uint32_t ch_id,
+            uint32_t stream_id);
+
+    /** request_super_buf: function definition for requesting frames
+     *                     from superbuf queue in burst mode
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @buf : provides info related to the super buf request
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*request_super_buf) (uint32_t camera_handle,
+                                  uint32_t ch_id,
+                                  mm_camera_req_buf_t *buf);
+
+    /** cancel_super_buf_request: function definition for canceling
+     *                     frames dispatched from superbuf queue in
+     *                     burst mode
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*cancel_super_buf_request) (uint32_t camera_handle,
+                                         uint32_t ch_id);
+
+    /** flush_super_buf_queue: function definition for flushing out
+     *                     all frames in the superbuf queue up to frame_idx,
+     *                     even if frames with frame_idx come in later than
+     *                     this call.
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @frame_idx : frame index up until which all superbufs are flushed
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*flush_super_buf_queue) (uint32_t camera_handle,
+                                      uint32_t ch_id, uint32_t frame_idx);
+
+    /** configure_notify_mode: function definition for configuring the
+     *                         notification mode of channel
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @notify_mode : notification mode
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*configure_notify_mode) (uint32_t camera_handle,
+                                      uint32_t ch_id,
+                                      mm_camera_super_buf_notify_mode_t notify_mode);
+
+   /** process_advanced_capture: function definition for start/stop advanced capture
+     *                    for snapshot.
+     *    @camera_handle : camera handle
+     *    @ch_id : channel handler
+     *    @type :  advanced capture type.
+     *    @trigger    : flag indicating if advanced capture needs to be done
+     *                     0 -- stop advanced capture
+     *                     1 -- start advanced capture
+     *    @in_value: Input value. Configuration
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*process_advanced_capture) (uint32_t camera_handle,
+             uint32_t ch_id, mm_camera_advanced_capture_t type,
+             int8_t start_flag, void *in_value);
+
+   /** get_session_id: gets the backend session id from the kernel
+     *    @camera_handle : camera handle
+     *    @sessionid : session id to be retrieved
+     *     Return value: 0 -- success
+     *                -1 -- failure
+     *  Note: if this call succeeds, we will get a valid session id
+     **/
+    int32_t (*get_session_id) (uint32_t camera_handle,
+            uint32_t* sessionid);
+
+    /** sync_related_sensors: sends sync cmd
+      *    @camera_handle : camera handle
+      *    @related_cam_info : related cam info to be sent to server
+      *     Return value: 0 -- success
+      *                -1 -- failure
+      *  Note: if this call succeeds, we will get linking established in back end
+      **/
+     int32_t (*sync_related_sensors) (uint32_t camera_handle,
+            cam_sync_related_sensors_event_info_t*
+            related_cam_info);
+    /** flush: function definition for flush
+     *  @camera_handle: camera handler
+     *  Return value: 0 -- success
+     *               -1 -- failure
+     **/
+    int32_t (*flush) (uint32_t camera_handle);
+
+   /** register_stream_buf_cb: function definition for registering special stream callbacks
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @stream_id : stream handler
+     *    @buf_cb : callback function pointer
+     *    @cb_type : Callback type SYNC/ASYNC
+     *    @userdata : user data pointer
+     *    Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*register_stream_buf_cb) (uint32_t camera_handle,
+            uint32_t ch_id, uint32_t stream_id, mm_camera_buf_notify_t buf_cb,
+            mm_camera_stream_cb_type cb_type, void *userdata);
+} mm_camera_ops_t;
+
+/** mm_camera_vtbl_t: virtual table for camera operations
+*    @camera_handle : camera handler which uniquely identifies a
+*                   camera object
+*    @ops : API call table
+**/
+typedef struct {
+    uint32_t camera_handle;
+    mm_camera_ops_t *ops;
+} mm_camera_vtbl_t;
+
+/* return number of cameras */
+uint8_t get_num_of_cameras();
+
+/* return reference pointer of camera vtbl */
+int32_t camera_open(uint8_t camera_idx, mm_camera_vtbl_t **camera_obj);
+
+/* helper functions */
+int32_t mm_stream_calc_offset_preview(cam_stream_info_t *stream_info,
+        cam_dimension_t *dim,
+        cam_padding_info_t *padding,
+        cam_stream_buf_plane_info_t *buf_planes);
+
+int32_t mm_stream_calc_offset_post_view(cam_format_t fmt,
+        cam_dimension_t *dim,
+        cam_stream_buf_plane_info_t *buf_planes);
+
+int32_t mm_stream_calc_offset_snapshot(cam_format_t fmt,
+        cam_dimension_t *dim,
+        cam_padding_info_t *padding,
+        cam_stream_buf_plane_info_t *buf_planes);
+
+int32_t mm_stream_calc_offset_raw(cam_format_t fmt,
+        cam_dimension_t *dim,
+        cam_padding_info_t *padding,
+        cam_stream_buf_plane_info_t *buf_planes);
+
+int32_t mm_stream_calc_offset_video(cam_format_t fmt,
+        cam_dimension_t *dim,
+        cam_stream_buf_plane_info_t *buf_planes);
+
+int32_t mm_stream_calc_offset_metadata(cam_dimension_t *dim,
+        cam_padding_info_t *padding,
+        cam_stream_buf_plane_info_t *buf_planes);
+
+int32_t mm_stream_calc_offset_postproc(cam_stream_info_t *stream_info,
+        cam_padding_info_t *padding,
+        cam_stream_buf_plane_info_t *buf_planes);
+
+int32_t mm_stream_calc_offset_analysis(cam_format_t fmt,
+        cam_dimension_t *dim,
+        cam_padding_info_t *padding,
+        cam_stream_buf_plane_info_t *buf_planes);
+
+uint32_t mm_stream_calc_lcm (int32_t num1, int32_t num2);
+
+struct camera_info *get_cam_info(uint32_t camera_id, cam_sync_type_t *pCamType);
+
+uint8_t is_yuv_sensor(uint32_t camera_id);
+
+#endif /*__MM_CAMERA_INTERFACE_H__*/
diff --git a/msmcobalt/QCamera2/stack/common/mm_camera_shim.h b/msmcobalt/QCamera2/stack/common/mm_camera_shim.h
new file mode 100644
index 0000000..b7b0eb9
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/common/mm_camera_shim.h
@@ -0,0 +1,99 @@
+/* Copyright (c) 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_CAMERA_SHIMLAYER_H_
+#define __MM_CAMERA_SHIMLAYER_H_
+
+#include "cam_intf.h"
+
+/*
+ * MCT shim layer APIs
+ */
+#define SHIMLAYER_LIB "/system/vendor/lib/libmmcamera2_mct_shimlayer.so"
+
+struct cam_shim_packet;
+
+/*
+* Bundled events structure.
+*/
+typedef struct {
+    uint8_t cmd_count;            /* Total number of events in this packet */
+    struct cam_shim_packet *cmd;  /*Events to process*/
+} cam_shim_cmd_packet_t;
+
+/*
+* Bundled stream event structure
+*/
+typedef struct {
+    uint8_t stream_count;                                /*Number of streams in a bundle*/
+    cam_shim_cmd_packet_t stream_event[MAX_NUM_STREAMS]; /*Event for different streams*/
+} cam_shim_stream_cmd_packet_t;
+
+/*
+* Command types to process in shim layer
+*/
+typedef enum {
+    CAM_SHIM_SET_PARM,   /*v4l2 set parameter*/
+    CAM_SHIM_GET_PARM,   /*v4l2 get parameter*/
+    CAM_SHIM_REG_BUF,    /*Reg/unreg buffers with back-end*/
+    CAM_SHIM_BUNDLE_CMD, /*Bundled command for streams*/
+} cam_shim_cmd_type;
+
+typedef struct {
+    uint32_t command;    /*V4L2 or private command*/
+    uint32_t stream_id;  /*streamID*/
+    void *value;          /*command value/data*/
+} cam_shim_cmd_data;
+
+/*
+* Structure to communicate command with shim layer
+*/
+typedef struct cam_shim_packet {
+    uint32_t session_id;
+    cam_shim_cmd_type cmd_type;                 /*Command type to process*/
+    union {
+        cam_shim_cmd_data cmd_data;             /*get/set parameter structure*/
+        cam_reg_buf_t reg_buf;                  /*Buffer register and unregister*/
+        cam_shim_stream_cmd_packet_t bundle_cmd;/*Bundled command*/
+    };
+} cam_shim_packet_t;
+
+typedef int32_t (*mm_camera_shim_event_handler_func)(uint32_t session_id,
+        cam_event_t *event);
+
+typedef struct {
+    cam_status_t (*mm_camera_shim_open_session) (int session,
+            mm_camera_shim_event_handler_func evt_cb);
+    int32_t (*mm_camera_shim_close_session)(int session);
+    int32_t (*mm_camera_shim_send_cmd)(cam_shim_packet_t *event);
+} mm_camera_shim_ops_t;
+
+int32_t (*mm_camera_shim_module_init)(mm_camera_shim_ops_t *shim_ops);
+
+#endif  /*__MM_CAMERA_SHIMLAYER_H_*/
diff --git a/msmcobalt/QCamera2/stack/common/mm_jpeg_interface.h b/msmcobalt/QCamera2/stack/common/mm_jpeg_interface.h
new file mode 100644
index 0000000..9f0ac7f
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/common/mm_jpeg_interface.h
@@ -0,0 +1,408 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef MM_JPEG_INTERFACE_H_
+#define MM_JPEG_INTERFACE_H_
+
+// System dependencies
+#include <stdbool.h>
+
+// Camera dependencies
+#include "QOMX_JpegExtensions.h"
+#include "cam_intf.h"
+
+#define MM_JPEG_MAX_PLANES 3
+#define MM_JPEG_MAX_BUF CAM_MAX_NUM_BUFS_PER_STREAM
+#define QUANT_SIZE 64
+#define QTABLE_MAX 2
+#define MM_JPEG_MAX_MPO_IMAGES 2
+
+typedef enum {
+  MM_JPEG_FMT_YUV,
+  MM_JPEG_FMT_BITSTREAM
+} mm_jpeg_format_t;
+
+typedef enum {
+  MM_JPEG_TYPE_JPEG,
+  MM_JPEG_TYPE_MPO
+} mm_jpeg_image_type_t;
+
+typedef struct {
+  cam_ae_exif_debug_t ae_debug_params;
+  cam_awb_exif_debug_t awb_debug_params;
+  cam_af_exif_debug_t af_debug_params;
+  cam_asd_exif_debug_t asd_debug_params;
+  cam_stats_buffer_exif_debug_t stats_debug_params;
+  cam_bestats_buffer_exif_debug_t bestats_debug_params;
+  cam_bhist_buffer_exif_debug_t bhist_debug_params;
+  cam_q3a_tuning_info_t q3a_tuning_debug_params;
+  uint8_t ae_debug_params_valid;
+  uint8_t awb_debug_params_valid;
+  uint8_t af_debug_params_valid;
+  uint8_t asd_debug_params_valid;
+  uint8_t stats_debug_params_valid;
+  uint8_t bestats_debug_params_valid;
+  uint8_t bhist_debug_params_valid;
+  uint8_t q3a_tuning_debug_params_valid;
+} mm_jpeg_debug_exif_params_t;
+
+typedef struct {
+  cam_3a_params_t cam_3a_params;
+  uint8_t cam_3a_params_valid;
+  cam_sensor_params_t sensor_params;
+  mm_jpeg_debug_exif_params_t *debug_params;
+} mm_jpeg_exif_params_t;
+
+typedef struct {
+  /* Indicates if it is a single jpeg or part of a multi picture sequence */
+  mm_jpeg_image_type_t type;
+
+  /* Indicates if image is the primary image in a sequence of images.
+  Applicable only to multi picture formats */
+  uint8_t is_primary;
+
+  /* Number of images in the sequence */
+  uint32_t num_of_images;
+
+  /* Flag to indicate if multi picture metadata need to be added to Exif */
+  uint8_t enable_metadata;
+} mm_jpeg_multi_image_t;
+
+typedef struct {
+  uint32_t sequence;          /* for jpeg bit streams, assembling is based on sequence. sequence starts from 0 */
+  uint8_t *buf_vaddr;        /* ptr to buf */
+  int fd;                    /* fd of buf */
+  size_t buf_size;         /* total size of buf (header + image) */
+  mm_jpeg_format_t format;   /* buffer format*/
+  cam_frame_len_offset_t offset; /* offset of all the planes */
+  uint32_t index; /* index used to identify the buffers */
+} mm_jpeg_buf_t;
+
+typedef struct {
+  uint8_t *buf_vaddr;        /* ptr to buf */
+  int fd;                    /* fd of buf */
+  size_t buf_filled_len;   /* used for output image. filled by the client */
+} mm_jpeg_output_t;
+
+typedef enum {
+  MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2,
+  MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2,
+  MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1,
+  MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1,
+  MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V2,
+  MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V2,
+  MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V1,
+  MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V1,
+  MM_JPEG_COLOR_FORMAT_MONOCHROME,
+  MM_JPEG_COLOR_FORMAT_BITSTREAM_H2V2,
+  MM_JPEG_COLOR_FORMAT_BITSTREAM_H2V1,
+  MM_JPEG_COLOR_FORMAT_BITSTREAM_H1V2,
+  MM_JPEG_COLOR_FORMAT_BITSTREAM_H1V1,
+  MM_JPEG_COLOR_FORMAT_MAX
+} mm_jpeg_color_format;
+
+typedef enum {
+  JPEG_JOB_STATUS_DONE = 0,
+  JPEG_JOB_STATUS_ERROR
+} jpeg_job_status_t;
+
+typedef void (*jpeg_encode_callback_t)(jpeg_job_status_t status,
+  uint32_t client_hdl,
+  uint32_t jobId,
+  mm_jpeg_output_t *p_output,
+  void *userData);
+
+typedef struct {
+  /* src img dimension */
+  cam_dimension_t src_dim;
+
+  /* jpeg output dimension */
+  cam_dimension_t dst_dim;
+
+  /* crop information */
+  cam_rect_t crop;
+} mm_jpeg_dim_t;
+
+typedef struct {
+  /* num of buf in src img */
+  uint32_t num_src_bufs;
+
+  /* num of src tmb bufs */
+  uint32_t num_tmb_bufs;
+
+  /* num of buf in src img */
+  uint32_t num_dst_bufs;
+
+  /* should create thumbnail from main image or not */
+  uint32_t encode_thumbnail;
+
+  /* src img bufs */
+  mm_jpeg_buf_t src_main_buf[MM_JPEG_MAX_BUF];
+
+  /* this will be used only for bitstream */
+  mm_jpeg_buf_t src_thumb_buf[MM_JPEG_MAX_BUF];
+
+  /* this will be used only for bitstream */
+  mm_jpeg_buf_t dest_buf[MM_JPEG_MAX_BUF];
+
+  /* mainimage color format */
+  mm_jpeg_color_format color_format;
+
+  /* thumbnail color format */
+  mm_jpeg_color_format thumb_color_format;
+
+  /* jpeg quality: range 0~100 */
+  uint32_t quality;
+
+  /* jpeg thumbnail quality: range 0~100 */
+  uint32_t thumb_quality;
+
+  /* buf to exif entries, caller needs to
+   * take care of the memory manage with insider ptr */
+  QOMX_EXIF_INFO exif_info;
+
+  /*Callback registered to be called after encode*/
+  jpeg_encode_callback_t jpeg_cb;
+
+  /*Appdata passed by the user*/
+  void* userdata;
+
+  /* thumbnail dimension */
+  mm_jpeg_dim_t thumb_dim;
+
+  /* rotation information */
+  uint32_t rotation;
+
+  /* thumb rotation information */
+  uint32_t thumb_rotation;
+
+  /* main image dimension */
+  mm_jpeg_dim_t main_dim;
+
+  /* enable encoder burst mode */
+  uint32_t burst_mode;
+
+  /* get memory function ptr */
+  int (*get_memory)( omx_jpeg_ouput_buf_t *p_out_buf);
+
+  /* release memory function ptr */
+  int (*put_memory)( omx_jpeg_ouput_buf_t *p_out_buf);
+
+  /* Flag to indicate whether to generate thumbnail from postview */
+  bool thumb_from_postview;
+} mm_jpeg_encode_params_t;
+
+typedef struct {
+  /* num of buf in src img */
+  uint32_t num_src_bufs;
+
+  /* num of buf in src img */
+  uint32_t num_dst_bufs;
+
+  /* src img bufs */
+  mm_jpeg_buf_t src_main_buf[MM_JPEG_MAX_BUF];
+
+  /* this will be used only for bitstream */
+  mm_jpeg_buf_t dest_buf[MM_JPEG_MAX_BUF];
+
+  /* color format */
+  mm_jpeg_color_format color_format;
+
+  jpeg_encode_callback_t jpeg_cb;
+
+  void* userdata;
+
+} mm_jpeg_decode_params_t;
+
+typedef struct {
+  /* active indices of the buffers for encoding */
+  int32_t src_index;
+  int32_t dst_index;
+  uint32_t thumb_index;
+  mm_jpeg_dim_t thumb_dim;
+
+  /* rotation information */
+  uint32_t rotation;
+
+  /* main image dimension */
+  mm_jpeg_dim_t main_dim;
+
+  /*session id*/
+  uint32_t session_id;
+
+  /* jpeg output buffer ref count */
+  int32_t ref_count;
+
+  /* allocated jpeg output buffer */
+  void *alloc_out_buffer;
+
+  /*Metadata stream*/
+  metadata_buffer_t *p_metadata;
+
+  /*HAL version*/
+  cam_hal_version_t hal_version;
+
+  /* buf to exif entries, caller needs to
+   * take care of the memory manage with insider ptr */
+  QOMX_EXIF_INFO exif_info;
+
+  /* 3a parameters */
+  mm_jpeg_exif_params_t cam_exif_params;
+
+  /* jpeg encoder QTable */
+  uint8_t qtable_set[QTABLE_MAX];
+
+  OMX_IMAGE_PARAM_QUANTIZATIONTABLETYPE qtable[QTABLE_MAX];
+
+  /* flag to enable/disable mobicat */
+  uint8_t mobicat_mask;
+
+  /*Info associated with multiple image sequence*/
+  mm_jpeg_multi_image_t multi_image_info;
+
+  /* work buf */
+  mm_jpeg_buf_t work_buf;
+} mm_jpeg_encode_job_t;
+
+typedef struct {
+  /* active indices of the buffers for encoding */
+  int32_t src_index;
+  int32_t dst_index;
+  uint32_t tmb_dst_index;
+
+  /* rotation information */
+  uint32_t rotation;
+
+  /* main image  */
+  mm_jpeg_dim_t main_dim;
+
+  /*session id*/
+  uint32_t session_id;
+} mm_jpeg_decode_job_t;
+
+typedef enum {
+  JPEG_JOB_TYPE_ENCODE,
+  JPEG_JOB_TYPE_DECODE,
+  JPEG_JOB_TYPE_MAX
+} mm_jpeg_job_type_t;
+
+typedef struct {
+  mm_jpeg_job_type_t job_type;
+  union {
+    mm_jpeg_encode_job_t encode_job;
+    mm_jpeg_decode_job_t decode_job;
+  };
+} mm_jpeg_job_t;
+
+typedef struct {
+  uint32_t w;
+  uint32_t h;
+} mm_dimension;
+
+typedef struct {
+  /*Primary image in the MPO sequence*/
+  mm_jpeg_output_t primary_image;
+
+  /*All auxiliary images in the sequence*/
+  mm_jpeg_output_t aux_images[MM_JPEG_MAX_MPO_IMAGES - 1];
+
+  /*Total number of images in the MPO sequence*/
+  int num_of_images;
+
+  /*Output MPO buffer*/
+  mm_jpeg_output_t output_buff;
+
+  /*Size of the allocated output buffer*/
+  size_t output_buff_size;
+} mm_jpeg_mpo_info_t;
+
+typedef struct {
+  /* config a job -- async call */
+  int (*start_job)(mm_jpeg_job_t* job, uint32_t* job_id);
+
+  /* abort a job -- sync call */
+  int (*abort_job)(uint32_t job_id);
+
+  /* create a session */
+  int (*create_session)(uint32_t client_hdl,
+    mm_jpeg_encode_params_t *p_params, uint32_t *p_session_id);
+
+  /* destroy session */
+  int (*destroy_session)(uint32_t session_id);
+
+  /* close a jpeg client -- sync call */
+  int (*close) (uint32_t clientHdl);
+
+} mm_jpeg_ops_t;
+
+typedef struct {
+  /* config a job -- async call */
+  int (*start_job)(mm_jpeg_job_t* job, uint32_t* job_id);
+
+  /* abort a job -- sync call */
+  int (*abort_job)(uint32_t job_id);
+
+  /* create a session */
+  int (*create_session)(uint32_t client_hdl,
+    mm_jpeg_decode_params_t *p_params, uint32_t *p_session_id);
+
+  /* destroy session */
+  int (*destroy_session)(uint32_t session_id);
+
+  /* close a jpeg client -- sync call */
+  int (*close) (uint32_t clientHdl);
+} mm_jpegdec_ops_t;
+
+typedef struct {
+
+  /* Get Mpo size*/
+  int (*get_mpo_size)(mm_jpeg_output_t jpeg_buffer[MM_JPEG_MAX_MPO_IMAGES],
+    int num_of_images);
+
+  /* Compose MPO*/
+  int (*compose_mpo)(mm_jpeg_mpo_info_t *mpo_info);
+
+} mm_jpeg_mpo_ops_t;
+
+/* open a jpeg client -- sync call
+ * returns client_handle.
+ * failed if client_handle=0
+ * jpeg ops tbl and mpo ops tbl will be filled in if open succeeds
+ * and jpeg meta data will be cached */
+uint32_t jpeg_open(mm_jpeg_ops_t *ops, mm_jpeg_mpo_ops_t *mpo_ops,
+  mm_dimension picture_size,
+  cam_jpeg_metadata_t *jpeg_metadata);
+
+/* open a jpeg client -- sync call
+ * returns client_handle.
+ * failed if client_handle=0
+ * jpeg ops tbl will be filled in if open succeeds */
+uint32_t jpegdec_open(mm_jpegdec_ops_t *ops);
+
+#endif /* MM_JPEG_INTERFACE_H_ */
diff --git a/msmcobalt/QCamera2/stack/mm-camera-interface/Android.mk b/msmcobalt/QCamera2/stack/mm-camera-interface/Android.mk
new file mode 100644
index 0000000..1497fbd
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-interface/Android.mk
@@ -0,0 +1,67 @@
+OLD_LOCAL_PATH := $(LOCAL_PATH)
+LOCAL_PATH := $(call my-dir)
+
+include $(LOCAL_PATH)/../../../common.mk
+include $(CLEAR_VARS)
+
+MM_CAM_FILES := \
+        src/mm_camera_interface.c \
+        src/mm_camera.c \
+        src/mm_camera_channel.c \
+        src/mm_camera_stream.c \
+        src/mm_camera_thread.c \
+        src/mm_camera_sock.c
+
+ifeq ($(CAMERA_DAEMON_NOT_PRESENT), true)
+else
+LOCAL_CFLAGS += -DDAEMON_PRESENT
+endif
+
+# System header file path prefix
+LOCAL_CFLAGS += -DSYSTEM_HEADER_PREFIX=sys
+
+ifeq ($(strip $(TARGET_USES_ION)),true)
+    LOCAL_CFLAGS += -DUSE_ION
+endif
+
+ifneq (,$(filter msm8974 msm8916 msm8226 msm8610 msm8916 apq8084 msm8084 msm8994 msm8992 msm8952 msm8937 msm8953 msm8996 msmcobalt msmfalcon, $(TARGET_BOARD_PLATFORM)))
+    LOCAL_CFLAGS += -DVENUS_PRESENT
+endif
+
+ifneq (,$(filter msm8996 msmcobalt msmfalcon,$(TARGET_BOARD_PLATFORM)))
+    LOCAL_CFLAGS += -DUBWC_PRESENT
+endif
+
+LOCAL_CFLAGS += -D_ANDROID_ -DQCAMERA_REDEFINE_LOG
+LOCAL_COPY_HEADERS_TO := mm-camera-interface
+LOCAL_COPY_HEADERS += ../common/cam_intf.h
+LOCAL_COPY_HEADERS += ../common/cam_types.h
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/inc \
+    $(LOCAL_PATH)/../common \
+    hardware/libhardware/include/hardware \
+    system/media/camera/include \
+
+LOCAL_CFLAGS += -DCAMERA_ION_HEAP_ID=ION_IOMMU_HEAP_ID
+LOCAL_C_INCLUDES+= $(kernel_includes)
+LOCAL_ADDITIONAL_DEPENDENCIES := $(common_deps)
+
+ifneq (1,$(filter 1,$(shell echo "$$(( $(PLATFORM_SDK_VERSION) >= 17 ))" )))
+  LOCAL_CFLAGS += -include bionic/libc/kernel/common/linux/socket.h
+  LOCAL_CFLAGS += -include bionic/libc/kernel/common/linux/un.h
+endif
+
+LOCAL_CFLAGS += -Wall -Wextra -Werror
+
+LOCAL_SRC_FILES := $(MM_CAM_FILES)
+
+LOCAL_MODULE           := libmmcamera_interface
+LOCAL_PRELINK_MODULE   := false
+LOCAL_SHARED_LIBRARIES := libdl libcutils liblog
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_32_BIT_ONLY := $(BOARD_QTI_CAMERA_32BIT_ONLY)
+include $(BUILD_SHARED_LIBRARY)
+
+LOCAL_PATH := $(OLD_LOCAL_PATH)
diff --git a/msmcobalt/QCamera2/stack/mm-camera-interface/inc/mm_camera.h b/msmcobalt/QCamera2/stack/mm-camera-interface/inc/mm_camera.h
new file mode 100644
index 0000000..94f158f
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-interface/inc/mm_camera.h
@@ -0,0 +1,781 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_CAMERA_H__
+#define __MM_CAMERA_H__
+
+// System dependencies
+#include <poll.h>
+
+// Camera dependencies
+#include "hardware/camera_common.h"
+#include "cam_semaphore.h"
+#include "mm_camera_interface.h"
+#include "mm_camera_shim.h"
+
+/**********************************************************************************
+* Data structure declarations
+***********************************************************************************/
+/* num of callbacks allowed for an event type */
+#define MM_CAMERA_EVT_ENTRY_MAX 4
+/* num of data callbacks allowed in a stream obj */
+#define MM_CAMERA_STREAM_BUF_CB_MAX 4
+/* num of data poll threads allowed in a channel obj */
+#define MM_CAMERA_CHANNEL_POLL_THREAD_MAX 1
+
+#define MM_CAMERA_DEV_NAME_LEN 32
+#define MM_CAMERA_DEV_OPEN_TRIES 20
+#define MM_CAMERA_DEV_OPEN_RETRY_SLEEP 20
+#define THREAD_NAME_SIZE 15
+
+/* Future frame idx, large enough to make sure capture
+* settings can be applied and small enough to still capture an image */
+#define MM_CAMERA_MAX_FUTURE_FRAME_WAIT 100
+#define WAIT_TIMEOUT 5
+
+#ifndef TRUE
+#define TRUE 1
+#endif
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+#define ARRAY_SIZE(a) (sizeof(a)/sizeof((a)[0]))
+
+struct mm_channel;
+struct mm_stream;
+struct mm_camera_obj;
+
+typedef int64_t nsecs_t;
+
+typedef enum
+{
+    MM_CAMERA_CMD_TYPE_DATA_CB,    /* dataCB CMD */
+    MM_CAMERA_CMD_TYPE_EVT_CB,     /* evtCB CMD */
+    MM_CAMERA_CMD_TYPE_EXIT,       /* EXIT */
+    MM_CAMERA_CMD_TYPE_REQ_DATA_CB,/* request data */
+    MM_CAMERA_CMD_TYPE_SUPER_BUF_DATA_CB,    /* superbuf dataCB CMD */
+    MM_CAMERA_CMD_TYPE_CONFIG_NOTIFY, /* configure notify mode */
+    MM_CAMERA_CMD_TYPE_START_ZSL, /* start zsl snapshot for channel */
+    MM_CAMERA_CMD_TYPE_STOP_ZSL, /* stop zsl snapshot for channel */
+    MM_CAMERA_CMD_TYPE_FLUSH_QUEUE, /* flush queue */
+    MM_CAMERA_CMD_TYPE_GENERAL,  /* general cmd */
+    MM_CAMERA_CMD_TYPE_MAX
+} mm_camera_cmdcb_type_t;
+
+typedef struct {
+    uint32_t stream_id;
+    uint32_t frame_idx;
+    uint32_t flags;
+    mm_camera_buf_def_t *buf; /* ref to buf */
+} mm_camera_buf_info_t;
+
+typedef enum {
+    MM_CAMERA_GENERIC_CMD_TYPE_AE_BRACKETING,
+    MM_CAMERA_GENERIC_CMD_TYPE_AF_BRACKETING,
+    MM_CAMERA_GENERIC_CMD_TYPE_FLASH_BRACKETING,
+    MM_CAMERA_GENERIC_CMD_TYPE_ZOOM_1X,
+    MM_CAMERA_GENERIC_CMD_TYPE_CAPTURE_SETTING,
+} mm_camera_generic_cmd_type_t;
+
+typedef struct {
+    mm_camera_generic_cmd_type_t type;
+    uint32_t payload[32];
+    union {
+        cam_capture_frame_config_t frame_config;
+    };
+} mm_camera_generic_cmd_t;
+
+typedef struct {
+    uint32_t frame_idx;
+    cam_stream_type_t stream_type;
+} mm_camera_flush_cmd_t;
+
+typedef struct {
+    mm_camera_cmdcb_type_t cmd_type;
+    union {
+        mm_camera_buf_info_t buf;    /* frame buf if dataCB */
+        mm_camera_event_t evt;       /* evt if evtCB */
+        mm_camera_super_buf_t superbuf; /* superbuf if superbuf dataCB*/
+        mm_camera_req_buf_t req_buf; /* num of buf requested */
+        mm_camera_flush_cmd_t flush_cmd; /* frame idx boundary for flush superbuf queue*/
+        mm_camera_super_buf_notify_mode_t notify_mode; /* notification mode */
+        mm_camera_generic_cmd_t gen_cmd;
+    } u;
+} mm_camera_cmdcb_t;
+
+typedef void (*mm_camera_cmd_cb_t)(mm_camera_cmdcb_t * cmd_cb, void* user_data);
+
+typedef struct {
+    uint8_t is_active;     /*indicates whether thread is active or not */
+    cam_queue_t cmd_queue; /* cmd queue (queuing dataCB, asyncCB, or exitCMD) */
+    pthread_t cmd_pid;           /* cmd thread ID */
+    cam_semaphore_t cmd_sem;     /* semaphore for cmd thread */
+    cam_semaphore_t sync_sem;     /* semaphore for synchronization with cmd thread */
+    mm_camera_cmd_cb_t cb;       /* cb for cmd */
+    void* user_data;             /* user_data for cb */
+    char threadName[THREAD_NAME_SIZE];
+} mm_camera_cmd_thread_t;
+
+typedef enum {
+    MM_CAMERA_POLL_TYPE_EVT,
+    MM_CAMERA_POLL_TYPE_DATA,
+    MM_CAMERA_POLL_TYPE_MAX
+} mm_camera_poll_thread_type_t;
+
+/* function ptr defined for poll notify CB,
+ * registered at poll thread with poll fd */
+typedef void (*mm_camera_poll_notify_t)(void *user_data);
+
+typedef struct {
+    int32_t fd;
+    mm_camera_poll_notify_t notify_cb;
+    uint32_t handler;
+    void* user_data;
+} mm_camera_poll_entry_t;
+
+typedef struct {
+    mm_camera_poll_thread_type_t poll_type;
+    /* array to store poll fd and cb info
+     * for MM_CAMERA_POLL_TYPE_EVT, only index 0 is valid;
+     * for MM_CAMERA_POLL_TYPE_DATA, depends on valid stream fd */
+    mm_camera_poll_entry_t poll_entries[MAX_STREAM_NUM_IN_BUNDLE];
+    int32_t pfds[2];
+    pthread_t pid;
+    int32_t state;
+    int timeoutms;
+    uint32_t cmd;
+    struct pollfd poll_fds[MAX_STREAM_NUM_IN_BUNDLE + 1];
+    uint8_t num_fds;
+    pthread_mutex_t mutex;
+    pthread_cond_t cond_v;
+    int32_t status;
+    char threadName[THREAD_NAME_SIZE];
+    //void *my_obj;
+} mm_camera_poll_thread_t;
+
+/* mm_stream */
+typedef enum {
+    MM_STREAM_STATE_NOTUSED = 0,      /* not used */
+    MM_STREAM_STATE_INITED,           /* inited  */
+    MM_STREAM_STATE_ACQUIRED,         /* acquired, fd opened  */
+    MM_STREAM_STATE_CFG,              /* fmt & dim configured */
+    MM_STREAM_STATE_BUFFED,           /* buf allocated */
+    MM_STREAM_STATE_REG,              /* buf regged, stream off */
+    MM_STREAM_STATE_ACTIVE,           /* active */
+    MM_STREAM_STATE_MAX
+} mm_stream_state_type_t;
+
+typedef enum {
+    MM_STREAM_EVT_ACQUIRE,
+    MM_STREAM_EVT_RELEASE,
+    MM_STREAM_EVT_SET_FMT,
+    MM_STREAM_EVT_GET_BUF,
+    MM_STREAM_EVT_PUT_BUF,
+    MM_STREAM_EVT_REG_BUF,
+    MM_STREAM_EVT_UNREG_BUF,
+    MM_STREAM_EVT_START,
+    MM_STREAM_EVT_STOP,
+    MM_STREAM_EVT_QBUF,
+    MM_STREAM_EVT_SET_PARM,
+    MM_STREAM_EVT_GET_PARM,
+    MM_STREAM_EVT_DO_ACTION,
+    MM_STREAM_EVT_GET_QUEUED_BUF_COUNT,
+    MM_STREAM_EVT_MAX
+} mm_stream_evt_type_t;
+
+typedef struct {
+    mm_camera_buf_notify_t cb;
+    void *user_data;
+    /* cb_count = -1: infinite
+     * cb_count > 0: register only for required times */
+    int8_t cb_count;
+    mm_camera_stream_cb_type cb_type;
+} mm_stream_data_cb_t;
+
+typedef struct {
+    /* buf reference count */
+    uint8_t buf_refcnt;
+
+    /* This flag is to indicate if after allocation,
+     * the corresponding buf needs to qbuf into kernel
+     * (e.g. for preview usecase, display needs to hold two bufs,
+     * so no need to qbuf these two bufs initially) */
+    uint8_t initial_reg_flag;
+
+    /* indicate if buf is in kernel(1) or client(0) */
+    uint8_t in_kernel;
+    /*indicate if this buffer is mapped to daemon*/
+    int8_t map_status;
+} mm_stream_buf_status_t;
+
+typedef struct mm_stream {
+    uint32_t my_hdl; /* local stream id */
+    uint32_t server_stream_id; /* stream id from server */
+    int32_t fd;
+    mm_stream_state_type_t state;
+
+    /* stream info*/
+    cam_stream_info_t *stream_info;
+
+    /* padding info */
+    cam_padding_info_t padding_info;
+
+    /* offset */
+    cam_frame_len_offset_t frame_offset;
+
+    pthread_mutex_t cmd_lock; /* lock to protect cmd_thread */
+    mm_camera_cmd_thread_t cmd_thread;
+
+    /* dataCB registered on this stream obj */
+    pthread_mutex_t cb_lock; /* cb lock to protect buf_cb */
+    mm_stream_data_cb_t buf_cb[MM_CAMERA_STREAM_BUF_CB_MAX];
+
+    /* stream buffer management */
+    pthread_mutex_t buf_lock;
+    uint8_t buf_num; /* num of buffers allocated */
+    mm_camera_buf_def_t* buf; /* ptr to buf array */
+    mm_stream_buf_status_t buf_status[CAM_MAX_NUM_BUFS_PER_STREAM]; /* ptr to buf status array */
+
+    uint8_t plane_buf_num; /* num of plane buffers allocated  Used only in Batch mode*/
+    mm_camera_buf_def_t *plane_buf; /*Pointer to plane buffer array Used only in Batch mode */
+    int32_t cur_buf_idx; /* Current container buffer active filling. Used only in Batch mode*/
+    uint8_t cur_bufs_staged; /*Number of plane buf freed by HAL for this usr buf*/
+
+
+    /* reference to parent channel_obj */
+    struct mm_channel* ch_obj;
+
+    uint8_t is_bundled; /* flag if stream is bundled */
+
+    /* reference to linked channel_obj */
+    struct mm_channel* linked_obj;
+    struct mm_stream * linked_stream; /* original stream */
+    uint8_t is_linked; /* flag if stream is linked */
+
+    mm_camera_stream_mem_vtbl_t mem_vtbl; /* mem ops tbl */
+
+    mm_camera_map_unmap_ops_tbl_t map_ops;
+
+    int8_t queued_buffer_count;
+
+    /*latest timestamp of this stream frame received & last frameID*/
+    uint32_t prev_frameID;
+    nsecs_t prev_timestamp;
+
+    /* Need to wait for buffer mapping before stream-on*/
+    pthread_cond_t buf_cond;
+} mm_stream_t;
+
+/* mm_channel */
+typedef enum {
+    MM_CHANNEL_STATE_NOTUSED = 0,   /* not used */
+    MM_CHANNEL_STATE_STOPPED,       /* stopped */
+    MM_CHANNEL_STATE_ACTIVE,        /* active, at least one stream active */
+    MM_CHANNEL_STATE_PAUSED,        /* paused */
+    MM_CHANNEL_STATE_MAX
+} mm_channel_state_type_t;
+
+typedef enum {
+    MM_CHANNEL_EVT_ADD_STREAM,
+    MM_CHANNEL_EVT_DEL_STREAM,
+    MM_CHANNEL_EVT_LINK_STREAM,
+    MM_CHANNEL_EVT_CONFIG_STREAM,
+    MM_CHANNEL_EVT_GET_BUNDLE_INFO,
+    MM_CHANNEL_EVT_START,
+    MM_CHANNEL_EVT_STOP,
+    MM_CHANNEL_EVT_PAUSE,
+    MM_CHANNEL_EVT_RESUME,
+    MM_CHANNEL_EVT_REQUEST_SUPER_BUF,
+    MM_CHANNEL_EVT_CANCEL_REQUEST_SUPER_BUF,
+    MM_CHANNEL_EVT_FLUSH_SUPER_BUF_QUEUE,
+    MM_CHANNEL_EVT_CONFIG_NOTIFY_MODE,
+    MM_CHANNEL_EVT_START_ZSL_SNAPSHOT,
+    MM_CHANNEL_EVT_STOP_ZSL_SNAPSHOT,
+    MM_CHANNEL_EVT_MAP_STREAM_BUF,
+    MM_CHANNEL_EVT_UNMAP_STREAM_BUF,
+    MM_CHANNEL_EVT_SET_STREAM_PARM,
+    MM_CHANNEL_EVT_GET_STREAM_PARM,
+    MM_CHANNEL_EVT_DO_STREAM_ACTION,
+    MM_CHANNEL_EVT_DELETE,
+    MM_CHANNEL_EVT_AF_BRACKETING,
+    MM_CHANNEL_EVT_AE_BRACKETING,
+    MM_CHANNEL_EVT_FLASH_BRACKETING,
+    MM_CHANNEL_EVT_ZOOM_1X,
+    MM_CAMERA_EVT_CAPTURE_SETTING,
+    MM_CHANNEL_EVT_GET_STREAM_QUEUED_BUF_COUNT,
+    MM_CHANNEL_EVT_MAP_STREAM_BUFS,
+    MM_CHANNEL_EVT_REG_STREAM_BUF_CB
+} mm_channel_evt_type_t;
+
+typedef struct {
+    uint32_t stream_id;
+    mm_camera_stream_config_t *config;
+} mm_evt_paylod_config_stream_t;
+
+typedef struct {
+    uint32_t stream_id;
+    cam_stream_parm_buffer_t *parms;
+} mm_evt_paylod_set_get_stream_parms_t;
+
+typedef struct {
+    uint32_t stream_id;
+    void *actions;
+} mm_evt_paylod_do_stream_action_t;
+
+typedef struct {
+    uint32_t stream_id;
+    mm_stream_data_cb_t buf_cb;
+} mm_evt_paylod_reg_stream_buf_cb;
+
+typedef struct {
+    uint8_t num_of_bufs;
+    mm_camera_buf_info_t super_buf[MAX_STREAM_NUM_IN_BUNDLE];
+    uint8_t matched;
+    uint8_t expected_frame;
+    uint32_t frame_idx;
+    /* unmatched meta idx needed in case of low priority queue */
+    uint32_t unmatched_meta_idx;
+} mm_channel_queue_node_t;
+
+typedef struct {
+    cam_queue_t que;
+    uint8_t num_streams;
+    /* container for bundled stream handlers */
+    uint32_t bundled_streams[MAX_STREAM_NUM_IN_BUNDLE];
+    mm_camera_channel_attr_t attr;
+    uint32_t expected_frame_id;
+    uint32_t match_cnt;
+    uint32_t expected_frame_id_without_led;
+    uint32_t led_on_start_frame_id;
+    uint32_t led_off_start_frame_id;
+    uint32_t led_on_num_frames;
+    uint32_t once;
+    uint32_t frame_skip_count;
+    uint32_t good_frame_id;
+} mm_channel_queue_t;
+
+typedef struct {
+    uint8_t is_active; /* flag to indicate if bundle is valid */
+    /* queue to store bundled super buffers */
+    mm_channel_queue_t superbuf_queue;
+    mm_camera_buf_notify_t super_buf_notify_cb;
+    void *user_data;
+} mm_channel_bundle_t;
+
+/* Nodes used for frame sync */
+typedef struct {
+    /* Frame idx */
+    uint32_t frame_idx;
+    /* Frame present for corresponding channel*/
+    uint32_t frame_valid[MAX_NUM_CAMERA_PER_BUNDLE];
+    /* Frame present in all channels*/
+    uint32_t matched;
+} mm_channel_sync_node_t;
+
+/* Frame sync information */
+typedef struct {
+    /* Number of camera channels that need to be synced*/
+    uint8_t num_cam;
+    /* position of the next node to be updated */
+    uint8_t pos;
+    /* circular node array used to store frame information */
+    mm_channel_sync_node_t node[MM_CAMERA_FRAME_SYNC_NODES];
+    /* Channel corresponding to each camera */
+    struct mm_channel *ch_obj[MAX_NUM_CAMERA_PER_BUNDLE];
+    /* Cb corresponding to each camera */
+    mm_camera_buf_notify_t cb[MAX_NUM_CAMERA_PER_BUNDLE];
+} mm_channel_frame_sync_info_t;
+
+/* Node information for multiple superbuf callbacks
+*  This can be used to batch nodes before sending to upper layer */
+typedef struct {
+    /* Number of nodes to be sent*/
+    uint8_t num_nodes;
+    /* queue node information*/
+    mm_channel_queue_node_t *node[MAX_NUM_CAMERA_PER_BUNDLE];
+    /* channel information*/
+    struct mm_channel *ch_obj[MAX_NUM_CAMERA_PER_BUNDLE];
+} mm_channel_node_info_t;
+
+typedef enum {
+    MM_CHANNEL_BRACKETING_STATE_OFF,
+    MM_CHANNEL_BRACKETING_STATE_WAIT_GOOD_FRAME_IDX,
+    MM_CHANNEL_BRACKETING_STATE_ACTIVE,
+} mm_channel_bracketing_state_t;
+
+typedef struct mm_channel {
+    uint32_t my_hdl;
+    mm_channel_state_type_t state;
+    pthread_mutex_t ch_lock; /* channel lock */
+
+    /* stream bundle info in the channel */
+    mm_channel_bundle_t bundle;
+
+    /* num of pending superbuffers */
+    uint32_t pending_cnt;
+    uint32_t pending_retro_cnt;
+    mm_camera_req_buf_type_t req_type;
+    uint32_t bWaitForPrepSnapshotDone;
+    uint32_t unLockAEC;
+    /* num of pending superbuffers */
+    uint8_t stopZslSnapshot;
+
+    /* cmd thread for superbuffer dataCB and async stop*/
+    mm_camera_cmd_thread_t cmd_thread;
+
+    /* cb thread for sending data cb */
+    mm_camera_cmd_thread_t cb_thread;
+
+    /* data poll thread
+    * currently one data poll thread per channel
+    * could be extended to support one data poll thread per stream in the channel */
+    mm_camera_poll_thread_t poll_thread[MM_CAMERA_CHANNEL_POLL_THREAD_MAX];
+
+    /* container for all streams in channel */
+    mm_stream_t streams[MAX_STREAM_NUM_IN_BUNDLE];
+
+    /* reference to parent cam_obj */
+    struct mm_camera_obj* cam_obj;
+
+    /* manual zsl snapshot control */
+    uint8_t manualZSLSnapshot;
+
+    /* control for zsl led */
+    uint8_t startZSlSnapshotCalled;
+    uint8_t needLEDFlash;
+    mm_channel_bracketing_state_t bracketingState;
+    uint8_t isFlashBracketingEnabled;
+    uint8_t isZoom1xFrameRequested;
+    uint32_t burstSnapNum;
+    char threadName[THREAD_NAME_SIZE];
+
+    /*Buffer diverted*/
+    uint8_t diverted_frame_id;
+    uint32_t sessionid;
+
+    /*Frame capture configuration*/
+    uint8_t isConfigCapture;
+    uint8_t cur_capture_idx;
+    uint32_t capture_frame_id[MAX_CAPTURE_BATCH_NUM];
+    cam_capture_frame_config_t frameConfig;
+    uint8_t needLowLightZSL;
+} mm_channel_t;
+
+typedef struct {
+    mm_channel_t *ch;
+    uint32_t stream_id;
+} mm_camera_stream_link_t;
+
+/* struct to store information about pp cookie*/
+typedef struct {
+    uint32_t cam_hdl;
+    uint32_t ch_hdl;
+    uint32_t stream_hdl;
+    mm_channel_queue_node_t* super_buf;
+} mm_channel_pp_info_t;
+
+/* mm_camera */
+typedef struct {
+    mm_camera_event_notify_t evt_cb;
+    void *user_data;
+} mm_camera_evt_entry_t;
+
+typedef struct {
+    mm_camera_evt_entry_t evt[MM_CAMERA_EVT_ENTRY_MAX];
+    /* reg_count <=0: infinite
+     * reg_count > 0: register only for required times */
+    int reg_count;
+} mm_camera_evt_obj_t;
+
+typedef struct mm_camera_obj {
+    uint32_t my_hdl;
+    int ref_count;
+    int32_t ctrl_fd;
+    int32_t ds_fd; /* domain socket fd */
+    pthread_mutex_t cam_lock;
+    pthread_mutex_t cb_lock; /* lock for evt cb */
+    mm_channel_t ch[MM_CAMERA_CHANNEL_MAX];
+    mm_camera_evt_obj_t evt;
+    mm_camera_poll_thread_t evt_poll_thread; /* evt poll thread */
+    mm_camera_cmd_thread_t evt_thread;       /* thread for evt CB */
+    mm_camera_vtbl_t vtbl;
+
+    pthread_mutex_t evt_lock;
+    pthread_cond_t evt_cond;
+    mm_camera_event_t evt_rcvd;
+
+    pthread_mutex_t msg_lock; /* lock for sending msg through socket */
+    uint32_t sessionid; /* Camera server session id */
+} mm_camera_obj_t;
+
+typedef struct {
+    int8_t num_cam;
+    mm_camera_shim_ops_t cam_shim_ops;
+    char video_dev_name[MM_CAMERA_MAX_NUM_SENSORS][MM_CAMERA_DEV_NAME_LEN];
+    mm_camera_obj_t *cam_obj[MM_CAMERA_MAX_NUM_SENSORS];
+    struct camera_info info[MM_CAMERA_MAX_NUM_SENSORS];
+    cam_sync_type_t cam_type[MM_CAMERA_MAX_NUM_SENSORS];
+    cam_sync_mode_t cam_mode[MM_CAMERA_MAX_NUM_SENSORS];
+    uint8_t is_yuv[MM_CAMERA_MAX_NUM_SENSORS]; // 1=CAM_SENSOR_YUV, 0=CAM_SENSOR_RAW
+} mm_camera_ctrl_t;
+
+typedef enum {
+    mm_camera_async_call,
+    mm_camera_sync_call
+} mm_camera_call_type_t;
+
+/**********************************************************************************
+* external function declare
+***********************************************************************************/
+/* utility functions */
+/* set int32_t value */
+extern int32_t mm_camera_util_s_ctrl(mm_camera_obj_t *my_obj,
+        int stream_id, int32_t fd, uint32_t id, int32_t *value);
+
+/* get int32_t value */
+extern int32_t mm_camera_util_g_ctrl(mm_camera_obj_t *my_obj,
+        int stream_id, int32_t fd, uint32_t id, int32_t *value);
+
+/* send msg through domain socket for fd mapping */
+extern int32_t mm_camera_util_sendmsg(mm_camera_obj_t *my_obj,
+                                      void *msg,
+                                      size_t buf_size,
+                                      int sendfd);
+
+/* send msg through domain socket for bundled fd mapping */
+extern int32_t mm_camera_util_bundled_sendmsg(mm_camera_obj_t *my_obj,
+                                              void *msg,
+                                              size_t buf_size,
+                                              int sendfds[CAM_MAX_NUM_BUFS_PER_STREAM],
+                                              int numfds);
+
+/* Check if hardware target is A family */
+uint8_t mm_camera_util_chip_is_a_family(void);
+
+/* mm-camera */
+extern int32_t mm_camera_open(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_close(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_register_event_notify(mm_camera_obj_t *my_obj,
+                                               mm_camera_event_notify_t evt_cb,
+                                               void * user_data);
+extern int32_t mm_camera_qbuf(mm_camera_obj_t *my_obj,
+                              uint32_t ch_id,
+                              mm_camera_buf_def_t *buf);
+extern int32_t mm_camera_get_queued_buf_count(mm_camera_obj_t *my_obj,
+        uint32_t ch_id, uint32_t stream_id);
+extern int32_t mm_camera_query_capability(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_set_parms(mm_camera_obj_t *my_obj,
+                                   parm_buffer_t *parms);
+extern int32_t mm_camera_get_parms(mm_camera_obj_t *my_obj,
+                                   parm_buffer_t *parms);
+extern int32_t mm_camera_map_buf(mm_camera_obj_t *my_obj,
+                                 uint8_t buf_type,
+                                 int fd,
+                                 size_t size,
+                                 void *buffer);
+extern int32_t mm_camera_map_bufs(mm_camera_obj_t *my_obj,
+                                  const cam_buf_map_type_list *buf_map_list);
+extern int32_t mm_camera_unmap_buf(mm_camera_obj_t *my_obj,
+                                   uint8_t buf_type);
+extern int32_t mm_camera_do_auto_focus(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_cancel_auto_focus(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_prepare_snapshot(mm_camera_obj_t *my_obj,
+                                          int32_t do_af_flag);
+extern int32_t mm_camera_start_zsl_snapshot(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_stop_zsl_snapshot(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_flush(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_start_zsl_snapshot_ch(mm_camera_obj_t *my_obj,
+        uint32_t ch_id);
+extern int32_t mm_camera_stop_zsl_snapshot_ch(mm_camera_obj_t *my_obj,
+        uint32_t ch_id);
+extern uint32_t mm_camera_add_channel(mm_camera_obj_t *my_obj,
+                                      mm_camera_channel_attr_t *attr,
+                                      mm_camera_buf_notify_t channel_cb,
+                                      void *userdata);
+extern int32_t mm_camera_del_channel(mm_camera_obj_t *my_obj,
+                                     uint32_t ch_id);
+extern int32_t mm_camera_get_bundle_info(mm_camera_obj_t *my_obj,
+                                         uint32_t ch_id,
+                                         cam_bundle_config_t *bundle_info);
+extern uint32_t mm_camera_add_stream(mm_camera_obj_t *my_obj,
+                                     uint32_t ch_id);
+extern int32_t mm_camera_del_stream(mm_camera_obj_t *my_obj,
+                                    uint32_t ch_id,
+                                    uint32_t stream_id);
+extern uint32_t mm_camera_link_stream(mm_camera_obj_t *my_obj,
+        uint32_t ch_id,
+        uint32_t stream_id,
+        uint32_t linked_ch_id);
+
+extern int32_t mm_camera_reg_stream_buf_cb(mm_camera_obj_t *my_obj,
+        uint32_t ch_id, uint32_t stream_id, mm_camera_buf_notify_t buf_cb,
+        mm_camera_stream_cb_type cb_type, void *userdata);
+
+extern int32_t mm_camera_config_stream(mm_camera_obj_t *my_obj,
+                                       uint32_t ch_id,
+                                       uint32_t stream_id,
+                                       mm_camera_stream_config_t *config);
+extern int32_t mm_camera_start_channel(mm_camera_obj_t *my_obj,
+                                       uint32_t ch_id);
+extern int32_t mm_camera_stop_channel(mm_camera_obj_t *my_obj,
+                                      uint32_t ch_id);
+extern int32_t mm_camera_request_super_buf(mm_camera_obj_t *my_obj,
+        uint32_t ch_id, mm_camera_req_buf_t *buf);
+extern int32_t mm_camera_cancel_super_buf_request(mm_camera_obj_t *my_obj,
+                                                  uint32_t ch_id);
+extern int32_t mm_camera_flush_super_buf_queue(mm_camera_obj_t *my_obj,
+                                               uint32_t ch_id,
+                                               uint32_t frame_idx);
+extern int32_t mm_camera_config_channel_notify(mm_camera_obj_t *my_obj,
+                                               uint32_t ch_id,
+                                               mm_camera_super_buf_notify_mode_t notify_mode);
+extern int32_t mm_camera_set_stream_parms(mm_camera_obj_t *my_obj,
+                                          uint32_t ch_id,
+                                          uint32_t s_id,
+                                          cam_stream_parm_buffer_t *parms);
+extern int32_t mm_camera_get_stream_parms(mm_camera_obj_t *my_obj,
+                                          uint32_t ch_id,
+                                          uint32_t s_id,
+                                          cam_stream_parm_buffer_t *parms);
+extern int32_t mm_camera_register_event_notify_internal(mm_camera_obj_t *my_obj,
+                                                        mm_camera_event_notify_t evt_cb,
+                                                        void * user_data);
+extern int32_t mm_camera_map_stream_buf(mm_camera_obj_t *my_obj,
+                                        uint32_t ch_id,
+                                        uint32_t stream_id,
+                                        uint8_t buf_type,
+                                        uint32_t buf_idx,
+                                        int32_t plane_idx,
+                                        int fd,
+                                        size_t size,
+                                        void *buffer);
+extern int32_t mm_camera_map_stream_bufs(mm_camera_obj_t *my_obj,
+                                         uint32_t ch_id,
+                                         const cam_buf_map_type_list *buf_map_list);
+extern int32_t mm_camera_unmap_stream_buf(mm_camera_obj_t *my_obj,
+                                          uint32_t ch_id,
+                                          uint32_t stream_id,
+                                          uint8_t buf_type,
+                                          uint32_t buf_idx,
+                                          int32_t plane_idx);
+extern int32_t mm_camera_do_stream_action(mm_camera_obj_t *my_obj,
+                                          uint32_t ch_id,
+                                          uint32_t stream_id,
+                                          void *actions);
+extern int32_t mm_camera_get_session_id(mm_camera_obj_t *my_obj,
+                                        uint32_t* sessionid);
+extern int32_t mm_camera_sync_related_sensors(mm_camera_obj_t *my_obj,
+                                   cam_sync_related_sensors_event_info_t *parms);
+
+/* mm_channel */
+extern int32_t mm_channel_fsm_fn(mm_channel_t *my_obj,
+                                 mm_channel_evt_type_t evt,
+                                 void * in_val,
+                                 void * out_val);
+extern int32_t mm_channel_init(mm_channel_t *my_obj,
+                               mm_camera_channel_attr_t *attr,
+                               mm_camera_buf_notify_t channel_cb,
+                               void *userdata);
+/* qbuf is a special case that not going through state machine.
+ * This is to avoid deadlock when trying to acquire ch_lock,
+ * from the context of dataCB, but async stop is holding ch_lock */
+extern int32_t mm_channel_qbuf(mm_channel_t *my_obj,
+                               mm_camera_buf_def_t *buf);
+/* mm_stream */
+extern int32_t mm_stream_fsm_fn(mm_stream_t *my_obj,
+                                mm_stream_evt_type_t evt,
+                                void * in_val,
+                                void * out_val);
+/* Function to register special callback for stream buffer*/
+extern int32_t mm_stream_reg_buf_cb(mm_stream_t *my_obj,
+        mm_stream_data_cb_t val);
+extern int32_t mm_stream_map_buf(mm_stream_t *my_obj,
+                                 uint8_t buf_type,
+                                 uint32_t frame_idx,
+                                 int32_t plane_idx,
+                                 int fd,
+                                 size_t size,
+                                 void *buffer);
+extern int32_t mm_stream_map_bufs(mm_stream_t *my_obj,
+                                  const cam_buf_map_type_list *buf_map_list);
+extern int32_t mm_stream_unmap_buf(mm_stream_t *my_obj,
+                                   uint8_t buf_type,
+                                   uint32_t frame_idx,
+                                   int32_t plane_idx);
+
+/* utility function declared in mm-camera-interface2.c
+ * and need be used by mm-camera and below*/
+uint32_t mm_camera_util_generate_handler(uint8_t index);
+const char * mm_camera_util_get_dev_name(uint32_t cam_handler);
+uint8_t mm_camera_util_get_index_by_handler(uint32_t handler);
+
+/* poll/cmd thread functions */
+extern int32_t mm_camera_poll_thread_launch(
+                                mm_camera_poll_thread_t * poll_cb,
+                                mm_camera_poll_thread_type_t poll_type);
+extern int32_t mm_camera_poll_thread_release(mm_camera_poll_thread_t *poll_cb);
+extern int32_t mm_camera_poll_thread_add_poll_fd(
+                                mm_camera_poll_thread_t * poll_cb,
+                                uint32_t handler,
+                                int32_t fd,
+                                mm_camera_poll_notify_t nofity_cb,
+                                void *userdata,
+                                mm_camera_call_type_t);
+extern int32_t mm_camera_poll_thread_del_poll_fd(
+                                mm_camera_poll_thread_t * poll_cb,
+                                uint32_t handler,
+                                mm_camera_call_type_t);
+extern int32_t mm_camera_poll_thread_commit_updates(
+        mm_camera_poll_thread_t * poll_cb);
+extern int32_t mm_camera_cmd_thread_launch(
+                                mm_camera_cmd_thread_t * cmd_thread,
+                                mm_camera_cmd_cb_t cb,
+                                void* user_data);
+extern int32_t mm_camera_cmd_thread_name(const char* name);
+extern int32_t mm_camera_cmd_thread_release(mm_camera_cmd_thread_t * cmd_thread);
+
+extern int32_t mm_camera_channel_advanced_capture(mm_camera_obj_t *my_obj,
+        uint32_t ch_id, mm_camera_advanced_capture_t type,
+        uint32_t trigger, void *in_value);
+int32_t mm_camera_enqueue_evt(mm_camera_obj_t *my_obj,
+                              mm_camera_event_t *event);
+int32_t mm_camera_load_shim_lib();
+cam_shim_packet_t *mm_camera_create_shim_cmd_packet(cam_shim_cmd_type type,
+        uint32_t sessionID, void *data);
+int32_t mm_camera_destroy_shim_cmd_packet(cam_shim_packet_t *cmd);
+int32_t mm_camera_module_event_handler(
+        uint32_t session_id, cam_event_t *event);
+cam_status_t mm_camera_module_open_session(int sessionid,
+        int (*event_cb)(uint32_t sessionid, cam_event_t *event));
+int32_t mm_camera_module_close_session(int session);
+int32_t mm_camera_module_send_cmd(cam_shim_packet_t *event);
+
+#endif /* __MM_CAMERA_H__ */
diff --git a/msmcobalt/QCamera2/stack/mm-camera-interface/inc/mm_camera_dbg.h b/msmcobalt/QCamera2/stack/mm-camera-interface/inc/mm_camera_dbg.h
new file mode 100644
index 0000000..8298c78
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-interface/inc/mm_camera_dbg.h
@@ -0,0 +1,134 @@
+/* Copyright (c) 2012, 2014, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_CAMERA_DBG_H__
+#define __MM_CAMERA_DBG_H__
+
+// System dependencies
+#include <utils/Log.h>
+
+#ifdef QCAMERA_REDEFINE_LOG
+
+// Camera dependencies
+#include "cam_types.h"
+
+typedef enum {
+    CAM_NO_MODULE,
+    CAM_HAL_MODULE,
+    CAM_MCI_MODULE,
+    CAM_JPEG_MODULE,
+    CAM_LAST_MODULE
+} cam_modules_t;
+
+/* values that persist.camera.global.debug can be set to */
+/* all camera modules need to map their internal debug levels to this range */
+typedef enum {
+    CAM_GLBL_DBG_NONE  = 0,
+    CAM_GLBL_DBG_ERR   = 1,
+    CAM_GLBL_DBG_WARN  = 2,
+    CAM_GLBL_DBG_HIGH  = 3,
+    CAM_GLBL_DBG_DEBUG = 4,
+    CAM_GLBL_DBG_LOW   = 5,
+    CAM_GLBL_DBG_INFO  = 6
+} cam_global_debug_level_t;
+
+extern int g_cam_log[CAM_LAST_MODULE][CAM_GLBL_DBG_INFO + 1];
+
+#define FATAL_IF(cond, ...) LOG_ALWAYS_FATAL_IF(cond, ## __VA_ARGS__)
+
+#undef CLOGx
+#define CLOGx(module, level, fmt, args...)                         \
+{\
+if (g_cam_log[module][level]) {                                  \
+  mm_camera_debug_log(module, level, __func__, __LINE__, fmt, ##args); \
+}\
+}
+
+#undef CLOGI
+#define CLOGI(module, fmt, args...)                \
+    CLOGx(module, CAM_GLBL_DBG_INFO, fmt, ##args)
+#undef CLOGD
+#define CLOGD(module, fmt, args...)                \
+    CLOGx(module, CAM_GLBL_DBG_DEBUG, fmt, ##args)
+#undef CLOGL
+#define CLOGL(module, fmt, args...)                \
+    CLOGx(module, CAM_GLBL_DBG_LOW, fmt, ##args)
+#undef CLOGW
+#define CLOGW(module, fmt, args...)                \
+    CLOGx(module, CAM_GLBL_DBG_WARN, fmt, ##args)
+#undef CLOGH
+#define CLOGH(module, fmt, args...)                \
+    CLOGx(module, CAM_GLBL_DBG_HIGH, fmt, ##args)
+#undef CLOGE
+#define CLOGE(module, fmt, args...)                \
+    CLOGx(module, CAM_GLBL_DBG_ERR, fmt, ##args)
+
+#ifndef CAM_MODULE
+#define CAM_MODULE CAM_MCI_MODULE
+#endif
+
+#undef LOGD
+#define LOGD(fmt, args...) CLOGD(CAM_MODULE, fmt, ##args)
+#undef LOGL
+#define LOGL(fmt, args...) CLOGL(CAM_MODULE, fmt, ##args)
+#undef LOGW
+#define LOGW(fmt, args...) CLOGW(CAM_MODULE, fmt, ##args)
+#undef LOGH
+#define LOGH(fmt, args...) CLOGH(CAM_MODULE, fmt, ##args)
+#undef LOGE
+#define LOGE(fmt, args...) CLOGE(CAM_MODULE, fmt, ##args)
+#undef LOGI
+#define LOGI(fmt, args...) CLOGI(CAM_MODULE, fmt, ##args)
+
+/* reads and updates camera logging properties */
+void mm_camera_set_dbg_log_properties(void);
+
+/* generic logger function */
+void mm_camera_debug_log(const cam_modules_t module,
+                   const cam_global_debug_level_t level,
+                   const char *func, const int line, const char *fmt, ...);
+
+#else
+
+#undef LOGD
+#define LOGD(fmt, args...) ALOGD(fmt, ##args)
+#undef LOGL
+#define LOGL(fmt, args...) ALOGD(fmt, ##args)
+#undef LOGW
+#define LOGW(fmt, args...) ALOGW(fmt, ##args)
+#undef LOGH
+#define LOGH(fmt, args...) ALOGD(fmt, ##args)
+#undef LOGE
+#define LOGE(fmt, args...) ALOGE(fmt, ##args)
+#undef LOGI
+#define LOGI(fmt, args...) ALOGV(fmt, ##args)
+
+#endif
+
+#endif /* __MM_CAMERA_DBG_H__ */
diff --git a/msmcobalt/QCamera2/stack/mm-camera-interface/inc/mm_camera_sock.h b/msmcobalt/QCamera2/stack/mm-camera-interface/inc/mm_camera_sock.h
new file mode 100644
index 0000000..89d5040
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-interface/inc/mm_camera_sock.h
@@ -0,0 +1,76 @@
+/* Copyright (c) 2012-2014, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_CAMERA_SOCKET_H__
+#define __MM_CAMERA_SOCKET_H__
+
+// System dependencies
+#define SOCKET_H <SYSTEM_HEADER_PREFIX/socket.h>
+#include SOCKET_H
+#define UN_H <SYSTEM_HEADER_PREFIX/un.h>
+#include UN_H
+
+// Camera dependencies
+#include "cam_types.h"
+
+typedef enum {
+    MM_CAMERA_SOCK_TYPE_UDP,
+    MM_CAMERA_SOCK_TYPE_TCP,
+} mm_camera_sock_type_t;
+
+typedef union {
+    struct sockaddr addr;
+    struct sockaddr_un addr_un;
+} mm_camera_sock_addr_t;
+
+int mm_camera_socket_create(int cam_id, mm_camera_sock_type_t sock_type);
+
+int mm_camera_socket_sendmsg(
+  int fd,
+  void *msg,
+  size_t buf_size,
+  int sendfd);
+
+int mm_camera_socket_bundle_sendmsg(
+  int fd,
+  void *msg,
+  size_t buf_size,
+  int sendfds[CAM_MAX_NUM_BUFS_PER_STREAM],
+  int num_fds);
+
+int mm_camera_socket_recvmsg(
+  int fd,
+  void *msg,
+  uint32_t buf_size,
+  int *rcvdfd);
+
+void mm_camera_socket_close(int fd);
+
+#endif /*__MM_CAMERA_SOCKET_H__*/
+
diff --git a/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera.c b/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera.c
new file mode 100644
index 0000000..78ab597
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera.c
@@ -0,0 +1,2608 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// To remove
+#include <cutils/properties.h>
+
+// System dependencies
+#include <pthread.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <stdlib.h>
+#include <dlfcn.h>
+#define IOCTL_H <SYSTEM_HEADER_PREFIX/ioctl.h>
+#include IOCTL_H
+
+// Camera dependencies
+#include "cam_semaphore.h"
+#include "mm_camera_dbg.h"
+#include "mm_camera_sock.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+/* Set bit 'parm' in a packed array of 32-bit words ('parm_arr'). */
+#define SET_PARM_BIT32(parm, parm_arr) \
+    (parm_arr[parm/32] |= (1<<(parm%32)))
+
+/* Read bit 'parm' (0 or 1) from a packed array of 32-bit words. */
+#define GET_PARM_BIT32(parm, parm_arr) \
+    ((parm_arr[parm/32]>>(parm%32))& 0x1)
+
+/* internal function declare */
+int32_t mm_camera_evt_sub(mm_camera_obj_t * my_obj,
+                          uint8_t reg_flag);
+int32_t mm_camera_enqueue_evt(mm_camera_obj_t *my_obj,
+                              mm_camera_event_t *event);
+/* Implemented outside this file; resolves a session id to its camera
+ * object. */
+extern mm_camera_obj_t* mm_camera_util_get_camera_by_session_id
+        (uint32_t session_id);
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_get_channel_by_handler
+ *
+ * DESCRIPTION: look up the channel object owning a given channel handle
+ *
+ * PARAMETERS :
+ *   @cam_obj: ptr to a camera object
+ *   @handler: channel handle
+ *
+ * RETURN     : ptr to a channel object.
+ *              NULL if failed.
+ *==========================================================================*/
+mm_channel_t * mm_camera_util_get_channel_by_handler(
+                                    mm_camera_obj_t * cam_obj,
+                                    uint32_t handler)
+{
+    int idx;
+
+    /* Scan the fixed-size channel table for a matching handle. */
+    for (idx = 0; idx < MM_CAMERA_CHANNEL_MAX; idx++) {
+        if (cam_obj->ch[idx].my_hdl == handler) {
+            return &cam_obj->ch[idx];
+        }
+    }
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_chip_is_a_family
+ *
+ * DESCRIPTION: report whether this build targets an A-family chip
+ *              (decided at compile time by the USE_A_FAMILY define)
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : TRUE if A family.
+ *              FALSE otherwise.
+ *==========================================================================*/
+uint8_t mm_camera_util_chip_is_a_family(void)
+{
+#ifdef USE_A_FAMILY
+    const uint8_t is_a_family = TRUE;
+#else
+    const uint8_t is_a_family = FALSE;
+#endif
+    return is_a_family;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_dispatch_app_event
+ *
+ * DESCRIPTION: fan an incoming event out to every application callback
+ *              registered on this camera object
+ *
+ * PARAMETERS :
+ *   @cmd_cb: ptr to a struct storing event info
+ *   @user_data: user data ptr (camera object)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_camera_dispatch_app_event(mm_camera_cmdcb_t *cmd_cb,
+                                         void* user_data)
+{
+    int idx;
+    mm_camera_event_t *event = &cmd_cb->u.evt;
+    mm_camera_obj_t *my_obj = (mm_camera_obj_t *)user_data;
+
+    if (NULL == my_obj) {
+        return;
+    }
+
+    mm_camera_cmd_thread_name(my_obj->evt_thread.threadName);
+
+    /* Hold cb_lock so callback (un)registration cannot race dispatch. */
+    pthread_mutex_lock(&my_obj->cb_lock);
+    for (idx = 0; idx < MM_CAMERA_EVT_ENTRY_MAX; idx++) {
+        if (my_obj->evt.evt[idx].evt_cb) {
+            my_obj->evt.evt[idx].evt_cb(my_obj->my_hdl,
+                    event,
+                    my_obj->evt.evt[idx].user_data);
+        }
+    }
+    pthread_mutex_unlock(&my_obj->cb_lock);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_event_notify
+ *
+ * DESCRIPTION: callback to handle event notify from kernel. Dequeues one
+ *              V4L2 event from the control fd and routes it: most events
+ *              are re-queued to the event cmd thread; map/unmap completion
+ *              instead wakes any thread blocked on evt_cond.
+ *
+ * PARAMETERS :
+ *   @user_data: user data ptr (camera object)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_camera_event_notify(void* user_data)
+{
+    struct v4l2_event ev;
+    struct msm_v4l2_event_data *msm_evt = NULL;
+    int rc;
+    mm_camera_event_t evt;
+    memset(&evt, 0, sizeof(mm_camera_event_t));
+
+    mm_camera_obj_t *my_obj = (mm_camera_obj_t*)user_data;
+    if (NULL != my_obj) {
+        /* read evt */
+        memset(&ev, 0, sizeof(ev));
+        rc = ioctl(my_obj->ctrl_fd, VIDIOC_DQEVENT, &ev);
+
+        /* Only events tagged MSM_CAMERA_MSM_NOTIFY are ours to handle. */
+        if (rc >= 0 && ev.id == MSM_CAMERA_MSM_NOTIFY) {
+            msm_evt = (struct msm_v4l2_event_data *)ev.u.data;
+            switch (msm_evt->command) {
+            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
+                /* forwarded to the evt cmd thread for async handling */
+                evt.server_event_type = CAM_EVENT_TYPE_DAEMON_PULL_REQ;
+                mm_camera_enqueue_evt(my_obj, &evt);
+                break;
+            case CAM_EVENT_TYPE_MAP_UNMAP_DONE:
+                /* handled synchronously: record status and wake the
+                 * waiter blocked on evt_cond (see evt_lock holders) */
+                pthread_mutex_lock(&my_obj->evt_lock);
+                my_obj->evt_rcvd.server_event_type = msm_evt->command;
+                my_obj->evt_rcvd.status = msm_evt->status;
+                pthread_cond_signal(&my_obj->evt_cond);
+                pthread_mutex_unlock(&my_obj->evt_lock);
+                break;
+            case CAM_EVENT_TYPE_INT_TAKE_JPEG:
+            case CAM_EVENT_TYPE_INT_TAKE_RAW:
+                {
+                    evt.server_event_type = msm_evt->command;
+                    mm_camera_enqueue_evt(my_obj, &evt);
+                }
+                break;
+            case MSM_CAMERA_PRIV_SHUTDOWN:
+                {
+                    /* daemon died: surface as DAEMON_DIED to the app */
+                    LOGE("Camera Event DAEMON DIED received");
+                    evt.server_event_type = CAM_EVENT_TYPE_DAEMON_DIED;
+                    mm_camera_enqueue_evt(my_obj, &evt);
+                }
+                break;
+            case CAM_EVENT_TYPE_CAC_DONE:
+                {
+                    evt.server_event_type = CAM_EVENT_TYPE_CAC_DONE;
+                    mm_camera_enqueue_evt(my_obj, &evt);
+                }
+                break;
+            default:
+                /* unknown commands are silently ignored */
+                break;
+            }
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_enqueue_evt
+ *
+ * DESCRIPTION: copy an event into a freshly allocated node, push it onto
+ *              the event cmd thread's queue and wake that thread
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a camera object
+ *   @event    : event to be queued
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_enqueue_evt(mm_camera_obj_t *my_obj,
+                              mm_camera_event_t *event)
+{
+    mm_camera_cmdcb_t *node =
+            (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+
+    if (NULL == node) {
+        LOGE("No memory for mm_camera_node_t");
+        return -1;
+    }
+
+    memset(node, 0, sizeof(mm_camera_cmdcb_t));
+    node->cmd_type = MM_CAMERA_CMD_TYPE_EVT_CB;
+    node->u.evt = *event;
+
+    /* enqueue to evt cmd thread */
+    cam_queue_enq(&(my_obj->evt_thread.cmd_queue), node);
+    /* wake up evt cmd thread */
+    cam_sem_post(&(my_obj->evt_thread.cmd_sem));
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_open
+ *
+ * DESCRIPTION: open a camera: open the /dev/video node (retrying on
+ *              transient errors), obtain a session id, open the daemon
+ *              domain socket or the direct module session, then launch
+ *              the event dispatch and event poll threads.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a camera object
+ *
+ * RETURN     : int32_t type of status
+ *              0       -- success
+ *              -EUSERS -- device/session busy
+ *              -1      -- other failure
+ *
+ * NOTE       : caller holds my_obj->cam_lock; it is released on every exit
+ *              path so other cameras are not blocked in dual-camera use.
+ *==========================================================================*/
+int32_t mm_camera_open(mm_camera_obj_t *my_obj)
+{
+    char dev_name[MM_CAMERA_DEV_NAME_LEN];
+    int32_t rc = 0;
+    int8_t n_try=MM_CAMERA_DEV_OPEN_TRIES;
+    uint8_t sleep_msec=MM_CAMERA_DEV_OPEN_RETRY_SLEEP;
+    int cam_idx = 0;
+    const char *dev_name_value = NULL;
+    int l_errno = 0;
+
+    LOGD("begin\n");
+
+    if (NULL == my_obj) {
+        /* Bug fix: the original jumped to on_error here, and on_error
+         * unconditionally unlocked my_obj->cam_lock — a NULL pointer
+         * dereference. With no object there is no lock to release. */
+        LOGE("Invalid camera object\n");
+        return -1;
+    }
+    dev_name_value = mm_camera_util_get_dev_name(my_obj->my_hdl);
+    if (NULL == dev_name_value) {
+        LOGE("Invalid device name\n");
+        rc = -1;
+        goto on_error;
+    }
+    snprintf(dev_name, sizeof(dev_name), "/dev/%s",
+             dev_name_value);
+    sscanf(dev_name, "/dev/video%d", &cam_idx);
+    LOGD("dev name = %s, cam_idx = %d", dev_name, cam_idx);
+
+    /* Retry the open only on transient I/O errors (EIO/ETIMEDOUT).
+     * Use the saved l_errno so intermediate log calls cannot clobber
+     * the value being tested. */
+    do{
+        n_try--;
+        errno = 0;
+        my_obj->ctrl_fd = open(dev_name, O_RDWR | O_NONBLOCK);
+        l_errno = errno;
+        LOGD("ctrl_fd = %d, errno == %d", my_obj->ctrl_fd, l_errno);
+        if((my_obj->ctrl_fd >= 0) ||
+                (l_errno != EIO && l_errno != ETIMEDOUT) || (n_try <= 0 )) {
+            break;
+        }
+        LOGE("Failed with %s error, retrying after %d milli-seconds",
+              strerror(l_errno), sleep_msec);
+        usleep(sleep_msec * 1000U);
+    }while (n_try > 0);
+
+    if (my_obj->ctrl_fd < 0) {
+        LOGE("cannot open control fd of '%s' (%s)\n",
+                  dev_name, strerror(l_errno));
+        if (l_errno == EBUSY)
+            rc = -EUSERS;
+        else
+            rc = -1;
+        goto on_error;
+    } else {
+        mm_camera_get_session_id(my_obj, &my_obj->sessionid);
+        LOGH("Camera Opened id = %d sessionid = %d", cam_idx, my_obj->sessionid);
+    }
+
+#ifdef DAEMON_PRESENT
+    /* open domain socket*/
+    n_try = MM_CAMERA_DEV_OPEN_TRIES;
+    do {
+        n_try--;
+        my_obj->ds_fd = mm_camera_socket_create(cam_idx, MM_CAMERA_SOCK_TYPE_UDP);
+        l_errno = errno;
+        LOGD("ds_fd = %d, errno = %d", my_obj->ds_fd, l_errno);
+        if((my_obj->ds_fd >= 0) || (n_try <= 0 )) {
+            LOGD("opened, break out while loop");
+            break;
+        }
+        LOGD("failed with I/O error retrying after %d milli-seconds",
+              sleep_msec);
+        usleep(sleep_msec * 1000U);
+    } while (n_try > 0);
+
+    if (my_obj->ds_fd < 0) {
+        LOGE("cannot open domain socket fd of '%s'(%s)\n",
+                  dev_name, strerror(l_errno));
+        rc = -1;
+        goto on_error;
+    }
+#else /* DAEMON_PRESENT */
+    cam_status_t cam_status;
+    cam_status = mm_camera_module_open_session(my_obj->sessionid,
+            mm_camera_module_event_handler);
+    if (cam_status < 0) {
+        LOGE("Failed to open session");
+        if (cam_status == CAM_STATUS_BUSY) {
+            rc = -EUSERS;
+        } else {
+            rc = -1;
+        }
+        goto on_error;
+    }
+#endif /* DAEMON_PRESENT */
+
+    pthread_mutex_init(&my_obj->msg_lock, NULL);
+    pthread_mutex_init(&my_obj->cb_lock, NULL);
+    pthread_mutex_init(&my_obj->evt_lock, NULL);
+    pthread_cond_init(&my_obj->evt_cond, NULL);
+
+    LOGD("Launch evt Thread in Cam Open");
+    snprintf(my_obj->evt_thread.threadName, THREAD_NAME_SIZE, "CAM_Dispatch");
+    mm_camera_cmd_thread_launch(&my_obj->evt_thread,
+                                mm_camera_dispatch_app_event,
+                                (void *)my_obj);
+
+    /* launch event poll thread
+     * we will add evt fd into event poll thread upon user first register for evt */
+    LOGD("Launch evt Poll Thread in Cam Open");
+    snprintf(my_obj->evt_poll_thread.threadName, THREAD_NAME_SIZE, "CAM_evntPoll");
+    mm_camera_poll_thread_launch(&my_obj->evt_poll_thread,
+                                 MM_CAMERA_POLL_TYPE_EVT);
+    mm_camera_evt_sub(my_obj, TRUE);
+
+    /* unlock cam_lock, we need release global intf_lock in camera_open(),
+     * in order not block operation of other Camera in dual camera use case.*/
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    LOGD("end (rc = %d)\n", rc);
+    return rc;
+
+on_error:
+    /* my_obj is guaranteed non-NULL here (checked at function entry). */
+    if (my_obj->ctrl_fd >= 0) {
+        close(my_obj->ctrl_fd);
+        my_obj->ctrl_fd = -1;
+    }
+#ifdef DAEMON_PRESENT
+    if (my_obj->ds_fd >= 0) {
+        mm_camera_socket_close(my_obj->ds_fd);
+        my_obj->ds_fd = -1;
+    }
+#endif
+
+    /* unlock cam_lock, we need release global intf_lock in camera_open(),
+     * in order not block operation of other Camera in dual camera use case.*/
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_close
+ *
+ * DESCRIPTION: close a camera: unsubscribe from kernel events, tear down
+ *              the event poll and event cmd threads, close the control fd
+ *              (and the domain socket when a daemon is present), and
+ *              destroy the per-object locks.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a camera object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success (this function always succeeds)
+ *
+ * NOTE       : caller holds my_obj->cam_lock; it is released here.
+ *==========================================================================*/
+int32_t mm_camera_close(mm_camera_obj_t *my_obj)
+{
+    LOGD("unsubscribe evt");
+
+#ifndef DAEMON_PRESENT
+    /* daemon-less build: tear down the direct module session */
+    mm_camera_module_close_session(my_obj->sessionid);
+#endif /* DAEMON_PRESENT */
+
+    mm_camera_evt_sub(my_obj, FALSE);
+
+    LOGD("Close evt Poll Thread in Cam Close");
+    mm_camera_poll_thread_release(&my_obj->evt_poll_thread);
+
+    LOGD("Close evt cmd Thread in Cam Close");
+    mm_camera_cmd_thread_release(&my_obj->evt_thread);
+
+    if(my_obj->ctrl_fd >= 0) {
+        close(my_obj->ctrl_fd);
+        my_obj->ctrl_fd = -1;
+    }
+
+#ifdef DAEMON_PRESENT
+    if(my_obj->ds_fd >= 0) {
+        mm_camera_socket_close(my_obj->ds_fd);
+        my_obj->ds_fd = -1;
+    }
+#endif
+
+    /* locks were initialized in mm_camera_open() */
+    pthread_mutex_destroy(&my_obj->msg_lock);
+    pthread_mutex_destroy(&my_obj->cb_lock);
+    pthread_mutex_destroy(&my_obj->evt_lock);
+    pthread_cond_destroy(&my_obj->evt_cond);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_register_event_notify_internal
+ *
+ * DESCRIPTION: register (evt_cb non-NULL) or unregister (evt_cb NULL) an
+ *              event notify callback in the fixed-size callback table
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a camera object
+ *   @evt_cb   : callback to be registered to handle event notify
+ *   @user_data: user data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure (table full, or user_data not found)
+ *==========================================================================*/
+int32_t mm_camera_register_event_notify_internal(mm_camera_obj_t *my_obj,
+                                                 mm_camera_event_notify_t evt_cb,
+                                                 void * user_data)
+{
+    int idx;
+    int rc = -1;
+    mm_camera_evt_obj_t *evt_array = NULL;
+
+    pthread_mutex_lock(&my_obj->cb_lock);
+    evt_array = &my_obj->evt;
+
+    if (evt_cb) {
+        /* register: claim the first free slot */
+        for (idx = 0; idx < MM_CAMERA_EVT_ENTRY_MAX; idx++) {
+            if (NULL == evt_array->evt[idx].user_data) {
+                evt_array->evt[idx].evt_cb = evt_cb;
+                evt_array->evt[idx].user_data = user_data;
+                evt_array->reg_count++;
+                rc = 0;
+                break;
+            }
+        }
+    } else {
+        /* unregister: release the slot owned by user_data */
+        for (idx = 0; idx < MM_CAMERA_EVT_ENTRY_MAX; idx++) {
+            if (user_data == evt_array->evt[idx].user_data) {
+                evt_array->evt[idx].evt_cb = NULL;
+                evt_array->evt[idx].user_data = NULL;
+                evt_array->reg_count--;
+                rc = 0;
+                break;
+            }
+        }
+    }
+
+    pthread_mutex_unlock(&my_obj->cb_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_register_event_notify
+ *
+ * DESCRIPTION: public wrapper around the internal (un)register routine;
+ *              releases the camera lock afterwards
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a camera object
+ *   @evt_cb   : callback to be registered to handle event notify
+ *   @user_data: user data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_register_event_notify(mm_camera_obj_t *my_obj,
+                                        mm_camera_event_notify_t evt_cb,
+                                        void * user_data)
+{
+    int rc = mm_camera_register_event_notify_internal(my_obj,
+            evt_cb, user_data);
+
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_qbuf
+ *
+ * DESCRIPTION: return a buffer to the kernel via its owning channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @buf          : buf ptr to be enqueued
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_qbuf(mm_camera_obj_t *my_obj,
+                       uint32_t ch_id,
+                       mm_camera_buf_def_t *buf)
+{
+    int rc = -1;
+    mm_channel_t *channel =
+            mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    pthread_mutex_unlock(&my_obj->cam_lock);
+
+    /* qbuf happens within the dataCB context before the channel/stream
+     * can be fully stopped; ch_lock is deliberately not taken here to
+     * avoid deadlock. */
+    if (channel != NULL) {
+        rc = mm_channel_qbuf(channel, buf);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_get_queued_buf_count
+ *
+ * DESCRIPTION: ask a channel how many buffers a stream currently has queued
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @stream_id : stream id
+ *
+ * RETURN     : queued buffer count (>=0), or -1 on bad channel handle
+ *==========================================================================*/
+int32_t mm_camera_get_queued_buf_count(mm_camera_obj_t *my_obj,
+        uint32_t ch_id, uint32_t stream_id)
+{
+    int rc = -1;
+    uint32_t payload = stream_id;
+    mm_channel_t *channel =
+            mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (channel == NULL) {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+        return rc;
+    }
+
+    /* hand off from the camera lock to the channel lock */
+    pthread_mutex_lock(&channel->ch_lock);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    rc = mm_channel_fsm_fn(channel,
+            MM_CHANNEL_EVT_GET_STREAM_QUEUED_BUF_COUNT,
+            (void *)&payload,
+            NULL);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_query_capability
+ *
+ * DESCRIPTION: query camera capability. With a daemon present this is a
+ *              VIDIOC_QUERYCAP ioctl on the control fd; without one it is
+ *              a shim GET_PARM command sent straight to the camera module.
+ *
+ * PARAMETERS :
+ *   @my_obj: camera object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_query_capability(mm_camera_obj_t *my_obj)
+{
+    int32_t rc = 0;
+
+#ifdef DAEMON_PRESENT
+    struct v4l2_capability cap;
+    /* get camera capabilities */
+    memset(&cap, 0, sizeof(cap));
+    rc = ioctl(my_obj->ctrl_fd, VIDIOC_QUERYCAP, &cap);
+#else /* DAEMON_PRESENT */
+    /* daemon-less path: build and send a shim packet to the module */
+    cam_shim_packet_t *shim_cmd;
+    cam_shim_cmd_data shim_cmd_data;
+    memset(&shim_cmd_data, 0, sizeof(shim_cmd_data));
+    shim_cmd_data.command = MSM_CAMERA_PRIV_QUERY_CAP;
+    shim_cmd_data.stream_id = 0;
+    shim_cmd_data.value = NULL;
+    shim_cmd = mm_camera_create_shim_cmd_packet(CAM_SHIM_GET_PARM,
+            my_obj->sessionid,&shim_cmd_data);
+    rc = mm_camera_module_send_cmd(shim_cmd);
+    mm_camera_destroy_shim_cmd_packet(shim_cmd);
+#endif /* DAEMON_PRESENT */
+    if (rc != 0) {
+        LOGE("cannot get camera capabilities, rc = %d, errno %d",
+                rc, errno);
+    }
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_set_parms
+ *
+ * DESCRIPTION: push the parameter buffer to the server via CAM_PRIV_PARM
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @parms        : ptr to a param struct to be set to server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of parameters to be set
+ *              are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_camera_set_parms(mm_camera_obj_t *my_obj,
+                            parm_buffer_t *parms)
+{
+    int32_t value = 0;
+    int32_t rc = -1;
+
+    if (NULL != parms) {
+        rc = mm_camera_util_s_ctrl(my_obj, 0, my_obj->ctrl_fd,
+                CAM_PRIV_PARM, &value);
+    }
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_get_parms
+ *
+ * DESCRIPTION: fetch the parameter buffer from the server via CAM_PRIV_PARM
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @parms        : ptr to a param struct to be get from server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Parameters to be get from server are already
+ *              filled in by upper layer caller. After this call, corresponding
+ *              fields of requested parameters will be filled in by server with
+ *              detailed information.
+ *==========================================================================*/
+int32_t mm_camera_get_parms(mm_camera_obj_t *my_obj,
+                            parm_buffer_t *parms)
+{
+    int32_t value = 0;
+    int32_t rc = -1;
+
+    if (NULL != parms) {
+        rc = mm_camera_util_g_ctrl(my_obj, 0, my_obj->ctrl_fd,
+                CAM_PRIV_PARM, &value);
+    }
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_do_auto_focus
+ *
+ * DESCRIPTION: kick off an auto-focus cycle
+ *
+ * PARAMETERS :
+ *   @my_obj: camera object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : if this call succeeds, an auto_focus event will always
+ *              follow.
+ *==========================================================================*/
+int32_t mm_camera_do_auto_focus(mm_camera_obj_t *my_obj)
+{
+    int32_t value = 0;
+    int32_t rc;
+
+    rc = mm_camera_util_s_ctrl(my_obj, 0, my_obj->ctrl_fd,
+            CAM_PRIV_DO_AUTO_FOCUS, &value);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_cancel_auto_focus
+ *
+ * DESCRIPTION: abort an in-progress auto-focus cycle
+ *
+ * PARAMETERS :
+ *   @my_obj: camera object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_cancel_auto_focus(mm_camera_obj_t *my_obj)
+{
+    int32_t value = 0;
+    int32_t rc;
+
+    rc = mm_camera_util_s_ctrl(my_obj, 0, my_obj->ctrl_fd,
+            CAM_PRIV_CANCEL_AUTO_FOCUS, &value);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_prepare_snapshot
+ *
+ * DESCRIPTION: prepare hardware for snapshot
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @do_af_flag   : flag indicating if AF is needed
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_prepare_snapshot(mm_camera_obj_t *my_obj,
+                                   int32_t do_af_flag)
+{
+    /* the AF flag rides along as the control value */
+    int32_t value = do_af_flag;
+    int32_t rc;
+
+    rc = mm_camera_util_s_ctrl(my_obj, 0, my_obj->ctrl_fd,
+            CAM_PRIV_PREPARE_SNAPSHOT, &value);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_start_zsl_snapshot
+ *
+ * DESCRIPTION: start zsl snapshot
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : unlike most siblings, this does not release cam_lock;
+ *              the caller manages the lock around this call.
+ *==========================================================================*/
+int32_t mm_camera_start_zsl_snapshot(mm_camera_obj_t *my_obj)
+{
+    int32_t value = 0;
+
+    return mm_camera_util_s_ctrl(my_obj, 0, my_obj->ctrl_fd,
+            CAM_PRIV_START_ZSL_SNAPSHOT, &value);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_stop_zsl_snapshot
+ *
+ * DESCRIPTION: stop zsl capture
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : like mm_camera_start_zsl_snapshot, this does not release
+ *              cam_lock; the caller manages the lock around this call.
+ *==========================================================================*/
+int32_t mm_camera_stop_zsl_snapshot(mm_camera_obj_t *my_obj)
+{
+    int32_t rc = -1;
+    /* Bug fix: 'value' was uninitialized before being passed by address
+     * to the control call; initialize to 0 like the start variant. */
+    int32_t value = 0;
+    rc = mm_camera_util_s_ctrl(my_obj, 0, my_obj->ctrl_fd,
+             CAM_PRIV_STOP_ZSL_SNAPSHOT, &value);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_flush
+ *
+ * DESCRIPTION: flush the current camera state and buffers
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : releases my_obj->cam_lock before returning.
+ *==========================================================================*/
+int32_t mm_camera_flush(mm_camera_obj_t *my_obj)
+{
+    int32_t rc = -1;
+    /* Bug fix: 'value' was uninitialized before being passed by address
+     * to the control call; initialize to 0 for a deterministic request. */
+    int32_t value = 0;
+    rc = mm_camera_util_s_ctrl(my_obj, 0, my_obj->ctrl_fd,
+            CAM_PRIV_FLUSH, &value);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_add_channel
+ *
+ * DESCRIPTION: claim a free channel slot, initialize it and return its
+ *              freshly generated handle
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @attr         : bundle attribute of the channel if needed
+ *   @channel_cb   : callback function for bundle data notify
+ *   @userdata     : user data ptr
+ *
+ * RETURN     : uint32_t type of channel handle
+ *              0  -- invalid channel handle, meaning the op failed
+ *              >0 -- successfully added a channel with a valid handle
+ * NOTE       : if no bundle data notify is needed, meaning each stream in the
+ *              channel will have its own stream data notify callback, then
+ *              attr, channel_cb, and userdata can be NULL. In this case,
+ *              no matching logic will be performed in channel for the bundling.
+ *==========================================================================*/
+uint32_t mm_camera_add_channel(mm_camera_obj_t *my_obj,
+                               mm_camera_channel_attr_t *attr,
+                               mm_camera_buf_notify_t channel_cb,
+                               void *userdata)
+{
+    uint32_t ch_hdl = 0;
+    uint8_t idx;
+    mm_channel_t *ch_obj = NULL;
+
+    /* find the first unused slot in the channel table */
+    for (idx = 0; idx < MM_CAMERA_CHANNEL_MAX; idx++) {
+        if (my_obj->ch[idx].state == MM_CHANNEL_STATE_NOTUSED) {
+            ch_obj = &my_obj->ch[idx];
+            break;
+        }
+    }
+
+    if (ch_obj != NULL) {
+        /* initialize channel obj */
+        memset(ch_obj, 0, sizeof(mm_channel_t));
+        ch_hdl = mm_camera_util_generate_handler(idx);
+        ch_obj->my_hdl = ch_hdl;
+        ch_obj->state = MM_CHANNEL_STATE_STOPPED;
+        ch_obj->cam_obj = my_obj;
+        pthread_mutex_init(&ch_obj->ch_lock, NULL);
+        ch_obj->sessionid = my_obj->sessionid;
+        mm_channel_init(ch_obj, attr, channel_cb, userdata);
+    }
+
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return ch_hdl;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_del_channel
+ *
+ * DESCRIPTION: delete a channel by its handle
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : all streams in the channel should be stopped already before
+ *              this channel can be deleted.
+ *==========================================================================*/
+int32_t mm_camera_del_channel(mm_camera_obj_t *my_obj,
+                              uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_channel_t *channel =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (channel == NULL) {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+        return rc;
+    }
+
+    /* hand off from the camera lock to the channel lock */
+    pthread_mutex_lock(&channel->ch_lock);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+
+    rc = mm_channel_fsm_fn(channel,
+                           MM_CHANNEL_EVT_DELETE,
+                           NULL,
+                           NULL);
+
+    /* scrub the slot so it can be reused by mm_camera_add_channel */
+    pthread_mutex_destroy(&channel->ch_lock);
+    memset(channel, 0, sizeof(mm_channel_t));
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_get_bundle_info
+ *
+ * DESCRIPTION: query bundle info of the channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @bundle_info  : bundle info to be filled in
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_get_bundle_info(mm_camera_obj_t *my_obj,
+                                  uint32_t ch_id,
+                                  cam_bundle_config_t *bundle_info)
+{
+    int32_t rc = -1;
+    mm_channel_t *channel =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (channel == NULL) {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+        return rc;
+    }
+
+    /* hand off from the camera lock to the channel lock */
+    pthread_mutex_lock(&channel->ch_lock);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+
+    rc = mm_channel_fsm_fn(channel,
+                           MM_CHANNEL_EVT_GET_BUNDLE_INFO,
+                           (void *)bundle_info,
+                           NULL);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_link_stream
+ *
+ * DESCRIPTION: link a stream into a channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel that currently owns the stream
+ *   @stream_id    : stream that will be linked
+ *   @linked_ch_id : channel in which the stream will be linked
+ *
+ * RETURN     : uint32_t type of stream handle
+ *              0  -- invalid stream handle, meaning the op failed
+ *              >0 -- successfully linked a stream with a valid handle
+ *==========================================================================*/
+uint32_t mm_camera_link_stream(mm_camera_obj_t *my_obj,
+        uint32_t ch_id,
+        uint32_t stream_id,
+        uint32_t linked_ch_id)
+{
+    uint32_t s_hdl = 0;
+    /* ch_obj is the DESTINATION channel (linked_ch_id); owner_obj is the
+     * channel that currently owns the stream (ch_id). */
+    mm_channel_t * ch_obj =
+            mm_camera_util_get_channel_by_handler(my_obj, linked_ch_id);
+    mm_channel_t * owner_obj =
+            mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if ((NULL != ch_obj) && (NULL != owner_obj)) {
+        /* cam_lock held on entry; only the destination channel is locked.
+         * ch_lock is presumably released inside mm_channel_fsm_fn. */
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        mm_camera_stream_link_t stream_link;
+        memset(&stream_link, 0, sizeof(mm_camera_stream_link_t));
+        stream_link.ch = owner_obj;
+        stream_link.stream_id = stream_id;
+        /* the new stream handle (if any) is written into s_hdl */
+        mm_channel_fsm_fn(ch_obj,
+                          MM_CHANNEL_EVT_LINK_STREAM,
+                          (void*)&stream_link,
+                          (void*)&s_hdl);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return s_hdl;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_add_stream
+ *
+ * DESCRIPTION: add a stream into a channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : uint32_t type of stream handle
+ *              0  -- invalid stream handle, meaning the op failed
+ *              >0 -- successfully added a stream with a valid handle
+ *==========================================================================*/
+uint32_t mm_camera_add_stream(mm_camera_obj_t *my_obj,
+                              uint32_t ch_id)
+{
+    uint32_t s_hdl = 0;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        /* cam_lock held on entry; swap to ch_lock before releasing it.
+         * ch_lock is presumably released inside mm_channel_fsm_fn. */
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        /* the new stream handle (if any) is written into s_hdl */
+        mm_channel_fsm_fn(ch_obj,
+                          MM_CHANNEL_EVT_ADD_STREAM,
+                          NULL,
+                          (void *)&s_hdl);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return s_hdl;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_del_stream
+ *
+ * DESCRIPTION: delete a stream by its handle
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @stream_id    : stream handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : stream should be stopped already before it can be deleted.
+ *==========================================================================*/
+int32_t mm_camera_del_stream(mm_camera_obj_t *my_obj,
+                             uint32_t ch_id,
+                             uint32_t stream_id)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        /* cam_lock held on entry; swap to ch_lock before releasing it.
+         * ch_lock is presumably released inside mm_channel_fsm_fn. */
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_DEL_STREAM,
+                               (void *)&stream_id,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_start_zsl_snapshot_ch
+ *
+ * DESCRIPTION: starts zsl snapshot for specific channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_start_zsl_snapshot_ch(mm_camera_obj_t *my_obj,
+        uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        /* cam_lock held on entry; swap to ch_lock before releasing it.
+         * ch_lock is presumably released inside mm_channel_fsm_fn. */
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_START_ZSL_SNAPSHOT,
+                               NULL,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_stop_zsl_snapshot_ch
+ *
+ * DESCRIPTION: stops zsl snapshot for specific channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_stop_zsl_snapshot_ch(mm_camera_obj_t *my_obj,
+        uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        /* cam_lock held on entry; swap to ch_lock before releasing it.
+         * ch_lock is presumably released inside mm_channel_fsm_fn. */
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_STOP_ZSL_SNAPSHOT,
+                               NULL,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_config_stream
+ *
+ * DESCRIPTION: configure a stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @stream_id    : stream handle
+ *   @config       : stream configuration
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_config_stream(mm_camera_obj_t *my_obj,
+                                uint32_t ch_id,
+                                uint32_t stream_id,
+                                mm_camera_stream_config_t *config)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+    mm_evt_paylod_config_stream_t payload;
+
+    if (NULL != ch_obj) {
+        /* cam_lock held on entry; swap to ch_lock before releasing it.
+         * ch_lock is presumably released inside mm_channel_fsm_fn. */
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        /* payload only borrows @config for the duration of the call */
+        memset(&payload, 0, sizeof(mm_evt_paylod_config_stream_t));
+        payload.stream_id = stream_id;
+        payload.config = config;
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_CONFIG_STREAM,
+                               (void *)&payload,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_start_channel
+ *
+ * DESCRIPTION: start a channel, which will start all streams in the channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_start_channel(mm_camera_obj_t *my_obj, uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        /* cam_lock held on entry; swap to ch_lock before releasing it.
+         * ch_lock is presumably released inside mm_channel_fsm_fn. */
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_START,
+                               NULL,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_stop_channel
+ *
+ * DESCRIPTION: stop a channel, which will stop all streams in the channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success (also returned when @ch_id matches no channel)
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_stop_channel(mm_camera_obj_t *my_obj,
+                               uint32_t ch_id)
+{
+    /* NOTE(review): rc starts at 0, so an unknown ch_id reports success --
+     * unlike the -1 convention of the sibling APIs in this file. Confirm
+     * whether callers depend on stop-of-stale-handle being a no-op. */
+    int32_t rc = 0;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        /* cam_lock held on entry; swap to ch_lock before releasing it.
+         * ch_lock is presumably released inside mm_channel_fsm_fn. */
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_STOP,
+                               NULL,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_request_super_buf
+ *
+ * DESCRIPTION: for burst mode in bundle, request certain amount of matched
+ *              frames from superbuf queue
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @buf          : super buf request payload (presumably carries the number
+ *                   of matched frames needed -- see mm_camera_req_buf_t)
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure (including NULL @buf or unknown @ch_id)
+ *==========================================================================*/
+int32_t mm_camera_request_super_buf(mm_camera_obj_t *my_obj,
+        uint32_t ch_id, mm_camera_req_buf_t *buf)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if ((NULL != ch_obj) && (buf != NULL)) {
+        /* cam_lock held on entry; swap to ch_lock before releasing it.
+         * ch_lock is presumably released inside mm_channel_fsm_fn. */
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj, MM_CHANNEL_EVT_REQUEST_SUPER_BUF,
+                (void *)buf, NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_cancel_super_buf_request
+ *
+ * DESCRIPTION: for burst mode in bundle, cancel the request for certain amount
+ *              of matched frames from superbuf queue
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_cancel_super_buf_request(mm_camera_obj_t *my_obj, uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        /* cam_lock held on entry; swap to ch_lock before releasing it.
+         * ch_lock is presumably released inside mm_channel_fsm_fn. */
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_CANCEL_REQUEST_SUPER_BUF,
+                               NULL,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_flush_super_buf_queue
+ *
+ * DESCRIPTION: flush out all frames in the superbuf queue
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @frame_idx    : frame index passed along with the flush event
+ *                   (exact semantics defined by the mm_channel layer)
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_flush_super_buf_queue(mm_camera_obj_t *my_obj, uint32_t ch_id,
+                                                             uint32_t frame_idx)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        /* cam_lock held on entry; swap to ch_lock before releasing it.
+         * ch_lock is presumably released inside mm_channel_fsm_fn. */
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_FLUSH_SUPER_BUF_QUEUE,
+                               (void *)&frame_idx,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_config_channel_notify
+ *
+ * DESCRIPTION: configures the channel notification mode
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @notify_mode  : notification mode
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_config_channel_notify(mm_camera_obj_t *my_obj,
+                                        uint32_t ch_id,
+                                        mm_camera_super_buf_notify_mode_t notify_mode)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        /* cam_lock held on entry; swap to ch_lock before releasing it.
+         * ch_lock is presumably released inside mm_channel_fsm_fn. */
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_CONFIG_NOTIFY_MODE,
+                               (void *)&notify_mode,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_set_stream_parms
+ *
+ * DESCRIPTION: set parameters per stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @s_id         : stream handle
+ *   @parms        : ptr to a param struct to be set to server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of parameters to be set
+ *              are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_camera_set_stream_parms(mm_camera_obj_t *my_obj,
+                                   uint32_t ch_id,
+                                   uint32_t s_id,
+                                   cam_stream_parm_buffer_t *parms)
+{
+    int32_t rc = -1;
+    mm_evt_paylod_set_get_stream_parms_t payload;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        /* cam_lock held on entry; swap to ch_lock before releasing it.
+         * ch_lock is presumably released inside mm_channel_fsm_fn. */
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        /* payload only borrows @parms for the duration of the call */
+        memset(&payload, 0, sizeof(payload));
+        payload.stream_id = s_id;
+        payload.parms = parms;
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_SET_STREAM_PARM,
+                               (void *)&payload,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_get_stream_parms
+ *
+ * DESCRIPTION: get parameters per stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @s_id         : stream handle
+ *   @parms        : ptr to a param struct to be get from server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Parameters to be get from server are already
+ *              filled in by upper layer caller. After this call, corresponding
+ *              fields of requested parameters will be filled in by server with
+ *              detailed information.
+ *==========================================================================*/
+int32_t mm_camera_get_stream_parms(mm_camera_obj_t *my_obj,
+                                   uint32_t ch_id,
+                                   uint32_t s_id,
+                                   cam_stream_parm_buffer_t *parms)
+{
+    int32_t rc = -1;
+    mm_evt_paylod_set_get_stream_parms_t payload;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        /* cam_lock held on entry; swap to ch_lock before releasing it.
+         * ch_lock is presumably released inside mm_channel_fsm_fn. */
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        /* payload only borrows @parms for the duration of the call */
+        memset(&payload, 0, sizeof(payload));
+        payload.stream_id = s_id;
+        payload.parms = parms;
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_GET_STREAM_PARM,
+                               (void *)&payload,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_do_stream_action
+ *
+ * DESCRIPTION: request server to perform stream based action. May be removed
+ *              later if the functionality is included in mm_camera_set_parms
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @stream_id    : stream handle
+ *   @actions      : ptr to an action struct buf to be performed by server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the action struct buf is already mapped to server via
+ *              domain socket. Actions to be performed by server are already
+ *              filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_camera_do_stream_action(mm_camera_obj_t *my_obj,
+                                   uint32_t ch_id,
+                                   uint32_t stream_id,
+                                   void *actions)
+{
+    int32_t rc = -1;
+    mm_evt_paylod_do_stream_action_t payload;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        /* cam_lock held on entry; swap to ch_lock before releasing it.
+         * ch_lock is presumably released inside mm_channel_fsm_fn. */
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        /* payload only borrows @actions for the duration of the call */
+        memset(&payload, 0, sizeof(payload));
+        payload.stream_id = stream_id;
+        payload.actions = actions;
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_DO_STREAM_ACTION,
+                               (void*)&payload,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_map_stream_buf
+ *
+ * DESCRIPTION: mapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @stream_id    : stream handle
+ *   @buf_type     : type of buffer to be mapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @buf_idx      : index of buffer within the stream buffers, only valid if
+ *                   buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx    : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plane_idx is the
+ *                   index to plane (0..num_of_planes)
+ *   @fd           : file descriptor of the buffer
+ *   @size         : size of the buffer
+ *   @buffer       : buffer pointer carried in the mapping payload
+ *                   (consumed by the mm_channel layer)
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_map_stream_buf(mm_camera_obj_t *my_obj,
+                                 uint32_t ch_id,
+                                 uint32_t stream_id,
+                                 uint8_t buf_type,
+                                 uint32_t buf_idx,
+                                 int32_t plane_idx,
+                                 int fd,
+                                 size_t size,
+                                 void *buffer)
+{
+    int32_t rc = -1;
+    cam_buf_map_type payload;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        /* cam_lock held on entry; swap to ch_lock before releasing it.
+         * ch_lock is presumably released inside mm_channel_fsm_fn. */
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        /* pack all mapping parameters into a single event payload */
+        memset(&payload, 0, sizeof(payload));
+        payload.stream_id = stream_id;
+        payload.type = buf_type;
+        payload.frame_idx = buf_idx;
+        payload.plane_idx = plane_idx;
+        payload.fd = fd;
+        payload.size = size;
+        payload.buffer = buffer;
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_MAP_STREAM_BUF,
+                               (void*)&payload,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_map_stream_bufs
+ *
+ * DESCRIPTION: mapping stream buffers via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @buf_map_list : list of buffers to be mapped (must be non-NULL)
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure (including NULL @buf_map_list or unknown @ch_id)
+ *==========================================================================*/
+int32_t mm_camera_map_stream_bufs(mm_camera_obj_t *my_obj,
+                                  uint32_t ch_id,
+                                  const cam_buf_map_type_list *buf_map_list)
+{
+    int32_t rc = -1;
+    cam_buf_map_type_list payload;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    /* Guard @buf_map_list as well: it is dereferenced by the memcpy below,
+     * so a NULL list must fail with -1 (matching the NULL-payload check in
+     * mm_camera_request_super_buf) instead of crashing. */
+    if ((NULL != ch_obj) && (NULL != buf_map_list)) {
+        /* cam_lock held on entry; swap to ch_lock before releasing it.
+         * ch_lock is presumably released inside mm_channel_fsm_fn. */
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        /* copy the caller's list so the event owns its own payload */
+        memcpy(&payload, buf_map_list, sizeof(payload));
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_MAP_STREAM_BUFS,
+                               (void*)&payload,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_unmap_stream_buf
+ *
+ * DESCRIPTION: unmapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @stream_id    : stream handle
+ *   @buf_type     : type of buffer to be unmapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @buf_idx      : index of buffer within the stream buffers, only valid if
+ *                   buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx    : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plane_idx is the
+ *                   index to plane (0..num_of_planes)
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_unmap_stream_buf(mm_camera_obj_t *my_obj,
+                                   uint32_t ch_id,
+                                   uint32_t stream_id,
+                                   uint8_t buf_type,
+                                   uint32_t buf_idx,
+                                   int32_t plane_idx)
+{
+    int32_t rc = -1;
+    cam_buf_unmap_type payload;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        /* cam_lock held on entry; swap to ch_lock before releasing it.
+         * ch_lock is presumably released inside mm_channel_fsm_fn. */
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        /* pack all unmapping parameters into a single event payload */
+        memset(&payload, 0, sizeof(payload));
+        payload.stream_id = stream_id;
+        payload.type = buf_type;
+        payload.frame_idx = buf_idx;
+        payload.plane_idx = plane_idx;
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_UNMAP_STREAM_BUF,
+                               (void*)&payload,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_evt_sub
+ *
+ * DESCRIPTION: subscribe/unsubscribe event notify from kernel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @reg_flag     : 1 -- subscribe ; 0 -- unsubscribe
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_evt_sub(mm_camera_obj_t * my_obj,
+                          uint8_t reg_flag)
+{
+    int32_t rc = 0;
+    struct v4l2_event_subscription sub;
+
+    /* only MSM_CAMERA_MSM_NOTIFY events of MSM_CAMERA_V4L2_EVENT_TYPE
+     * are (un)subscribed on the control fd */
+    memset(&sub, 0, sizeof(sub));
+    sub.type = MSM_CAMERA_V4L2_EVENT_TYPE;
+    sub.id = MSM_CAMERA_MSM_NOTIFY;
+    if(FALSE == reg_flag) {
+        /* unsubscribe */
+        rc = ioctl(my_obj->ctrl_fd, VIDIOC_UNSUBSCRIBE_EVENT, &sub);
+        if (rc < 0) {
+            LOGE("unsubscribe event rc = %d, errno %d",
+                     rc, errno);
+            return rc;
+        }
+        /* remove evt fd from the polling thread when unreg the last event */
+        rc = mm_camera_poll_thread_del_poll_fd(&my_obj->evt_poll_thread,
+                                               my_obj->my_hdl,
+                                               mm_camera_sync_call);
+    } else {
+        rc = ioctl(my_obj->ctrl_fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
+        if (rc < 0) {
+            LOGE("subscribe event rc = %d, errno %d",
+             rc, errno);
+            return rc;
+        }
+        /* add evt fd to polling thread when subscribe the first event */
+        rc = mm_camera_poll_thread_add_poll_fd(&my_obj->evt_poll_thread,
+                                               my_obj->my_hdl,
+                                               my_obj->ctrl_fd,
+                                               mm_camera_event_notify,
+                                               (void*)my_obj,
+                                               mm_camera_sync_call);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_wait_for_event
+ *
+ * DESCRIPTION: utility function to wait for certain events
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @evt_mask     : mask for events to be waited. Any of event in the mask would
+ *                   trigger the wait to end
+ *   @status       : [out] status of the event; set to MSM_CAMERA_STATUS_FAIL
+ *                   on timeout (WAIT_TIMEOUT seconds per wait)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void mm_camera_util_wait_for_event(mm_camera_obj_t *my_obj,
+                                   uint32_t evt_mask,
+                                   uint32_t *status)
+{
+    int32_t rc = 0;
+    struct timespec ts;
+
+    pthread_mutex_lock(&my_obj->evt_lock);
+    /* standard condvar predicate loop: re-check after every wakeup to
+     * tolerate spurious wakeups and unrelated events */
+    while (!(my_obj->evt_rcvd.server_event_type & evt_mask)) {
+        clock_gettime(CLOCK_REALTIME, &ts);
+        ts.tv_sec += WAIT_TIMEOUT;
+        rc = pthread_cond_timedwait(&my_obj->evt_cond, &my_obj->evt_lock, &ts);
+        if (rc) {
+            /* timeout (or other wait error): give up on this event */
+            LOGE("pthread_cond_timedwait of evt_mask 0x%x failed %d",
+                     evt_mask, rc);
+            break;
+        }
+    }
+    if (!rc) {
+        *status = my_obj->evt_rcvd.status;
+    } else {
+        *status = MSM_CAMERA_STATUS_FAIL;
+    }
+    /* reset local storage for received event for next event */
+    memset(&my_obj->evt_rcvd, 0, sizeof(mm_camera_event_t));
+    pthread_mutex_unlock(&my_obj->evt_lock);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_bundled_sendmsg
+ *
+ * DESCRIPTION: utility function to send bundled msg via domain socket
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @msg          : message to be sent
+ *   @buf_size     : size of the message to be sent
+ *   @sendfds      : array of file descriptors to be sent
+ *   @numfds       : number of file descriptors to be sent
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure (send failed, or map/unmap ack reported failure
+ *                    or timed out)
+ *==========================================================================*/
+int32_t mm_camera_util_bundled_sendmsg(mm_camera_obj_t *my_obj,
+                                       void *msg,
+                                       size_t buf_size,
+                                       int sendfds[CAM_MAX_NUM_BUFS_PER_STREAM],
+                                       int numfds)
+{
+    int32_t rc = -1;
+    uint32_t status;
+
+    /* need to lock msg_lock, since sendmsg until response back is deemed as one operation*/
+    pthread_mutex_lock(&my_obj->msg_lock);
+    if(mm_camera_socket_bundle_sendmsg(my_obj->ds_fd, msg, buf_size, sendfds, numfds) > 0) {
+        /* wait for event that mapping/unmapping is done */
+        mm_camera_util_wait_for_event(my_obj, CAM_EVENT_TYPE_MAP_UNMAP_DONE, &status);
+        if (MSM_CAMERA_STATUS_SUCCESS == status) {
+            rc = 0;
+        }
+    }
+    pthread_mutex_unlock(&my_obj->msg_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_sendmsg
+ *
+ * DESCRIPTION: utility function to send msg via domain socket
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @msg          : message to be sent
+ *   @buf_size     : size of the message to be sent
+ *   @sendfd       : >0 if any file descriptor need to be passed across process
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_util_sendmsg(mm_camera_obj_t *my_obj,
+                               void *msg,
+                               size_t buf_size,
+                               int sendfd)
+{
+    int32_t rc = -1;
+    /* filled in by mm_camera_util_wait_for_event() below */
+    uint32_t status;
+
+    /* need to lock msg_lock, since sendmsg until response back is deemed as one operation*/
+    pthread_mutex_lock(&my_obj->msg_lock);
+    if(mm_camera_socket_sendmsg(my_obj->ds_fd, msg, buf_size, sendfd) > 0) {
+        /* wait for event that mapping/unmapping is done */
+        mm_camera_util_wait_for_event(my_obj, CAM_EVENT_TYPE_MAP_UNMAP_DONE, &status);
+        if (MSM_CAMERA_STATUS_SUCCESS == status) {
+            rc = 0;
+        }
+    }
+    /* rc remains -1 if the socket send failed or the server reported failure */
+    pthread_mutex_unlock(&my_obj->msg_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_map_buf
+ *
+ * DESCRIPTION: mapping camera buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @buf_type     : type of buffer to be mapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_CAPABILITY
+ *                   CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+ *   @fd           : file descriptor of the buffer
+ *   @size         : size of the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_map_buf(mm_camera_obj_t *my_obj,
+        uint8_t buf_type, int fd, size_t size, void *buffer)
+{
+    int32_t rc = 0;
+
+    /* build the single-fd mapping request packet */
+    cam_sock_packet_t packet;
+    memset(&packet, 0, sizeof(cam_sock_packet_t));
+    packet.msg_type = CAM_MAPPING_TYPE_FD_MAPPING;
+    packet.payload.buf_map.type = buf_type;
+    packet.payload.buf_map.fd = fd;
+    packet.payload.buf_map.size = size;
+    packet.payload.buf_map.buffer = buffer;
+#ifdef DAEMON_PRESENT
+    /* daemon present: pass the packet (and the fd) over the domain socket */
+    rc = mm_camera_util_sendmsg(my_obj,
+                                &packet,
+                                sizeof(cam_sock_packet_t),
+                                fd);
+#else
+    /* daemon-less: route the same packet through the shim layer instead */
+    cam_shim_packet_t *shim_cmd;
+    shim_cmd = mm_camera_create_shim_cmd_packet(CAM_SHIM_REG_BUF,
+            my_obj->sessionid, &packet);
+    rc = mm_camera_module_send_cmd(shim_cmd);
+    mm_camera_destroy_shim_cmd_packet(shim_cmd);
+#endif
+    /* NOTE(review): releases cam_lock acquired by the caller — confirm all
+     * callers enter with cam_lock held */
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_map_bufs
+ *
+ * DESCRIPTION: mapping camera buffers via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @buf_map_list : list of buffers to be mapped
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_map_bufs(mm_camera_obj_t *my_obj,
+                           const cam_buf_map_type_list* buf_map_list)
+{
+    int32_t rc = 0;
+    /* build a bundled mapping request carrying a copy of the whole list */
+    cam_sock_packet_t packet;
+    memset(&packet, 0, sizeof(cam_sock_packet_t));
+    packet.msg_type = CAM_MAPPING_TYPE_FD_BUNDLED_MAPPING;
+
+    memcpy(&packet.payload.buf_map_list, buf_map_list,
+           sizeof(packet.payload.buf_map_list));
+
+    /* collect the fds to pass across the socket alongside the packet */
+    int sendfds[CAM_MAX_NUM_BUFS_PER_STREAM];
+    uint32_t numbufs = packet.payload.buf_map_list.length;
+    uint32_t i;
+    for (i = 0; i < numbufs; i++) {
+        sendfds[i] = packet.payload.buf_map_list.buf_maps[i].fd;
+        packet.payload.buf_map_list.buf_maps[i].buffer =
+                buf_map_list->buf_maps[i].buffer;
+    }
+    /* mark the unused tail entries invalid so the server ignores them */
+    for (i = numbufs; i < CAM_MAX_NUM_BUFS_PER_STREAM; i++) {
+        packet.payload.buf_map_list.buf_maps[i].fd = -1;
+        sendfds[i] = -1;
+    }
+
+#ifdef DAEMON_PRESENT
+    /* daemon present: send packet plus fd array over the domain socket */
+    rc = mm_camera_util_bundled_sendmsg(my_obj,
+            &packet, sizeof(cam_sock_packet_t),
+            sendfds, numbufs);
+#else
+    /* daemon-less: route the same packet through the shim layer instead */
+    cam_shim_packet_t *shim_cmd;
+    shim_cmd = mm_camera_create_shim_cmd_packet(CAM_SHIM_REG_BUF,
+            my_obj->sessionid, &packet);
+    rc = mm_camera_module_send_cmd(shim_cmd);
+    mm_camera_destroy_shim_cmd_packet(shim_cmd);
+#endif
+
+    /* NOTE(review): releases cam_lock acquired by the caller — confirm all
+     * callers enter with cam_lock held */
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_unmap_buf
+ *
+ * DESCRIPTION: unmapping camera buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @buf_type     : type of buffer to be mapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_CAPABILITY
+ *                   CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_unmap_buf(mm_camera_obj_t *my_obj,
+                            uint8_t buf_type)
+{
+    int32_t rc = 0;
+    /* build the unmapping request; only the buffer type is needed */
+    cam_sock_packet_t packet;
+    memset(&packet, 0, sizeof(cam_sock_packet_t));
+    packet.msg_type = CAM_MAPPING_TYPE_FD_UNMAPPING;
+    packet.payload.buf_unmap.type = buf_type;
+#ifdef DAEMON_PRESENT
+    /* -1: no file descriptor accompanies an unmap request */
+    rc = mm_camera_util_sendmsg(my_obj,
+                                &packet,
+                                sizeof(cam_sock_packet_t),
+                                -1);
+#else
+    /* daemon-less: route the same packet through the shim layer instead */
+    cam_shim_packet_t *shim_cmd;
+    shim_cmd = mm_camera_create_shim_cmd_packet(CAM_SHIM_REG_BUF,
+            my_obj->sessionid, &packet);
+    rc = mm_camera_module_send_cmd(shim_cmd);
+    mm_camera_destroy_shim_cmd_packet(shim_cmd);
+#endif
+    /* NOTE(review): releases cam_lock acquired by the caller — confirm all
+     * callers enter with cam_lock held */
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_s_ctrl
+ *
+ * DESCRIPTION: utility function to send v4l2 ioctl for s_ctrl
+ *
+ * PARAMETERS :
+ *   @my_obj     :Camera object
+ *   @stream_id :streamID
+ *   @fd      : file descriptor for sending ioctl
+ *   @id      : control id
+ *   @value   : value of the ioctl to be sent
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_util_s_ctrl(__unused mm_camera_obj_t *my_obj,
+        __unused int stream_id, int32_t fd,
+        uint32_t id, int32_t *value)
+{
+    int rc = 0;
+
+#ifdef DAEMON_PRESENT
+    /* daemon present: issue a plain v4l2 S_CTRL ioctl on fd */
+    struct v4l2_control control;
+    memset(&control, 0, sizeof(control));
+    control.id = id;
+    if (value != NULL) {
+        control.value = *value;
+    }
+    rc = ioctl(fd, VIDIOC_S_CTRL, &control);
+    LOGD("fd=%d, S_CTRL, id=0x%x, value = %p, rc = %d\n",
+          fd, id, value, rc);
+    if (rc < 0) {
+        LOGE("ioctl failed %d, errno %d", rc, errno);
+    } else if (value != NULL) {
+        /* on success the driver may update the value in place */
+        *value = control.value;
+    }
+#else /* DAEMON_PRESENT */
+    /* daemon-less: send the control as a shim SET_PARM command; note that
+     * fd and value are unused on this path (value is deliberately NULL) */
+    cam_shim_packet_t *shim_cmd;
+    cam_shim_cmd_data shim_cmd_data;
+    (void)fd;
+    (void)value;
+    memset(&shim_cmd_data, 0, sizeof(shim_cmd_data));
+
+    shim_cmd_data.command = id;
+    shim_cmd_data.stream_id = stream_id;
+    shim_cmd_data.value = NULL;
+    shim_cmd = mm_camera_create_shim_cmd_packet(CAM_SHIM_SET_PARM,
+            my_obj->sessionid,&shim_cmd_data);
+    rc = mm_camera_module_send_cmd(shim_cmd);
+    mm_camera_destroy_shim_cmd_packet(shim_cmd);
+#endif /* DAEMON_PRESENT */
+    /* collapse the underlying return code to the 0/-1 convention */
+    return (rc >= 0)? 0 : -1;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_g_ctrl
+ *
+ * DESCRIPTION: utility function to send v4l2 ioctl for g_ctrl
+ *
+ * PARAMETERS :
+ *   @my_obj     :Camera object
+ *   @stream_id :streamID
+ *   @fd      : file descriptor for sending ioctl
+ *   @id      : control id
+ *   @value   : value of the ioctl to be sent
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_util_g_ctrl(__unused mm_camera_obj_t *my_obj,
+        __unused int stream_id, int32_t fd, uint32_t id, int32_t *value)
+{
+    int rc = 0;
+    struct v4l2_control control;
+
+    memset(&control, 0, sizeof(control));
+    control.id = id;
+    if (value != NULL) {
+        control.value = *value;
+    }
+
+#ifdef DAEMON_PRESENT
+    /* daemon present: issue a plain v4l2 G_CTRL ioctl on fd.
+     * NOTE(review): *value is written back even when the ioctl fails; in
+     * that case it just echoes the caller's input value */
+    rc = ioctl(fd, VIDIOC_G_CTRL, &control);
+    LOGD("fd=%d, G_CTRL, id=0x%x, rc = %d\n", fd, id, rc);
+    if (value != NULL) {
+        *value = control.value;
+    }
+#else /* DAEMON_PRESENT */
+    /* daemon-less: send a shim GET_PARM command; the result is delivered
+     * through the value pointer stored in the command data */
+    cam_shim_packet_t *shim_cmd;
+    cam_shim_cmd_data shim_cmd_data;
+    (void)fd;
+    memset(&shim_cmd_data, 0, sizeof(shim_cmd_data));
+
+    shim_cmd_data.command = id;
+    shim_cmd_data.stream_id = stream_id;
+    shim_cmd_data.value = value;
+    shim_cmd = mm_camera_create_shim_cmd_packet(CAM_SHIM_GET_PARM,
+            my_obj->sessionid, &shim_cmd_data);
+
+    rc = mm_camera_module_send_cmd(shim_cmd);
+    mm_camera_destroy_shim_cmd_packet(shim_cmd);
+#endif /* DAEMON_PRESENT */
+    /* collapse the underlying return code to the 0/-1 convention */
+    return (rc >= 0)? 0 : -1;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_create_shim_cmd
+ *
+ * DESCRIPTION: Prepare command packet to pass to back-end through shim layer
+ *
+ * PARAMETERS :
+ *   @type      : type of command
+ *   @sessionID : camera sessionID
+ *   @data      : command data
+ *
+ * RETURN     : NULL in case of failures
+ *              allocated pointer to shim packet
+ *==========================================================================*/
+cam_shim_packet_t *mm_camera_create_shim_cmd_packet(cam_shim_cmd_type type,
+        uint32_t sessionID, void *data)
+{
+    cam_shim_packet_t *shim_pack = NULL;
+    uint32_t i = 0;
+
+    /* heap-allocated; the caller owns the packet and releases it with
+     * mm_camera_destroy_shim_cmd_packet() */
+    shim_pack = (cam_shim_packet_t *)malloc(sizeof(cam_shim_packet_t));
+    if (shim_pack == NULL) {
+        LOGE("Cannot allocate a memory for shim packet");
+        return NULL;
+    }
+    memset(shim_pack, 0, sizeof(cam_shim_packet_t));
+    shim_pack->cmd_type = type;
+    shim_pack->session_id = sessionID;
+    /* copy the type-specific payload into the packet by value */
+    switch (type) {
+        case CAM_SHIM_SET_PARM:
+        case CAM_SHIM_GET_PARM: {
+            cam_shim_cmd_data *cmd_data = (cam_shim_cmd_data *)data;
+            shim_pack->cmd_data = *cmd_data;
+            break;
+        }
+        case CAM_SHIM_REG_BUF: {
+            cam_reg_buf_t *cmd_data = (cam_reg_buf_t *)data;
+            shim_pack->reg_buf = *cmd_data;
+            break;
+        }
+        case CAM_SHIM_BUNDLE_CMD: {
+            /* copy each stream event entry plus the count */
+            cam_shim_stream_cmd_packet_t *cmd_data = (cam_shim_stream_cmd_packet_t *)data;
+            for (i = 0; i < cmd_data->stream_count; i++) {
+                shim_pack->bundle_cmd.stream_event[i] = cmd_data->stream_event[i];
+            }
+            shim_pack->bundle_cmd.stream_count = cmd_data->stream_count;
+            break;
+        }
+        default:
+            LOGW("No Data for this command");
+    }
+    return shim_pack;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_destroy_shim_cmd
+ *
+ * DESCRIPTION: destroy shim packet
+ *
+ * PARAMETERS :
+ *   @cmd                : ptr to shim packet
+
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_destroy_shim_cmd_packet(cam_shim_packet_t *cmd)
+{
+    int32_t rc = 0;
+    uint32_t i = 0, j = 0;
+
+    if (cmd == NULL) {
+        LOGW("Command is NULL");
+        return rc;
+    }
+
+    switch (cmd->cmd_type) {
+        case CAM_SHIM_SET_PARM:
+        case CAM_SHIM_GET_PARM:
+        case CAM_SHIM_REG_BUF:
+            /* payload is stored inline in the packet; nothing extra to free */
+            break;
+        case CAM_SHIM_BUNDLE_CMD: {
+            /* Fix: address the bundle_cmd member instead of casting the whole
+             * packet; the old cast made stream_count alias cmd_type and
+             * walked the wrong memory. */
+            cam_shim_stream_cmd_packet_t *cmd_data = &cmd->bundle_cmd;
+            for (i = 0; i < cmd_data->stream_count; i++) {
+                cam_shim_cmd_packet_t *stream_evt = &cmd_data->stream_event[i];
+                for (j = 0; j < stream_evt->cmd_count; j++) {
+                    if (stream_evt->cmd != NULL) {
+                        if(stream_evt->cmd->cmd_type == CAM_SHIM_BUNDLE_CMD) {
+                            /* Fix: the recursive call frees the nested packet
+                             * itself; freeing it again here was a double free */
+                            mm_camera_destroy_shim_cmd_packet(stream_evt->cmd);
+                        } else {
+                            free(stream_evt->cmd);
+                        }
+                        stream_evt->cmd = NULL;
+                    }
+                }
+            }
+            break;
+        }
+        default:
+            LOGW("No Data for this command");
+    }
+    /* release the packet itself (allocated by create_shim_cmd_packet) */
+    free(cmd);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_channel_advanced_capture
+ *
+ * DESCRIPTION: sets the channel advanced capture
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @type         : advanced capture type
+ *   @trigger      : flag to indicate start/stop
+ *   @in_value     : input configuration
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_channel_advanced_capture(mm_camera_obj_t *my_obj,
+            uint32_t ch_id, mm_camera_advanced_capture_t type,
+            uint32_t trigger, void *in_value)
+{
+    LOGD("E type = %d", type);
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        /* hand off locking: take the channel lock, then release the camera
+         * lock acquired by the caller */
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+        /* dispatch the capture type to the matching channel FSM event */
+        switch (type) {
+            case MM_CAMERA_AF_BRACKETING:
+                rc = mm_channel_fsm_fn(ch_obj,
+                                       MM_CHANNEL_EVT_AF_BRACKETING,
+                                       (void *)&trigger,
+                                       NULL);
+                break;
+            case MM_CAMERA_AE_BRACKETING:
+                rc = mm_channel_fsm_fn(ch_obj,
+                                       MM_CHANNEL_EVT_AE_BRACKETING,
+                                       (void *)&trigger,
+                                       NULL);
+                break;
+            case MM_CAMERA_FLASH_BRACKETING:
+                rc = mm_channel_fsm_fn(ch_obj,
+                                       MM_CHANNEL_EVT_FLASH_BRACKETING,
+                                       (void *)&trigger,
+                                       NULL);
+                break;
+            case MM_CAMERA_ZOOM_1X:
+                rc = mm_channel_fsm_fn(ch_obj,
+                                       MM_CHANNEL_EVT_ZOOM_1X,
+                                       (void *)&trigger,
+                                       NULL);
+                break;
+            case MM_CAMERA_FRAME_CAPTURE:
+                /* frame capture takes the full config, not just the trigger */
+                rc = mm_channel_fsm_fn(ch_obj,
+                                       MM_CAMERA_EVT_CAPTURE_SETTING,
+                                       (void *)in_value,
+                                       NULL);
+                break;
+            default:
+                /* unknown type: rc stays -1 */
+                break;
+        }
+
+    } else {
+        /* unknown channel handle: just release the camera lock */
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    LOGD("X");
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_get_session_id
+ *
+ * DESCRIPTION: get the session identity
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @sessionid: pointer to the output session id
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : if this call succeeds, we will get a valid session id
+ *==========================================================================*/
+int32_t mm_camera_get_session_id(mm_camera_obj_t *my_obj,
+        uint32_t* sessionid)
+{
+    int32_t rc = -1;
+    int32_t value = 0;
+    /* rc stays -1 (failure) if the caller passes a NULL output pointer */
+    if(sessionid != NULL) {
+        struct v4l2_control control;
+        memset(&control, 0, sizeof(control));
+        control.id = MSM_CAMERA_PRIV_G_SESSION_ID;
+        control.value = value;
+
+        /* query the session id directly from the control fd */
+        rc = ioctl(my_obj->ctrl_fd, VIDIOC_G_CTRL, &control);
+        value = control.value;
+        LOGD("fd=%d, get_session_id, id=0x%x, value = %d, rc = %d\n",
+                 my_obj->ctrl_fd, MSM_CAMERA_PRIV_G_SESSION_ID,
+                value, rc);
+        /* NOTE(review): written even when the ioctl fails — confirm callers
+         * check rc before trusting *sessionid */
+        *sessionid = value;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_sync_related_sensors
+ *
+ * DESCRIPTION: send sync cmd
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @parms        : ptr to the related cam info to be sent to server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the sync struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of parameters to be set
+ *              are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_camera_sync_related_sensors(mm_camera_obj_t *my_obj,
+        cam_sync_related_sensors_event_info_t* parms)
+{
+    int32_t rc = -1;
+    /* dummy control value; the real payload is the pre-mapped parms buffer */
+    int32_t value = 0;
+    if (parms !=  NULL) {
+        rc = mm_camera_util_s_ctrl(my_obj, 0, my_obj->ctrl_fd,
+                CAM_PRIV_SYNC_RELATED_SENSORS, &value);
+    }
+    /* NOTE(review): releases cam_lock acquired by the caller — confirm all
+     * callers enter with cam_lock held */
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_reg_stream_buf_cb
+ *
+ * DESCRIPTION: Register callback for stream buffer
+ *
+ * PARAMETERS :
+ *   @my_obj    : camera object
+ *   @ch_id     : channel handle
+ *   @stream_id : stream that will be linked
+ *   @buf_cb    : special callback needs to be registered for stream buffer
+ *   @cb_type   : Callback type SYNC/ASYNC
+ *   @userdata  : user data pointer
+ *
+ * RETURN    : int32_t type of status
+ *             0  -- success
+ *             1 --  failure
+ *==========================================================================*/
+int32_t mm_camera_reg_stream_buf_cb(mm_camera_obj_t *my_obj,
+        uint32_t ch_id, uint32_t stream_id, mm_camera_buf_notify_t stream_cb,
+        mm_camera_stream_cb_type cb_type, void *userdata)
+{
+    /* NOTE(review): rc is always 0; the mm_channel_fsm_fn result is
+     * discarded, so callers cannot detect a failed registration */
+    int rc = 0;
+    mm_stream_data_cb_t buf_cb;
+    mm_channel_t * ch_obj =
+            mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        /* hand off locking: take the channel lock, then release the camera
+         * lock acquired by the caller */
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        /* cb_count -1: callback stays registered (not a one-shot) */
+        memset(&buf_cb, 0, sizeof(mm_stream_data_cb_t));
+        buf_cb.cb = stream_cb;
+        buf_cb.cb_count = -1;
+        buf_cb.cb_type = cb_type;
+        buf_cb.user_data = userdata;
+
+        mm_evt_paylod_reg_stream_buf_cb payload;
+        memset(&payload, 0, sizeof(mm_evt_paylod_reg_stream_buf_cb));
+        payload.buf_cb = buf_cb;
+        payload.stream_id = stream_id;
+        mm_channel_fsm_fn(ch_obj,
+                MM_CHANNEL_EVT_REG_STREAM_BUF_CB,
+                (void*)&payload, NULL);
+    } else {
+        /* unknown channel handle: just release the camera lock */
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+    return rc;
+}
+
+#ifdef QCAMERA_REDEFINE_LOG
+
+/*===========================================================================
+ * DESCRIPTION: mm camera debug interface
+ *
+ *==========================================================================*/
+/* serializes updates to the logging tables below */
+pthread_mutex_t dbg_log_mutex;
+
+#undef LOG_TAG
+#define LOG_TAG "QCamera"
+#define CDBG_MAX_STR_LEN 1024
+#define CDBG_MAX_LINE_LENGTH 256
+
+/* current trace logging permissions
+ * {NONE, ERR, WARN, HIGH, DEBUG, LOW, INFO} */
+int g_cam_log[CAM_LAST_MODULE][CAM_GLBL_DBG_INFO + 1] = {
+    {0, 1, 0, 0, 0, 0, 1}, /* CAM_NO_MODULE     */
+    {0, 1, 0, 0, 0, 0, 1}, /* CAM_HAL_MODULE    */
+    {0, 1, 0, 0, 0, 0, 1}, /* CAM_MCI_MODULE    */
+    {0, 1, 0, 0, 0, 0, 1}, /* CAM_JPEG_MODULE   */
+};
+
+/* string representation for logging level, indexed by
+ * cam_global_debug_level_t */
+static const char *cam_dbg_level_to_str[] = {
+     "",        /* CAM_GLBL_DBG_NONE  */
+     "<ERROR>", /* CAM_GLBL_DBG_ERR   */
+     "<WARN>", /* CAM_GLBL_DBG_WARN  */
+     "<HIGH>", /* CAM_GLBL_DBG_HIGH  */
+     "<DBG>", /* CAM_GLBL_DBG_DEBUG */
+     "<LOW>", /* CAM_GLBL_DBG_LOW   */
+     "<INFO>"  /* CAM_GLBL_DBG_INFO  */
+};
+
+/* current trace logging configuration */
+typedef struct {
+   cam_global_debug_level_t  level;       /* current logging level */
+   int                       initialized; /* 1 once configured */
+   const char               *name;        /* tag prefixed to log lines */
+   const char               *prop;        /* Android property controlling it */
+} module_debug_t;
+
+/* per-module logging configuration, indexed by cam_modules_t */
+static module_debug_t cam_loginfo[(int)CAM_LAST_MODULE] = {
+  {CAM_GLBL_DBG_ERR, 1,
+      "",         "persist.camera.global.debug"     }, /* CAM_NO_MODULE     */
+  {CAM_GLBL_DBG_ERR, 1,
+      "<HAL>", "persist.camera.hal.debug"        }, /* CAM_HAL_MODULE    */
+  {CAM_GLBL_DBG_ERR, 1,
+      "<MCI>", "persist.camera.mci.debug"        }, /* CAM_MCI_MODULE    */
+  {CAM_GLBL_DBG_ERR, 1,
+      "<JPEG>", "persist.camera.mmstill.logs"     }, /* CAM_JPEG_MODULE   */
+};
+
+/** cam_get_dbg_level
+ *
+ *    @module: module name
+ *    @level:  module debug logging level
+ *
+ *  Maps debug log string to value.
+ *
+ *  Return: logging level
+ **/
+__unused
+static cam_global_debug_level_t cam_get_dbg_level(const char *module,
+  char *pValue) {
+
+  /* defaults to NONE when the string is not a recognized level name */
+  cam_global_debug_level_t rc = CAM_GLBL_DBG_NONE;
+
+  if (!strcmp(pValue, "none")) {
+    rc = CAM_GLBL_DBG_NONE;
+  } else if (!strcmp(pValue, "warn")) {
+    rc = CAM_GLBL_DBG_WARN;
+  } else if (!strcmp(pValue, "debug")) {
+    rc = CAM_GLBL_DBG_DEBUG;
+  } else if (!strcmp(pValue, "error")) {
+    rc = CAM_GLBL_DBG_ERR;
+  } else if (!strcmp(pValue, "low")) {
+    rc = CAM_GLBL_DBG_LOW;
+  } else if (!strcmp(pValue, "high")) {
+    rc = CAM_GLBL_DBG_HIGH;
+  } else if (!strcmp(pValue, "info")) {
+    rc = CAM_GLBL_DBG_INFO;
+  } else {
+    ALOGE("Invalid %s debug log level %s\n", module, pValue);
+  }
+
+  ALOGD("%s debug log level: %s\n", module, cam_dbg_level_to_str[rc]);
+
+  return rc;
+}
+
+/** cam_vsnprintf
+ *    @pdst:   destination buffer pointer
+ *    @size:   size of destination buffer
+ *    @pfmt:   string format
+ *    @argptr: variable length argument list
+ *
+ *  Processes variable length argument list to a formatted string.
+ *
+ *  Return: n/a
+ **/
+static void cam_vsnprintf(char* pdst, unsigned int size,
+                          const char* pfmt, va_list argptr) {
+  int num_chars_written = 0;
+
+  /* pre-terminate so pdst is a valid (empty) string even if vsnprintf
+   * reports an error (negative return) */
+  pdst[0] = '\0';
+  num_chars_written = vsnprintf(pdst, size, pfmt, argptr);
+
+  if ((num_chars_written >= (int)size) && (size > 0)) {
+     /* Message length exceeds the buffer limit size */
+     num_chars_written = size - 1;
+     pdst[size - 1] = '\0';
+  }
+}
+
+/** mm_camera_debug_log
+ *    @module: origin or log message
+ *    @level:  logging level
+ *    @func:   caller function name
+ *    @line:   caller line number
+ *    @fmt:    log message formatting string
+ *    @...:    variable argument list
+ *
+ *  Generic logger method.
+ *
+ *  Return: N/A
+ **/
+void mm_camera_debug_log(const cam_modules_t module,
+                   const cam_global_debug_level_t level,
+                   const char *func, const int line, const char *fmt, ...) {
+  char    str_buffer[CDBG_MAX_STR_LEN];
+  va_list args;
+
+  /* format the caller's message into a bounded local buffer */
+  va_start(args, fmt);
+  cam_vsnprintf(str_buffer, CDBG_MAX_STR_LEN, fmt, args);
+  va_end(args);
+
+  /* route to the matching Android log priority; HIGH/DEBUG/LOW and any
+   * unknown level all map to ALOGD */
+  switch (level) {
+  case CAM_GLBL_DBG_WARN:
+    ALOGW("%s%s %s: %d: %s", cam_loginfo[module].name,
+      cam_dbg_level_to_str[level], func, line, str_buffer);
+    break;
+  case CAM_GLBL_DBG_ERR:
+    ALOGE("%s%s %s: %d: %s", cam_loginfo[module].name,
+      cam_dbg_level_to_str[level], func, line, str_buffer);
+    break;
+  case CAM_GLBL_DBG_INFO:
+    ALOGI("%s%s %s: %d: %s", cam_loginfo[module].name,
+      cam_dbg_level_to_str[level], func, line, str_buffer);
+    break;
+  case CAM_GLBL_DBG_HIGH:
+  case CAM_GLBL_DBG_DEBUG:
+  case CAM_GLBL_DBG_LOW:
+  default:
+    ALOGD("%s%s %s: %d: %s", cam_loginfo[module].name,
+      cam_dbg_level_to_str[level], func, line, str_buffer);
+  }
+}
+
+ /** mm_camera_set_dbg_log_properties
+ *
+ *  Set global and module log level properties.
+ *
+ *  Return: N/A
+ **/
+void mm_camera_set_dbg_log_properties(void) {
+  int          i;
+  unsigned int j;
+  static int   boot_init = 1;
+  char         property_value[PROPERTY_VALUE_MAX] = {0};
+  char         default_value[PROPERTY_VALUE_MAX]  = {0};
+
+  /* NOTE(review): one-time mutex init guarded only by a static flag —
+   * confirm the first call cannot race with another thread */
+  if (boot_init) {
+      boot_init = 0;
+      pthread_mutex_init(&dbg_log_mutex, 0);
+  }
+
+  /* set global and individual module logging levels */
+  pthread_mutex_lock(&dbg_log_mutex);
+  for (i = CAM_NO_MODULE; i < CAM_LAST_MODULE; i++) {
+    cam_global_debug_level_t log_level;
+    /* read the module's property, defaulting to its current level */
+    snprintf(default_value, PROPERTY_VALUE_MAX, "%d", (int)cam_loginfo[i].level);
+    property_get(cam_loginfo[i].prop, property_value, default_value);
+    log_level = (cam_global_debug_level_t)atoi(property_value);
+
+    /* fix KW warnings */
+    if (log_level > CAM_GLBL_DBG_INFO) {
+       log_level = CAM_GLBL_DBG_INFO;
+    }
+
+    cam_loginfo[i].level = log_level;
+
+    /* The logging macros will produce a log message when logging level for
+     * a module is less or equal to the level specified in the property for
+     * the module, or less or equal the level specified by the global logging
+     * property. Currently we don't allow INFO logging to be turned off */
+    for (j = CAM_GLBL_DBG_ERR; j <= CAM_GLBL_DBG_LOW; j++) {
+      g_cam_log[i][j] = (cam_loginfo[CAM_NO_MODULE].level != CAM_GLBL_DBG_NONE)     &&
+                        (cam_loginfo[i].level             != CAM_GLBL_DBG_NONE)     &&
+                        ((j                                <= cam_loginfo[i].level) ||
+                         (j                                <= cam_loginfo[CAM_NO_MODULE].level));
+    }
+  }
+  pthread_mutex_unlock(&dbg_log_mutex);
+}
+
+#endif
diff --git a/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c b/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
new file mode 100644
index 0000000..e75d962
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
@@ -0,0 +1,3638 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <pthread.h>
+#include <fcntl.h>
+
+// Camera dependencies
+#include "cam_semaphore.h"
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+extern mm_camera_obj_t* mm_camera_util_get_camera_by_handler(uint32_t cam_handler);
+extern mm_channel_t * mm_camera_util_get_channel_by_handler(mm_camera_obj_t * cam_obj,
+                                                            uint32_t handler);
+/* Static frame sync info used between different camera channels*/
+static mm_channel_frame_sync_info_t fs = { .num_cam =0, .pos = 0};
+/* Frame sync info access lock */
+static pthread_mutex_t fs_lock = PTHREAD_MUTEX_INITIALIZER;
+
+/* internal function declare goes here */
+int32_t mm_channel_qbuf(mm_channel_t *my_obj,
+                        mm_camera_buf_def_t *buf);
+int32_t mm_channel_init(mm_channel_t *my_obj,
+                        mm_camera_channel_attr_t *attr,
+                        mm_camera_buf_notify_t channel_cb,
+                        void *userdata);
+void mm_channel_release(mm_channel_t *my_obj);
+uint32_t mm_channel_add_stream(mm_channel_t *my_obj);
+int32_t mm_channel_del_stream(mm_channel_t *my_obj,
+                                   uint32_t stream_id);
+uint32_t mm_channel_link_stream(mm_channel_t *my_obj,
+        mm_camera_stream_link_t *stream_link);
+int32_t mm_channel_config_stream(mm_channel_t *my_obj,
+                                 uint32_t stream_id,
+                                 mm_camera_stream_config_t *config);
+int32_t mm_channel_get_bundle_info(mm_channel_t *my_obj,
+                                   cam_bundle_config_t *bundle_info);
+int32_t mm_channel_start(mm_channel_t *my_obj);
+int32_t mm_channel_stop(mm_channel_t *my_obj);
+int32_t mm_channel_request_super_buf(mm_channel_t *my_obj,
+        mm_camera_req_buf_t *buf);
+int32_t mm_channel_cancel_super_buf_request(mm_channel_t *my_obj);
+int32_t mm_channel_flush_super_buf_queue(mm_channel_t *my_obj,
+                                         uint32_t frame_idx,
+                                         cam_stream_type_t stream_type);
+int32_t mm_channel_config_notify_mode(mm_channel_t *my_obj,
+                                      mm_camera_super_buf_notify_mode_t notify_mode);
+int32_t mm_channel_start_zsl_snapshot(mm_channel_t *my_obj);
+int32_t mm_channel_stop_zsl_snapshot(mm_channel_t *my_obj);
+int32_t mm_channel_superbuf_flush(mm_channel_t* my_obj,
+        mm_channel_queue_t * queue, cam_stream_type_t cam_type);
+int32_t mm_channel_set_stream_parm(mm_channel_t *my_obj,
+                                   mm_evt_paylod_set_get_stream_parms_t *payload);
+int32_t mm_channel_get_queued_buf_count(mm_channel_t *my_obj,
+        uint32_t stream_id);
+
+int32_t mm_channel_get_stream_parm(mm_channel_t *my_obj,
+                                   mm_evt_paylod_set_get_stream_parms_t *payload);
+int32_t mm_channel_do_stream_action(mm_channel_t *my_obj,
+                                    mm_evt_paylod_do_stream_action_t *payload);
+int32_t mm_channel_map_stream_buf(mm_channel_t *my_obj,
+                                  cam_buf_map_type *payload);
+int32_t mm_channel_map_stream_bufs(mm_channel_t *my_obj,
+                                   cam_buf_map_type_list *payload);
+int32_t mm_channel_unmap_stream_buf(mm_channel_t *my_obj,
+                                    cam_buf_unmap_type *payload);
+
+/* state machine function declare */
+int32_t mm_channel_fsm_fn_notused(mm_channel_t *my_obj,
+                          mm_channel_evt_type_t evt,
+                          void * in_val,
+                          void * out_val);
+int32_t mm_channel_fsm_fn_stopped(mm_channel_t *my_obj,
+                          mm_channel_evt_type_t evt,
+                          void * in_val,
+                          void * out_val);
+int32_t mm_channel_fsm_fn_active(mm_channel_t *my_obj,
+                          mm_channel_evt_type_t evt,
+                          void * in_val,
+                          void * out_val);
+int32_t mm_channel_fsm_fn_paused(mm_channel_t *my_obj,
+                          mm_channel_evt_type_t evt,
+                          void * in_val,
+                          void * out_val);
+
+/* channel super queue functions */
+int32_t mm_channel_superbuf_queue_init(mm_channel_queue_t * queue);
+int32_t mm_channel_superbuf_queue_deinit(mm_channel_queue_t * queue);
+int32_t mm_channel_superbuf_comp_and_enqueue(mm_channel_t *ch_obj,
+                                             mm_channel_queue_t * queue,
+                                             mm_camera_buf_info_t *buf);
+mm_channel_queue_node_t* mm_channel_superbuf_dequeue(
+        mm_channel_queue_t * queue, mm_channel_t *ch_obj);
+int32_t mm_channel_superbuf_bufdone_overflow(mm_channel_t *my_obj,
+                                             mm_channel_queue_t *queue);
+int32_t mm_channel_superbuf_skip(mm_channel_t *my_obj,
+                                 mm_channel_queue_t *queue);
+
+static int32_t mm_channel_proc_general_cmd(mm_channel_t *my_obj,
+                                           mm_camera_generic_cmd_t *p_gen_cmd);
+int32_t mm_channel_superbuf_flush_matched(mm_channel_t* my_obj,
+                                          mm_channel_queue_t * queue);
+
+/* Start of Frame Sync util methods */
+void mm_frame_sync_reset();
+int32_t mm_frame_sync_register_channel(mm_channel_t *ch_obj);
+int32_t mm_frame_sync_unregister_channel(mm_channel_t *ch_obj);
+int32_t mm_frame_sync_add(uint32_t frame_id, mm_channel_t *ch_obj);
+int32_t mm_frame_sync_remove(uint32_t frame_id);
+uint32_t mm_frame_sync_find_matched(uint8_t oldest);
+int8_t mm_frame_sync_find_frame_index(uint32_t frame_id);
+void mm_frame_sync_lock_queues();
+void mm_frame_sync_unlock_queues();
+void mm_channel_node_qbuf(mm_channel_t *ch_obj, mm_channel_queue_node_t *node);
+/* End of Frame Sync Util methods */
+void mm_channel_send_super_buf(mm_channel_node_info_t *info);
+mm_channel_queue_node_t* mm_channel_superbuf_dequeue_frame_internal(
+        mm_channel_queue_t * queue, uint32_t frame_idx);
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_util_get_stream_by_handler
+ *
+ * DESCRIPTION: utility function to get a stream object from its handle
+ *
+ * PARAMETERS :
+ *   @cam_obj: ptr to a channel object
+ *   @handler: stream handle
+ *
+ * RETURN     : ptr to a stream object.
+ *              NULL if failed.
+ *==========================================================================*/
+mm_stream_t * mm_channel_util_get_stream_by_handler(
+                                    mm_channel_t * ch_obj,
+                                    uint32_t handler)
+{
+    /* Scan the channel's fixed-size stream table; skip unused slots and
+     * return the first stream whose handle matches, or NULL if none does. */
+    int idx;
+    for (idx = 0; idx < MAX_STREAM_NUM_IN_BUNDLE; idx++) {
+        mm_stream_t *cur = &ch_obj->streams[idx];
+        if ((MM_STREAM_STATE_NOTUSED != cur->state) &&
+            (cur->my_hdl == handler)) {
+            return cur;
+        }
+    }
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_dispatch_super_buf
+ *
+ * DESCRIPTION: dispatch super buffer of bundle to registered user
+ *
+ * PARAMETERS :
+ *   @cmd_cb  : ptr storing matched super buf information
+ *   @userdata: user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_channel_dispatch_super_buf(mm_camera_cmdcb_t *cmd_cb,
+                                          void* user_data)
+{
+    /* Deliver a matched super buffer to the user-registered bundle
+     * callback; drop it if the channel or command type is invalid. */
+    mm_channel_t *ch = (mm_channel_t *)user_data;
+
+    if (ch == NULL) {
+        return;
+    }
+
+    if (cmd_cb->cmd_type != MM_CAMERA_CMD_TYPE_SUPER_BUF_DATA_CB) {
+        LOGE("Wrong cmd_type (%d) for super buf dataCB",
+                    cmd_cb->cmd_type);
+        return;
+    }
+
+    if (ch->bundle.super_buf_notify_cb != NULL) {
+        ch->bundle.super_buf_notify_cb(&cmd_cb->u.superbuf, ch->bundle.user_data);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_process_stream_buf
+ *
+ * DESCRIPTION: handle incoming buffer from stream in a bundle. In this function,
+ *              matching logic will be performed on incoming stream frames.
+ *              Will depends on the bundle attribute, either storing matched frames
+ *              in the superbuf queue, or sending matched superbuf frames to upper
+ *              layer through registered callback.
+ *
+ * PARAMETERS :
+ *   @cmd_cb  : ptr storing matched super buf information
+ *   @userdata: user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_channel_process_stream_buf(mm_camera_cmdcb_t * cmd_cb,
+                                          void *user_data)
+{
+    mm_camera_super_buf_notify_mode_t notify_mode;
+    mm_channel_queue_node_t *node = NULL;
+    mm_channel_t *ch_obj = (mm_channel_t *)user_data;
+    uint32_t i = 0;
+    /* Set expected frame id to a future frame idx, large enough to wait
+    * for good_frame_idx_range, and small enough to still capture an image */
+    uint8_t needStartZSL = FALSE;
+
+    if (NULL == ch_obj) {
+        return;
+    }
+    if (MM_CAMERA_CMD_TYPE_DATA_CB  == cmd_cb->cmd_type) {
+        /* comp_and_enqueue */
+        mm_channel_superbuf_comp_and_enqueue(
+                        ch_obj,
+                        &ch_obj->bundle.superbuf_queue,
+                        &cmd_cb->u.buf);
+    } else if (MM_CAMERA_CMD_TYPE_REQ_DATA_CB  == cmd_cb->cmd_type) {
+        /* skip frames if needed */
+        ch_obj->pending_cnt = cmd_cb->u.req_buf.num_buf_requested;
+        ch_obj->pending_retro_cnt = cmd_cb->u.req_buf.num_retro_buf_requested;
+        ch_obj->req_type = cmd_cb->u.req_buf.type;
+        ch_obj->bWaitForPrepSnapshotDone = 0;
+
+        LOGH("pending cnt (%d), retro count (%d)"
+                "req_type (%d) is_primary (%d)",
+                 ch_obj->pending_cnt, ch_obj->pending_retro_cnt,
+                ch_obj->req_type, cmd_cb->u.req_buf.primary_only);
+        /* Retro count can never exceed the total pending count. */
+        if (!ch_obj->pending_cnt || (ch_obj->pending_retro_cnt > ch_obj->pending_cnt)) {
+          ch_obj->pending_retro_cnt = ch_obj->pending_cnt;
+        }
+        if (ch_obj->pending_retro_cnt > 0) {
+          LOGL("Resetting need Led Flash!!!");
+          ch_obj->needLEDFlash = 0;
+        }
+        ch_obj->stopZslSnapshot = 0;
+        ch_obj->unLockAEC = 0;
+
+        mm_channel_superbuf_skip(ch_obj, &ch_obj->bundle.superbuf_queue);
+
+    } else if (MM_CAMERA_CMD_TYPE_START_ZSL == cmd_cb->cmd_type) {
+            ch_obj->manualZSLSnapshot = TRUE;
+            mm_camera_start_zsl_snapshot(ch_obj->cam_obj);
+    } else if (MM_CAMERA_CMD_TYPE_STOP_ZSL == cmd_cb->cmd_type) {
+            ch_obj->manualZSLSnapshot = FALSE;
+            mm_camera_stop_zsl_snapshot(ch_obj->cam_obj);
+    } else if (MM_CAMERA_CMD_TYPE_CONFIG_NOTIFY == cmd_cb->cmd_type) {
+           ch_obj->bundle.superbuf_queue.attr.notify_mode = cmd_cb->u.notify_mode;
+    } else if (MM_CAMERA_CMD_TYPE_FLUSH_QUEUE  == cmd_cb->cmd_type) {
+        ch_obj->bundle.superbuf_queue.expected_frame_id = cmd_cb->u.flush_cmd.frame_idx;
+        mm_channel_superbuf_flush(ch_obj,
+                &ch_obj->bundle.superbuf_queue, cmd_cb->u.flush_cmd.stream_type);
+        /* Flush is synchronous: wake the waiter before returning. */
+        cam_sem_post(&(ch_obj->cmd_thread.sync_sem));
+        return;
+    } else if (MM_CAMERA_CMD_TYPE_GENERAL == cmd_cb->cmd_type) {
+        LOGH("MM_CAMERA_CMD_TYPE_GENERAL");
+        switch (cmd_cb->u.gen_cmd.type) {
+            case MM_CAMERA_GENERIC_CMD_TYPE_AE_BRACKETING:
+            case MM_CAMERA_GENERIC_CMD_TYPE_AF_BRACKETING: {
+                uint32_t start = cmd_cb->u.gen_cmd.payload[0];
+                LOGI("MM_CAMERA_GENERIC_CMDTYPE_AF_BRACKETING %u",
+                      start);
+                mm_channel_superbuf_flush(ch_obj,
+                        &ch_obj->bundle.superbuf_queue, CAM_STREAM_TYPE_DEFAULT);
+
+                if (start) {
+                    LOGH("need AE bracketing, start zsl snapshot");
+                    ch_obj->bracketingState = MM_CHANNEL_BRACKETING_STATE_WAIT_GOOD_FRAME_IDX;
+                } else {
+                    ch_obj->bracketingState = MM_CHANNEL_BRACKETING_STATE_OFF;
+                }
+            }
+                break;
+            case MM_CAMERA_GENERIC_CMD_TYPE_FLASH_BRACKETING: {
+                uint32_t start = cmd_cb->u.gen_cmd.payload[0];
+                LOGI("MM_CAMERA_GENERIC_CMDTYPE_FLASH_BRACKETING %u",
+                      start);
+                mm_channel_superbuf_flush(ch_obj,
+                        &ch_obj->bundle.superbuf_queue, CAM_STREAM_TYPE_DEFAULT);
+
+                if (start) {
+                    LOGH("need flash bracketing");
+                    ch_obj->isFlashBracketingEnabled = TRUE;
+                } else {
+                    ch_obj->isFlashBracketingEnabled = FALSE;
+                }
+            }
+                break;
+            case MM_CAMERA_GENERIC_CMD_TYPE_ZOOM_1X: {
+                uint32_t start = cmd_cb->u.gen_cmd.payload[0];
+                LOGI("MM_CAMERA_GENERIC_CMD_TYPE_ZOOM_1X %u",
+                      start);
+                mm_channel_superbuf_flush(ch_obj,
+                        &ch_obj->bundle.superbuf_queue, CAM_STREAM_TYPE_DEFAULT);
+
+                if (start) {
+                    LOGH("need zoom 1x frame");
+                    ch_obj->isZoom1xFrameRequested = TRUE;
+                } else {
+                    ch_obj->isZoom1xFrameRequested = FALSE;
+                }
+            }
+                break;
+            case MM_CAMERA_GENERIC_CMD_TYPE_CAPTURE_SETTING: {
+                uint32_t start = cmd_cb->u.gen_cmd.payload[0];
+                LOGI("MM_CAMERA_GENERIC_CMD_TYPE_CAPTURE_SETTING %u num_batch = %d",
+                      start, cmd_cb->u.gen_cmd.frame_config.num_batch);
+
+                if (start) {
+                    /* Copy only the non-RESET batches into the channel's
+                     * frame config, compacting them to the front. */
+                    memset(&ch_obj->frameConfig, 0, sizeof(cam_capture_frame_config_t));
+                    for (i = 0; i < cmd_cb->u.gen_cmd.frame_config.num_batch; i++) {
+                        if (cmd_cb->u.gen_cmd.frame_config.configs[i].type
+                                != CAM_CAPTURE_RESET) {
+                            ch_obj->frameConfig.configs[
+                                    ch_obj->frameConfig.num_batch] =
+                                    cmd_cb->u.gen_cmd.frame_config.configs[i];
+                            ch_obj->frameConfig.num_batch++;
+                            /* num_batch was just incremented, so the entry
+                             * actually stored is at (num_batch - 1); the
+                             * previous code logged the next (empty) slot. */
+                            LOGH("capture setting frame = %d type = %d",
+                                    i,ch_obj->frameConfig.configs[
+                                    ch_obj->frameConfig.num_batch - 1].type);
+                        }
+                    }
+                    LOGD("Capture setting Batch Count %d",
+                              ch_obj->frameConfig.num_batch);
+                    ch_obj->isConfigCapture = TRUE;
+                } else {
+                    ch_obj->isConfigCapture = FALSE;
+                    memset(&ch_obj->frameConfig, 0, sizeof(cam_capture_frame_config_t));
+                }
+                ch_obj->cur_capture_idx = 0;
+                memset(ch_obj->capture_frame_id, 0, sizeof(uint8_t) * MAX_CAPTURE_BATCH_NUM);
+                break;
+            }
+            default:
+                LOGE("Error: Invalid command");
+                break;
+        }
+    }
+    notify_mode = ch_obj->bundle.superbuf_queue.attr.notify_mode;
+
+    /*Handle use case which does not need start ZSL even in unified case*/
+    if ((ch_obj->pending_cnt > 0)
+            && (ch_obj->isConfigCapture)
+            && (ch_obj->manualZSLSnapshot == FALSE)
+            && (ch_obj->startZSlSnapshotCalled == FALSE)) {
+        needStartZSL = TRUE;
+        for (i = ch_obj->cur_capture_idx;
+                i < ch_obj->frameConfig.num_batch;
+                i++) {
+            cam_capture_type type = ch_obj->frameConfig.configs[i].type;
+            if (((type == CAM_CAPTURE_FLASH) && (!ch_obj->needLEDFlash))
+                    || ((type == CAM_CAPTURE_LOW_LIGHT) && (!ch_obj->needLowLightZSL))) {
+                /*For flash and low light capture, start ZSL is triggered only if needed*/
+                needStartZSL = FALSE;
+                break;
+            }
+        }
+    }
+
+    if ((ch_obj->isConfigCapture)
+            && (needStartZSL)) {
+        /* Push every remaining batch's target frame id far enough into
+         * the future to allow reconfiguration to settle. */
+        for (i = ch_obj->cur_capture_idx;
+                i < ch_obj->frameConfig.num_batch;
+                i++) {
+            ch_obj->capture_frame_id[i] =
+                    ch_obj->bundle.superbuf_queue.expected_frame_id
+                    + MM_CAMERA_MAX_FUTURE_FRAME_WAIT;
+        }
+
+        /* Need to Flush the queue and trigger frame config */
+        mm_channel_superbuf_flush(ch_obj,
+                &ch_obj->bundle.superbuf_queue, CAM_STREAM_TYPE_DEFAULT);
+        LOGI("TRIGGER Start ZSL");
+        mm_camera_start_zsl_snapshot(ch_obj->cam_obj);
+        ch_obj->startZSlSnapshotCalled = TRUE;
+        ch_obj->burstSnapNum = ch_obj->pending_cnt;
+        ch_obj->bWaitForPrepSnapshotDone = 0;
+    } else if ((ch_obj->pending_cnt > 0)
+        && ((ch_obj->needLEDFlash == TRUE) ||
+        (MM_CHANNEL_BRACKETING_STATE_OFF != ch_obj->bracketingState))
+        && (ch_obj->manualZSLSnapshot == FALSE)
+        && ch_obj->startZSlSnapshotCalled == FALSE) {
+
+        LOGI("TRIGGER Start ZSL for Flash");
+        mm_camera_start_zsl_snapshot(ch_obj->cam_obj);
+        ch_obj->startZSlSnapshotCalled = TRUE;
+        ch_obj->burstSnapNum = ch_obj->pending_cnt;
+        ch_obj->bWaitForPrepSnapshotDone = 0;
+    } else if (((ch_obj->pending_cnt == 0) || (ch_obj->stopZslSnapshot == 1))
+            && (ch_obj->manualZSLSnapshot == FALSE)
+            && (ch_obj->startZSlSnapshotCalled == TRUE)) {
+        LOGI("TRIGGER Stop ZSL for cancel picture");
+        mm_camera_stop_zsl_snapshot(ch_obj->cam_obj);
+        // Unlock AEC
+        ch_obj->startZSlSnapshotCalled = FALSE;
+        ch_obj->needLEDFlash = FALSE;
+        ch_obj->burstSnapNum = 0;
+        ch_obj->stopZslSnapshot = 0;
+        ch_obj->bWaitForPrepSnapshotDone = 0;
+        ch_obj->unLockAEC = 1;
+        ch_obj->bracketingState = MM_CHANNEL_BRACKETING_STATE_OFF;
+        ch_obj->isConfigCapture = FALSE;
+    }
+    /* bufdone for overflowed bufs */
+    mm_channel_superbuf_bufdone_overflow(ch_obj, &ch_obj->bundle.superbuf_queue);
+
+    LOGD("Super Buffer received, pending_cnt=%d queue cnt = %d expected = %d",
+            ch_obj->pending_cnt, ch_obj->bundle.superbuf_queue.match_cnt,
+            ch_obj->bundle.superbuf_queue.expected_frame_id);
+
+    /* dispatch frame if pending_cnt>0 or is in continuous streaming mode */
+    while (((ch_obj->pending_cnt > 0) ||
+             (MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS == notify_mode)) &&
+             (!ch_obj->bWaitForPrepSnapshotDone)) {
+
+        /* dequeue */
+        mm_channel_node_info_t info;
+        memset(&info, 0x0, sizeof(info));
+
+        if (ch_obj->req_type == MM_CAMERA_REQ_FRAME_SYNC_BUF) {
+            // Lock the Queues
+            mm_frame_sync_lock_queues();
+            uint32_t match_frame = mm_frame_sync_find_matched(FALSE);
+            if (match_frame) {
+                uint8_t j = 0;
+                /* Pull the matched frame from every registered channel so
+                 * the callback gets one node per camera. */
+                for (j = 0; j < MAX_NUM_CAMERA_PER_BUNDLE; j++) {
+                    if (fs.ch_obj[j]) {
+                        mm_channel_queue_t *ch_queue =
+                                &fs.ch_obj[j]->bundle.superbuf_queue;
+                        if (ch_queue == NULL) {
+                            LOGW("Channel queue is NULL");
+                            break;
+                        }
+                        node = mm_channel_superbuf_dequeue_frame_internal(
+                                ch_queue, match_frame);
+                        if (node != NULL) {
+                            info.ch_obj[info.num_nodes] = fs.ch_obj[j];
+                            info.node[info.num_nodes] = node;
+                            info.num_nodes++;
+                            LOGH("Added ch(%p) to node ,num nodes %d",
+                                     fs.ch_obj[j], info.num_nodes);
+                        }
+                    }
+                }
+                mm_frame_sync_remove(match_frame);
+                LOGI("match frame %d", match_frame);
+                if (info.num_nodes != fs.num_cam) {
+                    LOGI("num node %d != num cam (%d) Debug this",
+                             info.num_nodes, fs.num_cam);
+                    uint8_t j = 0;
+                    // free super buffers from various nodes
+                    for (j = 0; j < info.num_nodes; j++) {
+                        if (info.node[j]) {
+                            mm_channel_node_qbuf(info.ch_obj[j], info.node[j]);
+                            free(info.node[j]);
+                        }
+                    }
+                    // we should not use it as matched dual camera frames
+                    info.num_nodes = 0;
+                }
+            }
+            mm_frame_sync_unlock_queues();
+        } else {
+           node = mm_channel_superbuf_dequeue(&ch_obj->bundle.superbuf_queue, ch_obj);
+           if (node != NULL) {
+               if (ch_obj->isConfigCapture &&
+                       ((node->frame_idx <
+                        ch_obj->capture_frame_id[ch_obj->cur_capture_idx]))) {
+                   uint8_t i;
+                   LOGD("Not expected super buffer. frameID = %d expected = %d",
+                           node->frame_idx, ch_obj->capture_frame_id[ch_obj->cur_capture_idx]);
+                   for (i = 0; i < node->num_of_bufs; i++) {
+                       mm_channel_qbuf(ch_obj, node->super_buf[i].buf);
+                   }
+                   free(node);
+               } else {
+                   info.num_nodes = 1;
+                   info.ch_obj[0] = ch_obj;
+                   info.node[0] = node;
+               }
+            }
+        }
+        if (info.num_nodes > 0) {
+            /* decrease pending_cnt */
+            if (MM_CAMERA_SUPER_BUF_NOTIFY_BURST == notify_mode) {
+                ch_obj->pending_cnt--;
+                if (ch_obj->pending_retro_cnt > 0) {
+                  if (ch_obj->pending_retro_cnt == 1) {
+                    ch_obj->bWaitForPrepSnapshotDone = 1;
+                  }
+                  ch_obj->pending_retro_cnt--;
+                }
+
+                if (((ch_obj->pending_cnt == 0) ||
+                      (ch_obj->stopZslSnapshot == 1)) &&
+                      (ch_obj->manualZSLSnapshot == FALSE) &&
+                       ch_obj->startZSlSnapshotCalled == TRUE) {
+                    LOGI("TRIGGER Stop ZSL. All frame received");
+                    mm_camera_stop_zsl_snapshot(ch_obj->cam_obj);
+                    ch_obj->startZSlSnapshotCalled = FALSE;
+                    ch_obj->burstSnapNum = 0;
+                    ch_obj->stopZslSnapshot = 0;
+                    ch_obj->unLockAEC = 1;
+                    ch_obj->needLEDFlash = FALSE;
+                    ch_obj->bracketingState = MM_CHANNEL_BRACKETING_STATE_OFF;
+                    ch_obj->isConfigCapture = FALSE;
+                }
+
+                if (ch_obj->isConfigCapture) {
+                    if (ch_obj->frameConfig.configs[ch_obj->cur_capture_idx].num_frames != 0) {
+                        ch_obj->frameConfig.configs[ch_obj->cur_capture_idx].num_frames--;
+                    } else {
+                        LOGW("Invalid frame config batch index %d max batch = %d",
+                                ch_obj->cur_capture_idx, ch_obj->frameConfig.num_batch);
+                    }
+
+                    if (ch_obj->frameConfig.configs[ch_obj->cur_capture_idx].num_frames == 0) {
+                        //Received all frames for current batch
+                        ch_obj->cur_capture_idx++;
+                        /* Guard: once the final batch completes cur_capture_idx
+                         * can reach MAX_CAPTURE_BATCH_NUM; indexing
+                         * capture_frame_id[] there would read out of bounds. */
+                        if (ch_obj->cur_capture_idx < MAX_CAPTURE_BATCH_NUM) {
+                            ch_obj->bundle.superbuf_queue.expected_frame_id =
+                                    ch_obj->capture_frame_id[ch_obj->cur_capture_idx];
+                            ch_obj->bundle.superbuf_queue.good_frame_id =
+                                    ch_obj->capture_frame_id[ch_obj->cur_capture_idx];
+                        }
+                    } else {
+                        LOGH("Need %d frames more for batch %d",
+                                ch_obj->frameConfig.configs[ch_obj->cur_capture_idx].num_frames,
+                                ch_obj->cur_capture_idx);
+                    }
+                }
+            }
+            /* dispatch superbuf */
+            mm_channel_send_super_buf(&info);
+        } else {
+            /* no superbuf avail, break the loop */
+            break;
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_send_super_buf
+ *
+ * DESCRIPTION: Send super buffers to HAL
+ *
+ * PARAMETERS :
+ *   @info  : Info of super buffers to be sent in callback
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void mm_channel_send_super_buf(mm_channel_node_info_t *info)
+{
+    if (!info || !info->num_nodes){
+        LOGE("X Error!! Info invalid");
+        return;
+    }
+    mm_channel_queue_node_t *node = NULL;
+
+    LOGH("num nodes %d to send", info->num_nodes);
+    uint32_t idx = 0;
+    mm_channel_t *ch_obj = NULL;
+    /* Each node is consumed exactly once below: either wrapped into a
+     * cb command, or its buffers are re-queued; the node is freed in
+     * both of those paths. */
+    for (idx = 0; idx < info->num_nodes; idx++) {
+        node = info->node[idx];
+        ch_obj = info->ch_obj[idx];
+        if ((ch_obj) && (NULL != ch_obj->bundle.super_buf_notify_cb) && node) {
+            mm_camera_cmdcb_t* cb_node = NULL;
+            LOGD("Send superbuf to HAL, pending_cnt=%d",
+                     ch_obj->pending_cnt);
+            /* send cam_sem_post to wake up cb thread to dispatch super buffer */
+            cb_node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+            if (NULL != cb_node) {
+                memset(cb_node, 0, sizeof(mm_camera_cmdcb_t));
+                cb_node->cmd_type = MM_CAMERA_CMD_TYPE_SUPER_BUF_DATA_CB;
+                cb_node->u.superbuf.num_bufs = node->num_of_bufs;
+                uint8_t i = 0;
+                /* Copy buffer pointers only; ownership of the buffers
+                 * passes to the cb thread via the queued command. */
+                for (i = 0; i < node->num_of_bufs; i++) {
+                    cb_node->u.superbuf.bufs[i] = node->super_buf[i].buf;
+                }
+                cb_node->u.superbuf.camera_handle = ch_obj->cam_obj->my_hdl;
+                cb_node->u.superbuf.ch_id = ch_obj->my_hdl;
+                cb_node->u.superbuf.bReadyForPrepareSnapshot =
+                        ch_obj->bWaitForPrepSnapshotDone;
+                /* One-shot AEC unlock flag: consumed and cleared here. */
+                if (ch_obj->unLockAEC == 1) {
+                    cb_node->u.superbuf.bUnlockAEC = 1;
+                    LOGH("Unlocking AEC");
+                    ch_obj->unLockAEC = 0;
+                }
+                /* enqueue to cb thread */
+                cam_queue_enq(&(ch_obj->cb_thread.cmd_queue), cb_node);
+                /* wake up cb thread */
+                cam_sem_post(&(ch_obj->cb_thread.cmd_sem));
+                LOGH("Sent super buf for node[%d] ", idx);
+
+            } else {
+                LOGE("No memory for mm_camera_node_t");
+                /* buf done with the unused super buf */
+                uint8_t i = 0;
+                for (i = 0; i < node->num_of_bufs; i++) {
+                    mm_channel_qbuf(ch_obj, node->super_buf[i].buf);
+                }
+            }
+            free(node);
+        } else if ((ch_obj != NULL) && (node != NULL)) {
+            /* buf done with the unused super buf */
+            uint8_t i;
+            for (i = 0; i < node->num_of_bufs; i++) {
+                mm_channel_qbuf(ch_obj, node->super_buf[i].buf);
+            }
+            free(node);
+        } else {
+            /* NOTE(review): if ch_obj is NULL but node is not, the node
+             * (and its buffers) are leaked here -- confirm callers never
+             * populate a node without its channel. */
+            LOGE("node is NULL, debug this");
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_reg_stream_buf_cb
+ *
+ * DESCRIPTION: Register callback for stream buffer
+ *
+ * PARAMETERS :
+ *   @my_obj     : Channel object
+ *   @stream_id  : stream that will be linked
+ *   @buf_cb     : special callback needs to be registered for stream buffer
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 --  failure
+ *==========================================================================*/
+int32_t mm_channel_reg_stream_buf_cb (mm_channel_t* my_obj,
+        uint32_t stream_id, mm_stream_data_cb_t buf_cb)
+{
+    /* Look up the stream by handle and register the buffer callback on
+     * it; returns 0 on success (or for linked streams), -1 on failure. */
+    mm_stream_t *stream = mm_channel_util_get_stream_by_handler(my_obj,
+            stream_id);
+
+    if (stream == NULL) {
+        return -1;
+    }
+
+    if (stream->ch_obj != my_obj) {
+        /* No op. on linked streams */
+        return 0;
+    }
+
+    return mm_stream_reg_buf_cb(stream, buf_cb);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_fsm_fn
+ *
+ * DESCRIPTION: channel finite state machine entry function. Depends on channel
+ *              state, incoming event will be handled differently.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a channel object
+ *   @evt      : channel event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn(mm_channel_t *my_obj,
+                          mm_channel_evt_type_t evt,
+                          void * in_val,
+                          void * out_val)
+{
+    int32_t rc = -1;
+
+    LOGD("E state = %d", my_obj->state);
+    /* Dispatch to the handler for the channel's current state; each
+     * handler interprets evt and the opaque in/out payloads itself. */
+    switch (my_obj->state) {
+    case MM_CHANNEL_STATE_NOTUSED:
+        rc = mm_channel_fsm_fn_notused(my_obj, evt, in_val, out_val);
+        break;
+    case MM_CHANNEL_STATE_STOPPED:
+        rc = mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);
+        break;
+    case MM_CHANNEL_STATE_ACTIVE:
+        rc = mm_channel_fsm_fn_active(my_obj, evt, in_val, out_val);
+        break;
+    case MM_CHANNEL_STATE_PAUSED:
+        rc = mm_channel_fsm_fn_paused(my_obj, evt, in_val, out_val);
+        break;
+    default:
+        LOGD("Not a valid state (%d)", my_obj->state);
+        break;
+    }
+
+    /* unlock ch_lock */
+    /* NOTE(review): ch_lock is released here but never acquired in this
+     * function -- presumably the caller locks it before dispatching an
+     * event; confirm against the channel entry points. */
+    pthread_mutex_unlock(&my_obj->ch_lock);
+    LOGD("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_fsm_fn_notused
+ *
+ * DESCRIPTION: channel finite state machine function to handle event
+ *              in NOT_USED state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a channel object
+ *   @evt      : channel event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn_notused(mm_channel_t *my_obj,
+                                  mm_channel_evt_type_t evt,
+                                  void * in_val,
+                                  void * out_val)
+{
+    /* No channel event is legal while the channel is NOT_USED: every
+     * incoming event is rejected with an error log and -1. */
+    switch (evt) {
+    default:
+        LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+                    my_obj->state, evt, in_val, out_val);
+        break;
+    }
+
+    return -1;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_fsm_fn_stopped
+ *
+ * DESCRIPTION: channel finite state machine function to handle event
+ *              in STOPPED state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a channel object
+ *   @evt      : channel event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn_stopped(mm_channel_t *my_obj,
+                                  mm_channel_evt_type_t evt,
+                                  void * in_val,
+                                  void * out_val)
+{
+    int32_t rc = 0;
+    LOGD("E evt = %d", evt);
+    switch (evt) {
+    case MM_CHANNEL_EVT_ADD_STREAM:
+        {
+            /* allocate a stream slot; handle (0 on failure) is returned
+             * through out_val.
+             * NOTE(review): out_val is dereferenced unchecked -- callers
+             * must always supply it for this event. */
+            uint32_t s_hdl = 0;
+            s_hdl = mm_channel_add_stream(my_obj);
+            *((uint32_t*)out_val) = s_hdl;
+            rc = 0;
+        }
+        break;
+    case MM_CHANNEL_EVT_LINK_STREAM:
+        {
+            /* link a stream owned by another channel into this one */
+            mm_camera_stream_link_t *stream_link = NULL;
+            uint32_t s_hdl = 0;
+            stream_link = (mm_camera_stream_link_t *) in_val;
+            s_hdl = mm_channel_link_stream(my_obj, stream_link);
+            *((uint32_t*)out_val) = s_hdl;
+            rc = 0;
+        }
+        break;
+    case MM_CHANNEL_EVT_DEL_STREAM:
+        {
+            uint32_t s_id = *((uint32_t *)in_val);
+            rc = mm_channel_del_stream(my_obj, s_id);
+        }
+        break;
+    case MM_CHANNEL_EVT_START:
+        {
+            rc = mm_channel_start(my_obj);
+            /* first stream started in stopped state
+             * move to active state */
+            if (0 == rc) {
+                my_obj->state = MM_CHANNEL_STATE_ACTIVE;
+            }
+        }
+        break;
+    case MM_CHANNEL_EVT_CONFIG_STREAM:
+        {
+            mm_evt_paylod_config_stream_t *payload =
+                (mm_evt_paylod_config_stream_t *)in_val;
+            rc = mm_channel_config_stream(my_obj,
+                                          payload->stream_id,
+                                          payload->config);
+        }
+        break;
+    case MM_CHANNEL_EVT_GET_BUNDLE_INFO:
+        {
+            cam_bundle_config_t *payload =
+                (cam_bundle_config_t *)in_val;
+            rc = mm_channel_get_bundle_info(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_DELETE:
+        {
+            /* tear the channel down; state moves to NOTUSED inside */
+            mm_channel_release(my_obj);
+            rc = 0;
+        }
+        break;
+    case MM_CHANNEL_EVT_SET_STREAM_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_channel_set_stream_parm(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_GET_STREAM_QUEUED_BUF_COUNT:
+        {
+            uint32_t stream_id = *((uint32_t *)in_val);
+            rc = mm_channel_get_queued_buf_count(my_obj, stream_id);
+        }
+        break;
+    case MM_CHANNEL_EVT_GET_STREAM_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_channel_get_stream_parm(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_DO_STREAM_ACTION:
+        {
+            mm_evt_paylod_do_stream_action_t *payload =
+                (mm_evt_paylod_do_stream_action_t *)in_val;
+            rc = mm_channel_do_stream_action(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_MAP_STREAM_BUF:
+        {
+            cam_buf_map_type *payload =
+                (cam_buf_map_type *)in_val;
+            rc = mm_channel_map_stream_buf(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_MAP_STREAM_BUFS:
+        {
+            cam_buf_map_type_list *payload =
+                (cam_buf_map_type_list *)in_val;
+            rc = mm_channel_map_stream_bufs(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_UNMAP_STREAM_BUF:
+        {
+            cam_buf_unmap_type *payload =
+                (cam_buf_unmap_type *)in_val;
+            rc = mm_channel_unmap_stream_buf(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_REG_STREAM_BUF_CB:
+        {
+            mm_evt_paylod_reg_stream_buf_cb *payload =
+                (mm_evt_paylod_reg_stream_buf_cb *)in_val;
+            rc = mm_channel_reg_stream_buf_cb (my_obj,
+                    payload->stream_id, payload->buf_cb);
+        }
+        break;
+    default:
+        LOGE("invalid state (%d) for evt (%d)",
+                    my_obj->state, evt);
+        break;
+    }
+    /* exit trace: was logged with "E" (entry) tag; "X" matches the
+     * convention used by every other channel FSM handler */
+    LOGD("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_fsm_fn_active
+ *
+ * DESCRIPTION: channel finite state machine function to handle event
+ *              in ACTIVE state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a channel object
+ *   @evt      : channel event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn_active(mm_channel_t *my_obj,
+                          mm_channel_evt_type_t evt,
+                          void * in_val,
+                          void * out_val)
+{
+    int32_t rc = 0;
+
+    LOGD("E evt = %d", evt);
+    switch (evt) {
+    case MM_CHANNEL_EVT_STOP:
+        {
+            rc = mm_channel_stop(my_obj);
+            /* state moves to STOPPED even if the stop reported an error */
+            my_obj->state = MM_CHANNEL_STATE_STOPPED;
+        }
+        break;
+    case MM_CHANNEL_EVT_REQUEST_SUPER_BUF:
+        {
+            mm_camera_req_buf_t *payload =
+                    (mm_camera_req_buf_t *)in_val;
+            rc = mm_channel_request_super_buf(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_CANCEL_REQUEST_SUPER_BUF:
+        {
+            rc = mm_channel_cancel_super_buf_request(my_obj);
+        }
+        break;
+    case MM_CHANNEL_EVT_FLUSH_SUPER_BUF_QUEUE:
+        {
+            uint32_t frame_idx = *((uint32_t *)in_val);
+            rc = mm_channel_flush_super_buf_queue(my_obj, frame_idx, CAM_STREAM_TYPE_DEFAULT);
+        }
+        break;
+    case MM_CHANNEL_EVT_START_ZSL_SNAPSHOT:
+        {
+            rc = mm_channel_start_zsl_snapshot(my_obj);
+        }
+        break;
+    case MM_CHANNEL_EVT_STOP_ZSL_SNAPSHOT:
+        {
+            rc = mm_channel_stop_zsl_snapshot(my_obj);
+        }
+        break;
+    case MM_CHANNEL_EVT_CONFIG_NOTIFY_MODE:
+        {
+            mm_camera_super_buf_notify_mode_t notify_mode =
+                *((mm_camera_super_buf_notify_mode_t *)in_val);
+            rc = mm_channel_config_notify_mode(my_obj, notify_mode);
+        }
+        break;
+    case MM_CHANNEL_EVT_SET_STREAM_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_channel_set_stream_parm(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_GET_STREAM_QUEUED_BUF_COUNT:
+        {
+            uint32_t stream_id = *((uint32_t *)in_val);
+            rc = mm_channel_get_queued_buf_count(my_obj, stream_id);
+        }
+        break;
+    case MM_CHANNEL_EVT_GET_STREAM_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_channel_get_stream_parm(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_DO_STREAM_ACTION:
+        {
+            mm_evt_paylod_do_stream_action_t *payload =
+                (mm_evt_paylod_do_stream_action_t *)in_val;
+            rc = mm_channel_do_stream_action(my_obj, payload);
+        }
+        break;
+    /* while streaming, only offline (reprocess) buffers may be
+     * (un)mapped; regular stream buffers are rejected below */
+    case MM_CHANNEL_EVT_MAP_STREAM_BUF:
+        {
+            cam_buf_map_type *payload =
+                (cam_buf_map_type *)in_val;
+            if (payload != NULL) {
+                uint8_t type = payload->type;
+                if ((type == CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF) ||
+                        (type == CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF)) {
+                    rc = mm_channel_map_stream_buf(my_obj, payload);
+                }
+            } else {
+                /* typo fix: was "regualr" */
+                LOGE("cannot map regular stream buf in active state");
+            }
+        }
+        break;
+    case MM_CHANNEL_EVT_MAP_STREAM_BUFS:
+        {
+            cam_buf_map_type_list *payload =
+                (cam_buf_map_type_list *)in_val;
+            if ((payload != NULL) && (payload->length > 0)) {
+                uint8_t type = payload->buf_maps[0].type;
+                if ((type == CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF) ||
+                        (type == CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF)) {
+                    rc = mm_channel_map_stream_bufs(my_obj, payload);
+                }
+            } else {
+                LOGE("cannot map regular stream buf in active state");
+            }
+        }
+        break;
+    case MM_CHANNEL_EVT_UNMAP_STREAM_BUF:
+        {
+            cam_buf_unmap_type *payload =
+                (cam_buf_unmap_type *)in_val;
+            if (payload != NULL) {
+                uint8_t type = payload->type;
+                if ((type == CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF) ||
+                        (type == CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF)) {
+                    rc = mm_channel_unmap_stream_buf(my_obj, payload);
+                }
+            } else {
+                LOGE("cannot unmap regular stream buf in active state");
+            }
+        }
+        break;
+    case MM_CHANNEL_EVT_AF_BRACKETING:
+        {
+            LOGH("MM_CHANNEL_EVT_AF_BRACKETING");
+            uint32_t start_flag = *((uint32_t *)in_val);
+            mm_camera_generic_cmd_t gen_cmd;
+            gen_cmd.type = MM_CAMERA_GENERIC_CMD_TYPE_AF_BRACKETING;
+            gen_cmd.payload[0] = start_flag;
+            rc = mm_channel_proc_general_cmd(my_obj, &gen_cmd);
+        }
+        break;
+    case MM_CHANNEL_EVT_AE_BRACKETING:
+        {
+            LOGH("MM_CHANNEL_EVT_AE_BRACKETING");
+            uint32_t start_flag = *((uint32_t *)in_val);
+            mm_camera_generic_cmd_t gen_cmd;
+            gen_cmd.type = MM_CAMERA_GENERIC_CMD_TYPE_AE_BRACKETING;
+            gen_cmd.payload[0] = start_flag;
+            rc = mm_channel_proc_general_cmd(my_obj, &gen_cmd);
+        }
+        break;
+    case MM_CHANNEL_EVT_FLASH_BRACKETING:
+        {
+            LOGH("MM_CHANNEL_EVT_FLASH_BRACKETING");
+            uint32_t start_flag = *((uint32_t *)in_val);
+            mm_camera_generic_cmd_t gen_cmd;
+            gen_cmd.type = MM_CAMERA_GENERIC_CMD_TYPE_FLASH_BRACKETING;
+            gen_cmd.payload[0] = start_flag;
+            rc = mm_channel_proc_general_cmd(my_obj, &gen_cmd);
+        }
+        break;
+    case MM_CHANNEL_EVT_ZOOM_1X:
+        {
+            LOGH("MM_CHANNEL_EVT_ZOOM_1X");
+            uint32_t start_flag = *((uint32_t *)in_val);
+            mm_camera_generic_cmd_t gen_cmd;
+            gen_cmd.type = MM_CAMERA_GENERIC_CMD_TYPE_ZOOM_1X;
+            gen_cmd.payload[0] = start_flag;
+            rc = mm_channel_proc_general_cmd(my_obj, &gen_cmd);
+        }
+        break;
+    case MM_CAMERA_EVT_CAPTURE_SETTING:
+        {
+            mm_camera_generic_cmd_t gen_cmd;
+            cam_capture_frame_config_t *input;
+            gen_cmd.type = MM_CAMERA_GENERIC_CMD_TYPE_CAPTURE_SETTING;
+            LOGH("MM_CAMERA_EVT_CAPTURE_SETTING");
+            /* payload[0] flags whether a frame config accompanies the cmd */
+            if (in_val == NULL) {
+                gen_cmd.payload[0] = 0;
+                memset(&gen_cmd.frame_config, 0, sizeof(cam_capture_frame_config_t));
+            } else {
+                gen_cmd.payload[0] = 1;
+                input = (cam_capture_frame_config_t *)in_val;
+                gen_cmd.frame_config = *input;
+            }
+            rc = mm_channel_proc_general_cmd(my_obj, &gen_cmd);
+        }
+        break;
+    case MM_CHANNEL_EVT_REG_STREAM_BUF_CB:
+        {
+            mm_evt_paylod_reg_stream_buf_cb *payload =
+                (mm_evt_paylod_reg_stream_buf_cb *)in_val;
+            rc = mm_channel_reg_stream_buf_cb (my_obj,
+                    payload->stream_id, payload->buf_cb);
+        }
+        break;
+    default:
+        LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+                    my_obj->state, evt, in_val, out_val);
+        break;
+    }
+    LOGD("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_fsm_fn_paused
+ *
+ * DESCRIPTION: channel finite state machine function to handle event
+ *              in PAUSED state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a channel object
+ *   @evt      : channel event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn_paused(mm_channel_t *my_obj,
+                          mm_channel_evt_type_t evt,
+                          void * in_val,
+                          void * out_val)
+{
+    /* Pause/resume is not supported for channels, so any event landing
+     * here is logged as invalid.  The handler nevertheless reports
+     * success (0), matching the existing contract. */
+    LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+                my_obj->state, evt, in_val, out_val);
+
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_init
+ *
+ * DESCRIPTION: initialize a channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object be to initialized
+ *   @attr         : bundle attribute of the channel if needed
+ *   @channel_cb   : callback function for bundle data notify
+ *   @userdata     : user data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : if no bundle data notify is needed, meaning each stream in the
+ *              channel will have its own stream data notify callback, then
+ *              attr, channel_cb, and userdata can be NULL. In this case,
+ *              no matching logic will be performed in channel for the bundling.
+ *==========================================================================*/
+int32_t mm_channel_init(mm_channel_t *my_obj,
+                        mm_camera_channel_attr_t *attr,
+                        mm_camera_buf_notify_t channel_cb,
+                        void *userdata)
+{
+    int32_t rc = 0;
+
+    /* record the bundle (super-buffer) callback and its user cookie */
+    my_obj->bundle.super_buf_notify_cb = channel_cb;
+    my_obj->bundle.user_data = userdata;
+    /* attr is optional; when absent the superbuf queue keeps defaults */
+    if (NULL != attr) {
+        my_obj->bundle.superbuf_queue.attr = *attr;
+    }
+
+    LOGD("Launch data poll thread in channel open");
+    snprintf(my_obj->poll_thread[0].threadName, THREAD_NAME_SIZE, "CAM_dataPoll");
+    /* NOTE(review): launch result is ignored and rc stays 0 -- confirm
+     * mm_camera_poll_thread_launch cannot fail in this path. */
+    mm_camera_poll_thread_launch(&my_obj->poll_thread[0],
+                                 MM_CAMERA_POLL_TYPE_DATA);
+
+    /* change state to stopped state */
+    my_obj->state = MM_CHANNEL_STATE_STOPPED;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_release
+ *
+ * DESCRIPTION: release a channel resource. Channel state will move to UNUSED
+ *              state after this call.
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void mm_channel_release(mm_channel_t *my_obj)
+{
+    /* stop data poll thread */
+    /* (counterpart of the launch done in mm_channel_init) */
+    mm_camera_poll_thread_release(&my_obj->poll_thread[0]);
+
+    /* memset bundle info */
+    /* clears the super-buf callback, user data and queue attributes */
+    memset(&my_obj->bundle, 0, sizeof(mm_channel_bundle_t));
+
+    /* change state to notused state */
+    /* marks this channel slot as free for reuse */
+    my_obj->state = MM_CHANNEL_STATE_NOTUSED;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_link_stream
+ *
+ * DESCRIPTION: link a stream from external channel into this channel
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *   @stream_link  : channel and stream to be linked
+ *
+ * RETURN     : uint32_t type of stream handle
+ *              0  -- invalid stream handle, meaning the op failed
+ *              >0 -- successfully added a stream with a valid handle
+ *==========================================================================*/
+uint32_t mm_channel_link_stream(mm_channel_t *my_obj,
+        mm_camera_stream_link_t *stream_link)
+{
+    uint8_t idx = 0;
+    uint32_t s_hdl = 0;
+    mm_stream_t *stream_obj = NULL;
+    mm_stream_t *stream = NULL;
+
+    if (NULL == stream_link) {
+        LOGE("Invalid stream link");
+        return 0;
+    }
+
+    /* resolve the source stream inside its owning (external) channel */
+    stream = mm_channel_util_get_stream_by_handler(stream_link->ch,
+            stream_link->stream_id);
+    if (NULL == stream) {
+        return 0;
+    }
+
+    /* check available stream */
+    /* find a free local slot to host the linked stream */
+    for (idx = 0; idx < MAX_STREAM_NUM_IN_BUNDLE; idx++) {
+        if (MM_STREAM_STATE_NOTUSED == my_obj->streams[idx].state) {
+            stream_obj = &my_obj->streams[idx];
+            break;
+        }
+    }
+    if (NULL == stream_obj) {
+        LOGE("streams reach max, no more stream allowed to add");
+        return s_hdl;
+    }
+
+    /* initialize stream object */
+    /* shallow copy of the source stream; ch_obj still points to the
+     * owning channel, which is how linked streams are told apart from
+     * owned ones elsewhere in this file */
+    *stream_obj = *stream;
+    stream_obj->linked_stream = stream;
+    /* the source stream's own handle is what the caller gets back */
+    s_hdl = stream->my_hdl;
+
+    LOGD("stream handle = %d", s_hdl);
+    return s_hdl;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_add_stream
+ *
+ * DESCRIPTION: add a stream into the channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *
+ * RETURN     : uint32_t type of stream handle
+ *              0  -- invalid stream handle, meaning the op failed
+ *              >0 -- successfully added a stream with a valid handle
+ *==========================================================================*/
+uint32_t mm_channel_add_stream(mm_channel_t *my_obj)
+{
+    int32_t rc = 0;
+    uint8_t idx = 0;
+    uint32_t s_hdl = 0;
+    mm_stream_t *stream_obj = NULL;
+
+    LOGD("E");
+    /* check available stream */
+    /* scan for the first unused slot in the channel's stream array */
+    for (idx = 0; idx < MAX_STREAM_NUM_IN_BUNDLE; idx++) {
+        if (MM_STREAM_STATE_NOTUSED == my_obj->streams[idx].state) {
+            stream_obj = &my_obj->streams[idx];
+            break;
+        }
+    }
+    if (NULL == stream_obj) {
+        LOGE("streams reach max, no more stream allowed to add");
+        return s_hdl;
+    }
+
+    /* initialize stream object */
+    memset(stream_obj, 0, sizeof(mm_stream_t));
+    /* -1 marks "no device fd opened yet" */
+    stream_obj->fd = -1;
+    /* handle encodes the slot index via the util generator */
+    stream_obj->my_hdl = mm_camera_util_generate_handler(idx);
+    stream_obj->ch_obj = my_obj;
+    pthread_mutex_init(&stream_obj->buf_lock, NULL);
+    pthread_mutex_init(&stream_obj->cb_lock, NULL);
+    pthread_mutex_init(&stream_obj->cmd_lock, NULL);
+    pthread_cond_init(&stream_obj->buf_cond, NULL);
+    memset(stream_obj->buf_status, 0,
+            sizeof(stream_obj->buf_status));
+    stream_obj->state = MM_STREAM_STATE_INITED;
+
+    /* acquire stream */
+    rc = mm_stream_fsm_fn(stream_obj, MM_STREAM_EVT_ACQUIRE, NULL, NULL);
+    if (0 == rc) {
+        s_hdl = stream_obj->my_hdl;
+    } else {
+        /* error during acquire, de-init */
+        /* destroy the sync objects created above and wipe the slot so it
+         * reads as NOTUSED again; s_hdl stays 0 to signal failure */
+        pthread_cond_destroy(&stream_obj->buf_cond);
+        pthread_mutex_destroy(&stream_obj->buf_lock);
+        pthread_mutex_destroy(&stream_obj->cb_lock);
+        pthread_mutex_destroy(&stream_obj->cmd_lock);
+        memset(stream_obj, 0, sizeof(mm_stream_t));
+    }
+    LOGD("stream handle = %d", s_hdl);
+    return s_hdl;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_del_stream
+ *
+ * DESCRIPTION: delete a stream from the channel by its handle
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @stream_id    : stream handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : assume stream is stopped before it can be deleted
+ *==========================================================================*/
+int32_t mm_channel_del_stream(mm_channel_t *my_obj,
+                              uint32_t stream_id)
+{
+    int rc = -1;
+    mm_stream_t * stream_obj = NULL;
+    stream_obj = mm_channel_util_get_stream_by_handler(my_obj, stream_id);
+
+    if (NULL == stream_obj) {
+        LOGE("Invalid Stream Object for stream_id = %d", stream_id);
+        return rc;
+    }
+
+    if (stream_obj->ch_obj != my_obj) {
+        /* Only unlink stream */
+        /* this slot only holds a link to a stream owned by another
+         * channel: clear the link flags on the real stream (under its
+         * buf_lock) and wipe the local shallow copy */
+        pthread_mutex_lock(&stream_obj->linked_stream->buf_lock);
+        stream_obj->linked_stream->is_linked = 0;
+        stream_obj->linked_stream->linked_obj = NULL;
+        pthread_mutex_unlock(&stream_obj->linked_stream->buf_lock);
+        memset(stream_obj, 0, sizeof(mm_stream_t));
+
+        return 0;
+    }
+
+    /* owned stream: let the stream state machine release its resources */
+    rc = mm_stream_fsm_fn(stream_obj,
+                          MM_STREAM_EVT_RELEASE,
+                          NULL,
+                          NULL);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_config_stream
+ *
+ * DESCRIPTION: configure a stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @stream_id    : stream handle
+ *   @config       : stream configuration
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_config_stream(mm_channel_t *my_obj,
+                                   uint32_t stream_id,
+                                   mm_camera_stream_config_t *config)
+{
+    mm_stream_t *stream = NULL;
+
+    LOGD("E stream ID = %d", stream_id);
+    stream = mm_channel_util_get_stream_by_handler(my_obj, stream_id);
+    if (NULL == stream) {
+        LOGE("Invalid Stream Object for stream_id = %d", stream_id);
+        return -1;
+    }
+
+    if (stream->ch_obj != my_obj) {
+        /* No op. on linked streams */
+        return 0;
+    }
+
+    /* set stream fmt */
+    /* push the configuration down into the stream state machine */
+    int32_t ret = mm_stream_fsm_fn(stream,
+                                   MM_STREAM_EVT_SET_FMT,
+                                   (void *)config,
+                                   NULL);
+    LOGD("X rc = %d",ret);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_get_bundle_info
+ *
+ * DESCRIPTION: query bundle info of the channel, which should include all
+ *              streams within this channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @bundle_info  : bundle info to be filled in
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_get_bundle_info(mm_channel_t *my_obj,
+                                   cam_bundle_config_t *bundle_info)
+{
+    int i;
+    mm_stream_t *s_obj = NULL;
+    cam_stream_type_t stream_type = CAM_STREAM_TYPE_DEFAULT;
+    int32_t rc = 0;
+
+    memset(bundle_info, 0, sizeof(cam_bundle_config_t));
+    /* the channel's own handle doubles as the bundle id */
+    bundle_info->bundle_id = my_obj->my_hdl;
+    bundle_info->num_of_streams = 0;
+    for (i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
+        /* my_hdl > 0 means the slot is occupied */
+        if (my_obj->streams[i].my_hdl > 0) {
+            s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+                                                          my_obj->streams[i].my_hdl);
+            if (NULL != s_obj) {
+                stream_type = s_obj->stream_info->stream_type;
+                /* metadata streams and streams merely linked from other
+                 * channels (ch_obj != my_obj) are excluded from bundling */
+                if ((CAM_STREAM_TYPE_METADATA != stream_type) &&
+                        (s_obj->ch_obj == my_obj)) {
+                    bundle_info->stream_ids[bundle_info->num_of_streams++] =
+                                                        s_obj->server_stream_id;
+                }
+            } else {
+                LOGE("cannot find stream obj (%d) by handler (%d)",
+                            i, my_obj->streams[i].my_hdl);
+                rc = -1;
+                break;
+            }
+        }
+    }
+    if (rc != 0) {
+        /* error, reset to 0 */
+        memset(bundle_info, 0, sizeof(cam_bundle_config_t));
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_start
+ *
+ * DESCRIPTION: start a channel, which will start all streams in the channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_start(mm_channel_t *my_obj)
+{
+    int32_t rc = 0;
+    int i = 0, j = 0;
+    mm_stream_t *s_objs[MAX_STREAM_NUM_IN_BUNDLE] = {NULL};
+    uint8_t num_streams_to_start = 0;
+    uint8_t num_streams_in_bundle_queue = 0;
+    mm_stream_t *s_obj = NULL;
+    int meta_stream_idx = 0;
+    cam_stream_type_t stream_type = CAM_STREAM_TYPE_DEFAULT;
+
+    for (i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
+        if (my_obj->streams[i].my_hdl > 0) {
+            s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+                                                          my_obj->streams[i].my_hdl);
+            if (NULL != s_obj) {
+                stream_type = s_obj->stream_info->stream_type;
+                /* remember meta data stream index */
+                if ((stream_type == CAM_STREAM_TYPE_METADATA) &&
+                        (s_obj->ch_obj == my_obj)) {
+                    meta_stream_idx = num_streams_to_start;
+                }
+                s_objs[num_streams_to_start++] = s_obj;
+
+                if (!s_obj->stream_info->noFrameExpected) {
+                    num_streams_in_bundle_queue++;
+                }
+            }
+        }
+    }
+
+    if (meta_stream_idx > 0 ) {
+        /* always start meta data stream first, so switch the stream object with the first one */
+        s_obj = s_objs[0];
+        s_objs[0] = s_objs[meta_stream_idx];
+        s_objs[meta_stream_idx] = s_obj;
+    }
+
+    if (NULL != my_obj->bundle.super_buf_notify_cb) {
+        /* need to send up cb, therefore launch thread */
+        /* init superbuf queue */
+        mm_channel_superbuf_queue_init(&my_obj->bundle.superbuf_queue);
+        my_obj->bundle.superbuf_queue.num_streams = num_streams_in_bundle_queue;
+        my_obj->bundle.superbuf_queue.expected_frame_id =
+                my_obj->bundle.superbuf_queue.attr.user_expected_frame_id;
+        my_obj->bundle.superbuf_queue.expected_frame_id_without_led = 0;
+        my_obj->bundle.superbuf_queue.led_off_start_frame_id = 0;
+        my_obj->bundle.superbuf_queue.led_on_start_frame_id = 0;
+        my_obj->bundle.superbuf_queue.led_on_num_frames = 0;
+        my_obj->bundle.superbuf_queue.good_frame_id = 0;
+
+        for (i = 0; i < num_streams_to_start; i++) {
+            /* Only bundle streams that belong to the channel */
+            if(!(s_objs[i]->stream_info->noFrameExpected)) {
+                if (s_objs[i]->ch_obj == my_obj) {
+                    /* set bundled flag to streams */
+                    s_objs[i]->is_bundled = 1;
+                }
+                my_obj->bundle.superbuf_queue.bundled_streams[j++] = s_objs[i]->my_hdl;
+            }
+        }
+
+        /* launch cb thread for dispatching super buf through cb */
+        snprintf(my_obj->cb_thread.threadName, THREAD_NAME_SIZE, "CAM_SuperBuf");
+        mm_camera_cmd_thread_launch(&my_obj->cb_thread,
+                                    mm_channel_dispatch_super_buf,
+                                    (void*)my_obj);
+
+        /* launch cmd thread for super buf dataCB */
+        snprintf(my_obj->cmd_thread.threadName, THREAD_NAME_SIZE, "CAM_SuperBufCB");
+        mm_camera_cmd_thread_launch(&my_obj->cmd_thread,
+                                    mm_channel_process_stream_buf,
+                                    (void*)my_obj);
+
+        /* set flag to TRUE */
+        my_obj->bundle.is_active = TRUE;
+    }
+
+    /* link any streams first before starting the rest of the streams */
+    for (i = 0; i < num_streams_to_start; i++) {
+        if (s_objs[i]->ch_obj != my_obj) {
+            pthread_mutex_lock(&s_objs[i]->linked_stream->buf_lock);
+            s_objs[i]->linked_stream->linked_obj = my_obj;
+            s_objs[i]->linked_stream->is_linked = 1;
+            pthread_mutex_unlock(&s_objs[i]->linked_stream->buf_lock);
+            continue;
+        }
+    }
+
+    for (i = 0; i < num_streams_to_start; i++) {
+        if (s_objs[i]->ch_obj != my_obj) {
+            continue;
+        }
+        /* all streams within a channel should be started at the same time */
+        if (s_objs[i]->state == MM_STREAM_STATE_ACTIVE) {
+            LOGE("stream already started idx(%d)", i);
+            rc = -1;
+            break;
+        }
+
+        /* allocate buf */
+        rc = mm_stream_fsm_fn(s_objs[i],
+                              MM_STREAM_EVT_GET_BUF,
+                              NULL,
+                              NULL);
+        if (0 != rc) {
+            LOGE("get buf failed at idx(%d)", i);
+            break;
+        }
+
+        /* reg buf */
+        rc = mm_stream_fsm_fn(s_objs[i],
+                              MM_STREAM_EVT_REG_BUF,
+                              NULL,
+                              NULL);
+        if (0 != rc) {
+            LOGE("reg buf failed at idx(%d)", i);
+            break;
+        }
+
+        /* start stream */
+        rc = mm_stream_fsm_fn(s_objs[i],
+                              MM_STREAM_EVT_START,
+                              NULL,
+                              NULL);
+        if (0 != rc) {
+            LOGE("start stream failed at idx(%d)", i);
+            break;
+        }
+    }
+
+    /* error handling */
+    if (0 != rc) {
+        /* unlink the streams first */
+        for (j = 0; j < num_streams_to_start; j++) {
+            if (s_objs[j]->ch_obj != my_obj) {
+                pthread_mutex_lock(&s_objs[j]->linked_stream->buf_lock);
+                s_objs[j]->linked_stream->is_linked = 0;
+                s_objs[j]->linked_stream->linked_obj = NULL;
+                pthread_mutex_unlock(&s_objs[j]->linked_stream->buf_lock);
+
+                if (TRUE == my_obj->bundle.is_active) {
+                    mm_channel_flush_super_buf_queue(my_obj, 0,
+                            s_objs[i]->stream_info->stream_type);
+                }
+                memset(s_objs[j], 0, sizeof(mm_stream_t));
+                continue;
+            }
+        }
+
+        for (j = 0; j <= i; j++) {
+            if ((NULL == s_objs[j]) || (s_objs[j]->ch_obj != my_obj)) {
+                continue;
+            }
+            /* stop streams*/
+            mm_stream_fsm_fn(s_objs[j],
+                             MM_STREAM_EVT_STOP,
+                             NULL,
+                             NULL);
+
+            /* unreg buf */
+            mm_stream_fsm_fn(s_objs[j],
+                             MM_STREAM_EVT_UNREG_BUF,
+                             NULL,
+                             NULL);
+
+            /* put buf back */
+            mm_stream_fsm_fn(s_objs[j],
+                             MM_STREAM_EVT_PUT_BUF,
+                             NULL,
+                             NULL);
+        }
+
+        /* destroy super buf cmd thread */
+        if (TRUE == my_obj->bundle.is_active) {
+            /* first stop bundle thread */
+            mm_camera_cmd_thread_release(&my_obj->cmd_thread);
+            mm_camera_cmd_thread_release(&my_obj->cb_thread);
+
+            /* deinit superbuf queue */
+            mm_channel_superbuf_queue_deinit(&my_obj->bundle.superbuf_queue);
+
+            /* memset super buffer queue info */
+            my_obj->bundle.is_active = 0;
+            memset(&my_obj->bundle.superbuf_queue, 0, sizeof(mm_channel_queue_t));
+        }
+    }
+    my_obj->bWaitForPrepSnapshotDone = 0;
+    if (my_obj->bundle.superbuf_queue.attr.enable_frame_sync) {
+        LOGH("registering Channel obj %p", my_obj);
+        mm_frame_sync_register_channel(my_obj);
+    }
+    return rc;
+}
+
/*===========================================================================
 * FUNCTION   : mm_channel_stop
 *
 * DESCRIPTION: stop a channel, which will stop all streams in the channel.
 *              Teardown is strictly ordered: unregister frame sync, stream-off
 *              and unreg buffers, unlink foreign streams, release the bundle
 *              command/callback threads, and only then return stream buffers.
 *
 * PARAMETERS :
 *   @my_obj       : channel object
 *
 * RETURN     : int32_t type of status
 *              0  -- success
 *              -1 -- failure
 *==========================================================================*/
int32_t mm_channel_stop(mm_channel_t *my_obj)
{
    int32_t rc = 0;
    int i;
    mm_stream_t *s_objs[MAX_STREAM_NUM_IN_BUNDLE] = {NULL};
    uint8_t num_streams_to_stop = 0;
    mm_stream_t *s_obj = NULL;
    int meta_stream_idx = 0;
    cam_stream_type_t stream_type = CAM_STREAM_TYPE_DEFAULT;

    /* Detach from frame sync first so no new super buffers get matched
     * against this channel while it is being torn down. */
    if (my_obj->bundle.superbuf_queue.attr.enable_frame_sync) {
        mm_frame_sync_unregister_channel(my_obj);
    }

    /* Collect every valid stream object registered on this channel.
     * NOTE(review): streams whose ch_obj differs are linked from another
     * channel; they are gathered too but only unlinked, never stopped. */
    for (i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
        if (my_obj->streams[i].my_hdl > 0) {
            s_obj = mm_channel_util_get_stream_by_handler(my_obj,
                                                          my_obj->streams[i].my_hdl);
            if (NULL != s_obj) {
                if (s_obj->ch_obj == my_obj) {
                    stream_type = s_obj->stream_info->stream_type;
                    /* remember meta data stream index */
                    if (stream_type == CAM_STREAM_TYPE_METADATA) {
                        meta_stream_idx = num_streams_to_stop;
                    }
                }
                s_objs[num_streams_to_stop++] = s_obj;
            }
        }
    }

    if (meta_stream_idx < num_streams_to_stop - 1 ) {
        /* always stop meta data stream last, so switch the stream object with the last one */
        s_obj = s_objs[num_streams_to_stop - 1];
        s_objs[num_streams_to_stop - 1] = s_objs[meta_stream_idx];
        s_objs[meta_stream_idx] = s_obj;
    }

    /* Phase 1: stream-off and kernel buffer unregistration for streams
     * owned by this channel. */
    for (i = 0; i < num_streams_to_stop; i++) {
        /* stream that are linked to this channel should not be stopped */
        if (s_objs[i]->ch_obj != my_obj) {
            continue;
        }

        /* stream off */
        mm_stream_fsm_fn(s_objs[i],
                         MM_STREAM_EVT_STOP,
                         NULL,
                         NULL);

        /* unreg buf at kernel */
        mm_stream_fsm_fn(s_objs[i],
                         MM_STREAM_EVT_UNREG_BUF,
                         NULL,
                         NULL);
    }

    /* Phase 2: sever the link for streams owned by other channels. */
    for (i = 0; i < num_streams_to_stop; i++) {
        if (s_objs[i]->ch_obj != my_obj) {
            /* Only unlink stream */
            pthread_mutex_lock(&s_objs[i]->linked_stream->buf_lock);
            s_objs[i]->linked_stream->is_linked = 0;
            s_objs[i]->linked_stream->linked_obj = NULL;
            pthread_mutex_unlock(&s_objs[i]->linked_stream->buf_lock);
        }
    }

    /* destroy super buf cmd thread */
    if (TRUE == my_obj->bundle.is_active) {
        /* Drain any queued super buffers before stopping the threads that
         * would otherwise consume them. */
        mm_channel_flush_super_buf_queue(my_obj, 0, CAM_STREAM_TYPE_DEFAULT);
        /* first stop bundle thread */
        mm_camera_cmd_thread_release(&my_obj->cmd_thread);
        mm_camera_cmd_thread_release(&my_obj->cb_thread);

        /* deinit superbuf queue */
        mm_channel_superbuf_queue_deinit(&my_obj->bundle.superbuf_queue);

        /* reset few fields in the bundle info */
        my_obj->bundle.is_active = 0;
        my_obj->bundle.superbuf_queue.expected_frame_id = 0;
        my_obj->bundle.superbuf_queue.good_frame_id = 0;
        my_obj->bundle.superbuf_queue.match_cnt = 0;
    }

    /* since all streams are stopped, we are safe to
     * release all buffers allocated in stream */
    for (i = 0; i < num_streams_to_stop; i++) {
        if (s_objs[i]->ch_obj != my_obj) {
            continue;
        }
        /* put buf back */
        mm_stream_fsm_fn(s_objs[i],
                         MM_STREAM_EVT_PUT_BUF,
                         NULL,
                         NULL);
    }

    /* Finally, scrub the local copies of streams that belong to other
     * channels; owned streams keep their state for the caller to delete. */
    for (i = 0; i < num_streams_to_stop; i++) {
        if (s_objs[i]->ch_obj != my_obj) {
            memset(s_objs[i], 0, sizeof(mm_stream_t));
        } else {
            continue;
        }
    }

    return rc;
}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_request_super_buf
+ *
+ * DESCRIPTION: for burst mode in bundle, reuqest certain amount of matched
+ *              frames from superbuf queue
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @num_buf_requested : number of matched frames needed
+ *   @num_retro_buf_requested : number of retro frames needed
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_request_super_buf(mm_channel_t *my_obj,
+        mm_camera_req_buf_t *buf)
+{
+    int32_t rc = 0;
+    mm_camera_cmdcb_t* node = NULL;
+
+    if(!buf) {
+        LOGE("Request info buf is NULL");
+        return -1;
+    }
+
+    /* set pending_cnt
+     * will trigger dispatching super frames if pending_cnt > 0 */
+    /* send cam_sem_post to wake up cmd thread to dispatch super buffer */
+    node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+    if (NULL != node) {
+        memset(node, 0, sizeof(mm_camera_cmdcb_t));
+        node->cmd_type = MM_CAMERA_CMD_TYPE_REQ_DATA_CB;
+        node->u.req_buf = *buf;
+
+        /* enqueue to cmd thread */
+        cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+        /* wake up cmd thread */
+        cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+    } else {
+        LOGE("No memory for mm_camera_node_t");
+        rc = -1;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_cancel_super_buf_request
+ *
+ * DESCRIPTION: for burst mode in bundle, cancel the reuqest for certain amount
+ *              of matched frames from superbuf queue
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_cancel_super_buf_request(mm_channel_t *my_obj)
+{
+    int32_t rc = 0;
+    /* reset pending_cnt */
+    mm_camera_req_buf_t buf;
+    memset(&buf, 0x0, sizeof(buf));
+    buf.type = MM_CAMERA_REQ_SUPER_BUF;
+    buf.num_buf_requested = 0;
+    rc = mm_channel_request_super_buf(my_obj, &buf);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_flush_super_buf_queue
+ *
+ * DESCRIPTION: flush superbuf queue
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *   @frame_idx : frame idx until which to flush all superbufs
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_flush_super_buf_queue(mm_channel_t *my_obj, uint32_t frame_idx,
+                                                     cam_stream_type_t stream_type)
+{
+    int32_t rc = 0;
+    mm_camera_cmdcb_t* node = NULL;
+
+    node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+    if (NULL != node) {
+        memset(node, 0, sizeof(mm_camera_cmdcb_t));
+        node->cmd_type = MM_CAMERA_CMD_TYPE_FLUSH_QUEUE;
+        node->u.flush_cmd.frame_idx = frame_idx;
+        node->u.flush_cmd.stream_type = stream_type;
+
+        /* enqueue to cmd thread */
+        cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+        /* wake up cmd thread */
+        cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+
+        /* wait for ack from cmd thread */
+        cam_sem_wait(&(my_obj->cmd_thread.sync_sem));
+    } else {
+        LOGE("No memory for mm_camera_node_t");
+        rc = -1;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_config_notify_mode
+ *
+ * DESCRIPTION: configure notification mode
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *   @notify_mode : notification mode
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_config_notify_mode(mm_channel_t *my_obj,
+                                      mm_camera_super_buf_notify_mode_t notify_mode)
+{
+    int32_t rc = 0;
+    mm_camera_cmdcb_t* node = NULL;
+
+    node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+    if (NULL != node) {
+        memset(node, 0, sizeof(mm_camera_cmdcb_t));
+        node->u.notify_mode = notify_mode;
+        node->cmd_type = MM_CAMERA_CMD_TYPE_CONFIG_NOTIFY;
+
+        /* enqueue to cmd thread */
+        cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+        /* wake up cmd thread */
+        cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+    } else {
+        LOGE("No memory for mm_camera_node_t");
+        rc = -1;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_start_zsl_snapshot
+ *
+ * DESCRIPTION: start zsl snapshot
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_start_zsl_snapshot(mm_channel_t *my_obj)
+{
+    int32_t rc = 0;
+    mm_camera_cmdcb_t* node = NULL;
+
+    node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+    if (NULL != node) {
+        memset(node, 0, sizeof(mm_camera_cmdcb_t));
+        node->cmd_type = MM_CAMERA_CMD_TYPE_START_ZSL;
+
+        /* enqueue to cmd thread */
+        cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+        /* wake up cmd thread */
+        cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+    } else {
+        LOGE("No memory for mm_camera_node_t");
+        rc = -1;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_stop_zsl_snapshot
+ *
+ * DESCRIPTION: stop zsl snapshot
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_stop_zsl_snapshot(mm_channel_t *my_obj)
+{
+    int32_t rc = 0;
+    mm_camera_cmdcb_t* node = NULL;
+
+    node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+    if (NULL != node) {
+        memset(node, 0, sizeof(mm_camera_cmdcb_t));
+        node->cmd_type = MM_CAMERA_CMD_TYPE_STOP_ZSL;
+
+        /* enqueue to cmd thread */
+        cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+        /* wake up cmd thread */
+        cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+    } else {
+        LOGE("No memory for mm_camera_node_t");
+        rc = -1;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_qbuf
+ *
+ * DESCRIPTION: enqueue buffer back to kernel
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @buf          : buf ptr to be enqueued
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_qbuf(mm_channel_t *my_obj,
+                        mm_camera_buf_def_t *buf)
+{
+    int32_t rc = -1;
+    mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj, buf->stream_id);
+
+    if (NULL != s_obj) {
+        if (s_obj->ch_obj != my_obj) {
+            /* Redirect to linked stream */
+            rc = mm_stream_fsm_fn(s_obj->linked_stream,
+                    MM_STREAM_EVT_QBUF,
+                    (void *)buf,
+                    NULL);
+        } else {
+            rc = mm_stream_fsm_fn(s_obj,
+                    MM_STREAM_EVT_QBUF,
+                    (void *)buf,
+                    NULL);
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_get_queued_buf_count
+ *
+ * DESCRIPTION: return queued buffer count
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @stream_id    : steam_id
+ *
+ * RETURN     : queued buffer count
+ *==========================================================================*/
+int32_t mm_channel_get_queued_buf_count(mm_channel_t *my_obj, uint32_t stream_id)
+{
+    int32_t rc = -1;
+    mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj, stream_id);
+
+    if (NULL != s_obj) {
+        if (s_obj->ch_obj != my_obj) {
+            /* Redirect to linked stream */
+            rc = mm_stream_fsm_fn(s_obj->linked_stream,
+                    MM_STREAM_EVT_GET_QUEUED_BUF_COUNT,
+                    NULL,
+                    NULL);
+        } else {
+            rc = mm_stream_fsm_fn(s_obj,
+                    MM_STREAM_EVT_GET_QUEUED_BUF_COUNT,
+                    NULL,
+                    NULL);
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_set_stream_parms
+ *
+ * DESCRIPTION: set parameters per stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @s_id         : stream handle
+ *   @parms        : ptr to a param struct to be set to server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of parameters to be set
+ *              are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_channel_set_stream_parm(mm_channel_t *my_obj,
+                                   mm_evt_paylod_set_get_stream_parms_t *payload)
+{
+    int32_t rc = -1;
+    mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+                                                               payload->stream_id);
+    if (NULL != s_obj) {
+        if (s_obj->ch_obj != my_obj) {
+            /* No op. on linked streams */
+            return 0;
+        }
+
+        rc = mm_stream_fsm_fn(s_obj,
+                              MM_STREAM_EVT_SET_PARM,
+                              (void *)payload,
+                              NULL);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_get_stream_parms
+ *
+ * DESCRIPTION: get parameters per stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @s_id         : stream handle
+ *   @parms        : ptr to a param struct to be get from server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Parameters to be get from server are already
+ *              filled in by upper layer caller. After this call, corresponding
+ *              fields of requested parameters will be filled in by server with
+ *              detailed information.
+ *==========================================================================*/
+int32_t mm_channel_get_stream_parm(mm_channel_t *my_obj,
+                                   mm_evt_paylod_set_get_stream_parms_t *payload)
+{
+    int32_t rc = -1;
+    mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+                                                               payload->stream_id);
+    if (NULL != s_obj) {
+        if (s_obj->ch_obj != my_obj) {
+            /* No op. on linked streams */
+            return 0;
+        }
+
+        rc = mm_stream_fsm_fn(s_obj,
+                              MM_STREAM_EVT_GET_PARM,
+                              (void *)payload,
+                              NULL);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_do_stream_action
+ *
+ * DESCRIPTION: request server to perform stream based action. Maybe removed later
+ *              if the functionality is included in mm_camera_set_parms
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @s_id         : stream handle
+ *   @actions      : ptr to an action struct buf to be performed by server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the action struct buf is already mapped to server via
+ *              domain socket. Actions to be performed by server are already
+ *              filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_channel_do_stream_action(mm_channel_t *my_obj,
+                                   mm_evt_paylod_do_stream_action_t *payload)
+{
+    int32_t rc = -1;
+    mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+                                                               payload->stream_id);
+    if (NULL != s_obj) {
+        if (s_obj->ch_obj != my_obj) {
+            /* No op. on linked streams */
+            return 0;
+        }
+
+        rc = mm_stream_fsm_fn(s_obj,
+                              MM_STREAM_EVT_DO_ACTION,
+                              (void *)payload,
+                              NULL);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_map_stream_buf
+ *
+ * DESCRIPTION: mapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @payload      : ptr to payload for mapping
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_map_stream_buf(mm_channel_t *my_obj,
+                                  cam_buf_map_type *payload)
+{
+    int32_t rc = -1;
+    mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+            payload->stream_id);
+    if (NULL != s_obj) {
+        if (s_obj->ch_obj != my_obj) {
+            /* No op. on linked streams */
+            return 0;
+        }
+        rc = mm_stream_map_buf(s_obj,
+                payload->type, payload->frame_idx,
+                payload->plane_idx, payload->fd,
+                payload->size, payload->buffer);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_map_stream_bufs
+ *
+ * DESCRIPTION: mapping stream buffers via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @payload      : ptr to payload for mapping
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_map_stream_bufs(mm_channel_t *my_obj,
+                                   cam_buf_map_type_list *payload)
+{
+    int32_t rc = -1;
+    if ((payload == NULL) || (payload->length == 0)) {
+        return rc;
+    }
+
+    mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+                                                               payload->buf_maps[0].stream_id);
+    if (NULL != s_obj) {
+        if (s_obj->ch_obj != my_obj) {
+            /* No op. on linked streams */
+            return 0;
+        }
+
+        rc = mm_stream_map_bufs(s_obj, payload);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_unmap_stream_buf
+ *
+ * DESCRIPTION: unmapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @payload      : ptr to unmap payload
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_unmap_stream_buf(mm_channel_t *my_obj,
+                                    cam_buf_unmap_type *payload)
+{
+    int32_t rc = -1;
+    mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+                                                               payload->stream_id);
+    if (NULL != s_obj) {
+        if (s_obj->ch_obj != my_obj) {
+            /* No op. on linked streams */
+            return 0;
+        }
+
+        rc = mm_stream_unmap_buf(s_obj, payload->type,
+                                 payload->frame_idx, payload->plane_idx);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_queue_init
+ *
+ * DESCRIPTION: initialize superbuf queue in the channel
+ *
+ * PARAMETERS :
+ *   @queue   : ptr to superbuf queue to be initialized
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_queue_init(mm_channel_queue_t * queue)
+{
+    return cam_queue_init(&queue->que);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_queue_deinit
+ *
+ * DESCRIPTION: deinitialize superbuf queue in the channel
+ *
+ * PARAMETERS :
+ *   @queue   : ptr to superbuf queue to be deinitialized
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_queue_deinit(mm_channel_queue_t * queue)
+{
+    return cam_queue_deinit(&queue->que);
+}
+
/*===========================================================================
 * FUNCTION   : mm_channel_util_seq_comp_w_rollover
 *
 * DESCRIPTION: utility function to handle sequence number comparison with
 *              rollover. Uses serial-number arithmetic (cf. RFC 1982): the
 *              unsigned difference is reinterpreted as signed, so a frame id
 *              that has just wrapped past 0 still compares greater than one
 *              near UINT32_MAX. Valid when the two ids are less than 2^31
 *              apart, which frame ids from the same session always are.
 *
 * PARAMETERS :
 *   @v1      : first value to be compared
 *   @v2      : second value to be compared
 *
 * RETURN     : int8_t type of comparison result
 *              >0  -- v1 larger than v2
 *              =0  -- v1 equal to v2
 *              <0  -- v1 smaller than v2
 *==========================================================================*/
int8_t mm_channel_util_seq_comp_w_rollover(uint32_t v1,
                                           uint32_t v2)
{
    /* Unsigned subtraction wraps mod 2^32; casting the result to int32_t
     * yields the signed distance, which is correct across rollover.
     * (The previous plain </> comparison misordered ids that straddled
     * the 32-bit boundary.) */
    int32_t diff = (int32_t)(v1 - v2);

    if (diff > 0) {
        return 1;
    } else if (diff < 0) {
        return -1;
    }
    return 0;
}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_validate_super_buf.
+ *
+ * DESCRIPTION: Validate incoming buffer with existing super buffer.
+ *
+ * PARAMETERS :
+ *   @ch_obj  : channel object
+ *   @queue   : superbuf queue
+ *   @buf_info: new buffer from stream
+ *
+ * RETURN     : int8_t type of validation result
+ *              >0  -- Valid frame
+ *              =0  -- Cannot validate
+ *              <0  -- Invalid frame. Can be freed
+ *==========================================================================*/
+int8_t mm_channel_validate_super_buf(mm_channel_t* ch_obj,
+        mm_channel_queue_t *queue, mm_camera_buf_info_t *buf_info)
+{
+    int8_t ret = 0;
+    cam_node_t* node = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+    mm_channel_queue_node_t* super_buf = NULL;
+
+    (void)ch_obj;
+
+    /* comp */
+    pthread_mutex_lock(&queue->que.lock);
+    head = &queue->que.head.list;
+    /* get the last one in the queue which is possibly having no matching */
+    pos = head->next;
+    while (pos != head) {
+        node = member_of(pos, cam_node_t, list);
+        super_buf = (mm_channel_queue_node_t*)node->data;
+        if (NULL != super_buf) {
+            if ((super_buf->expected_frame) &&
+                    (buf_info->frame_idx == super_buf->frame_idx)) {
+                //This is good frame. Expecting more frames. Keeping this frame.
+                ret = 1;
+                break;
+            } else {
+                pos = pos->next;
+                continue;
+            }
+        }
+    }
+    pthread_mutex_unlock(&queue->que.lock);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_handle_metadata
+ *
+ * DESCRIPTION: Handle frame matching logic change due to metadata
+ *
+ * PARAMETERS :
+ *   @ch_obj  : channel object
+ *   @queue   : superbuf queue
+ *   @buf_info: new buffer from stream
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_handle_metadata(
+                        mm_channel_t* ch_obj,
+                        mm_channel_queue_t * queue,
+                        mm_camera_buf_info_t *buf_info)
+{
+
+    int rc = 0 ;
+    mm_stream_t* stream_obj = NULL;
+    stream_obj = mm_channel_util_get_stream_by_handler(ch_obj,
+                buf_info->stream_id);
+    uint8_t is_prep_snapshot_done_valid = 0;
+    uint8_t is_good_frame_idx_range_valid = 0;
+    int32_t prep_snapshot_done_state = 0;
+    cam_frame_idx_range_t good_frame_idx_range;
+    uint8_t is_crop_1x_found = 0;
+    uint32_t snapshot_stream_id = 0;
+    uint32_t i;
+    /* Set expected frame id to a future frame idx, large enough to wait
+    * for good_frame_idx_range, and small enough to still capture an image */
+    const uint32_t max_future_frame_offset = MM_CAMERA_MAX_FUTURE_FRAME_WAIT;
+
+    memset(&good_frame_idx_range, 0, sizeof(good_frame_idx_range));
+
+    if (NULL == stream_obj) {
+        LOGE("Invalid Stream Object for stream_id = %d",
+                    buf_info->stream_id);
+        rc = -1;
+        goto end;
+    }
+    if (NULL == stream_obj->stream_info) {
+        LOGE("NULL stream info for stream_id = %d",
+                     buf_info->stream_id);
+        rc = -1;
+        goto end;
+    }
+
+    if ((CAM_STREAM_TYPE_METADATA == stream_obj->stream_info->stream_type) &&
+            ((stream_obj->ch_obj == ch_obj) ||
+            ((stream_obj->linked_stream != NULL) &&
+            (stream_obj->linked_stream->linked_obj == ch_obj)))) {
+        const metadata_buffer_t *metadata;
+        metadata = (const metadata_buffer_t *)buf_info->buf->buffer;
+
+        if (NULL == metadata) {
+            LOGE("NULL metadata buffer for metadata stream");
+            rc = -1;
+            goto end;
+        }
+        LOGL("E , expected frame id: %d", queue->expected_frame_id);
+
+        IF_META_AVAILABLE(const int32_t, p_prep_snapshot_done_state,
+                CAM_INTF_META_PREP_SNAPSHOT_DONE, metadata) {
+            prep_snapshot_done_state = *p_prep_snapshot_done_state;
+            is_prep_snapshot_done_valid = 1;
+            LOGH("prepare snapshot done valid ");
+        }
+        IF_META_AVAILABLE(const cam_frame_idx_range_t, p_good_frame_idx_range,
+                CAM_INTF_META_GOOD_FRAME_IDX_RANGE, metadata) {
+            good_frame_idx_range = *p_good_frame_idx_range;
+            is_good_frame_idx_range_valid = 1;
+            LOGH("good_frame_idx_range : min: %d, max: %d , num frames = %d",
+                 good_frame_idx_range.min_frame_idx,
+                good_frame_idx_range.max_frame_idx, good_frame_idx_range.num_led_on_frames);
+        }
+        IF_META_AVAILABLE(const cam_crop_data_t, p_crop_data,
+                CAM_INTF_META_CROP_DATA, metadata) {
+            cam_crop_data_t crop_data = *p_crop_data;
+
+            for (i = 0; i < ARRAY_SIZE(ch_obj->streams); i++) {
+                if (MM_STREAM_STATE_NOTUSED == ch_obj->streams[i].state) {
+                    continue;
+                }
+                if (CAM_STREAM_TYPE_SNAPSHOT ==
+                    ch_obj->streams[i].stream_info->stream_type) {
+                    snapshot_stream_id = ch_obj->streams[i].server_stream_id;
+                    break;
+                }
+            }
+
+            for (i=0; i<crop_data.num_of_streams; i++) {
+                if (snapshot_stream_id == crop_data.crop_info[i].stream_id) {
+                    if (!crop_data.crop_info[i].crop.left &&
+                            !crop_data.crop_info[i].crop.top) {
+                        is_crop_1x_found = 1;
+                        break;
+                    }
+                }
+            }
+        }
+
+        IF_META_AVAILABLE(const cam_buf_divert_info_t, p_divert_info,
+                CAM_INTF_BUF_DIVERT_INFO, metadata) {
+            cam_buf_divert_info_t divert_info = *p_divert_info;
+            if (divert_info.frame_id >= buf_info->frame_idx) {
+                ch_obj->diverted_frame_id = divert_info.frame_id;
+            } else {
+                ch_obj->diverted_frame_id = 0;
+            }
+        }
+
+        if (ch_obj->isZoom1xFrameRequested) {
+            if (is_crop_1x_found) {
+                ch_obj->isZoom1xFrameRequested = 0;
+                queue->expected_frame_id = buf_info->frame_idx + 1;
+            } else {
+                queue->expected_frame_id += max_future_frame_offset;
+                /* Flush unwanted frames */
+                mm_channel_superbuf_flush_matched(ch_obj, queue);
+            }
+            goto end;
+        }
+
+        if (ch_obj->startZSlSnapshotCalled && is_good_frame_idx_range_valid) {
+            LOGI("frameID = %d, expected = %d good_frame_idx = %d",
+                    buf_info->frame_idx, queue->expected_frame_id,
+                    good_frame_idx_range.min_frame_idx);
+        }
+
+        if (is_prep_snapshot_done_valid) {
+            ch_obj->bWaitForPrepSnapshotDone = 0;
+            if (prep_snapshot_done_state == NEED_FUTURE_FRAME) {
+                queue->expected_frame_id += max_future_frame_offset;
+                LOGI("PreFlash Done. Need Main Flash");
+
+                mm_channel_superbuf_flush(ch_obj,
+                        queue, CAM_STREAM_TYPE_DEFAULT);
+
+                ch_obj->needLEDFlash = TRUE;
+            } else {
+                ch_obj->needLEDFlash = FALSE;
+            }
+        }
+        if (is_good_frame_idx_range_valid) {
+            queue->expected_frame_id =
+                good_frame_idx_range.min_frame_idx;
+            queue->good_frame_id = good_frame_idx_range.min_frame_idx;
+            if((ch_obj->needLEDFlash == TRUE) && (ch_obj->burstSnapNum > 1)) {
+                queue->led_on_start_frame_id =
+                good_frame_idx_range.min_frame_idx;
+                queue->led_off_start_frame_id =
+                good_frame_idx_range.max_frame_idx;
+                queue->once = 0;
+                queue->led_on_num_frames =
+                  good_frame_idx_range.num_led_on_frames;
+                queue->frame_skip_count = good_frame_idx_range.frame_skip_count;
+                LOGD("Need Flash, expected frame id = %d,"
+                        " led_on start = %d, led off start = %d, led on frames = %d ",
+                           queue->expected_frame_id, queue->led_on_start_frame_id,
+                        queue->led_off_start_frame_id, queue->led_on_num_frames);
+            } else {
+                LOGD("No flash, expected frame id = %d ",
+                         queue->expected_frame_id);
+            }
+        } else if ((MM_CHANNEL_BRACKETING_STATE_WAIT_GOOD_FRAME_IDX == ch_obj->bracketingState) &&
+                !is_prep_snapshot_done_valid) {
+            /* Flush unwanted frames */
+            mm_channel_superbuf_flush_matched(ch_obj, queue);
+            queue->expected_frame_id += max_future_frame_offset;
+        }
+        if (ch_obj->isFlashBracketingEnabled &&
+            is_good_frame_idx_range_valid) {
+            /* Flash bracketing needs two frames, with & without led flash.
+            * in valid range min frame is with led flash and max frame is
+            * without led flash */
+            queue->expected_frame_id =
+                    good_frame_idx_range.min_frame_idx;
+            /* max frame is without led flash */
+            queue->expected_frame_id_without_led =
+                    good_frame_idx_range.max_frame_idx;
+            queue->good_frame_id =
+                    good_frame_idx_range.min_frame_idx;
+        } else if (is_good_frame_idx_range_valid) {
+            queue->expected_frame_id =
+                    good_frame_idx_range.min_frame_idx;
+            ch_obj->bracketingState = MM_CHANNEL_BRACKETING_STATE_ACTIVE;
+            queue->good_frame_id =
+                    good_frame_idx_range.min_frame_idx;
+        }
+
+        if (ch_obj->isConfigCapture && is_good_frame_idx_range_valid
+                && (good_frame_idx_range.config_batch_idx < ch_obj->frameConfig.num_batch)) {
+
+            LOGI("Frame Config: Expcted ID = %d batch index = %d",
+                    good_frame_idx_range.min_frame_idx, good_frame_idx_range.config_batch_idx);
+            ch_obj->capture_frame_id[good_frame_idx_range.config_batch_idx] =
+                    good_frame_idx_range.min_frame_idx;
+
+            if (ch_obj->cur_capture_idx == good_frame_idx_range.config_batch_idx) {
+                queue->expected_frame_id =
+                        good_frame_idx_range.min_frame_idx;
+            } else {
+                queue->expected_frame_id =
+                        ch_obj->capture_frame_id[ch_obj->cur_capture_idx];
+            }
+            queue->good_frame_id = queue->expected_frame_id;
+        }
+
+        if ((ch_obj->burstSnapNum > 1) && (ch_obj->needLEDFlash == TRUE)
+            && !ch_obj->isFlashBracketingEnabled
+            && (MM_CHANNEL_BRACKETING_STATE_OFF == ch_obj->bracketingState)
+            && !ch_obj->isConfigCapture) {
+            if((buf_info->frame_idx >= queue->led_off_start_frame_id)
+                    &&  !queue->once) {
+                LOGD("Burst snap num = %d ",
+                         ch_obj->burstSnapNum);
+                // Skip frames from LED OFF frame to get a good frame
+                queue->expected_frame_id = queue->led_off_start_frame_id +
+                        queue->frame_skip_count;
+                queue->once = 1;
+                ch_obj->stopZslSnapshot = 1;
+                ch_obj->needLEDFlash = FALSE;
+                LOGD("Reached max led on frames = %d , expected id = %d",
+                         buf_info->frame_idx, queue->expected_frame_id);
+         }
+       }
+
+        IF_META_AVAILABLE(const cam_low_light_mode_t, low_light_level,
+            CAM_INTF_META_LOW_LIGHT, metadata) {
+            ch_obj->needLowLightZSL = *low_light_level;
+        }
+
+        // For the instant capture case, if AEC settles before expected frame ID from user,
+        // reset the expected frame ID to current frame index.
+        if (queue->attr.user_expected_frame_id > 0) {
+            if (queue->attr.user_expected_frame_id > buf_info->frame_idx) {
+                IF_META_AVAILABLE(const cam_3a_params_t, ae_params,
+                    CAM_INTF_META_AEC_INFO, metadata) {
+                    if (ae_params->settled) {
+                        queue->expected_frame_id = buf_info->frame_idx;
+                        // Reset the expected frame ID from HAL to 0
+                        queue->attr.user_expected_frame_id = 0;
+                        LOGD("AEC settled, reset expected frame ID from user");
+                    }
+                }
+            } else {
+                 // Reset the expected frame ID from HAL to 0 after
+                 // current frame index is greater than expected id.
+                queue->attr.user_expected_frame_id = 0;
+                LOGD("reset expected frame ID from user as it reached the bound");
+            }
+        }
+    }
+end:
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_comp_and_enqueue
+ *
+ * DESCRIPTION: implementation for matching logic for superbuf
+ *
+ * PARAMETERS :
+ *   @ch_obj  : channel object
+ *   @queue   : superbuf queue
+ *   @buf_info: new buffer from stream
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_comp_and_enqueue(
+                        mm_channel_t* ch_obj,
+                        mm_channel_queue_t *queue,
+                        mm_camera_buf_info_t *buf_info)
+{
+    cam_node_t* node = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+    mm_channel_queue_node_t* super_buf = NULL;
+    uint8_t buf_s_idx, i, found_super_buf, unmatched_bundles;
+    struct cam_list *last_buf, *insert_before_buf, *last_buf_ptr;
+
+    LOGD("E");
+
+    /* Map the incoming buffer's stream id to its slot in the bundle table;
+     * buf_s_idx is also the buffer's index inside a superbuf node. */
+    for (buf_s_idx = 0; buf_s_idx < queue->num_streams; buf_s_idx++) {
+        if (buf_info->stream_id == queue->bundled_streams[buf_s_idx]) {
+            break;
+        }
+    }
+
+    if (buf_s_idx == queue->num_streams) {
+        LOGE("buf from stream (%d) not bundled", buf_info->stream_id);
+        return -1;
+    }
+
+    /* Frame index 0 is treated as invalid: return the buffer immediately. */
+    if(buf_info->frame_idx == 0) {
+        mm_channel_qbuf(ch_obj, buf_info->buf);
+        return 0;
+    }
+
+    /* Let metadata (prep-snapshot, good-frame range, crop, divert info)
+     * update the queue's expected frame id before matching. */
+    if (mm_channel_handle_metadata(ch_obj, queue, buf_info) < 0) {
+        mm_channel_qbuf(ch_obj, buf_info->buf);
+        return -1;
+    }
+
+    /* Discard frames older (with rollover) than what the queue expects,
+     * unless validation says the buffer is still usable. */
+    if ((mm_channel_util_seq_comp_w_rollover(buf_info->frame_idx,
+            queue->expected_frame_id) < 0) &&
+            (mm_channel_validate_super_buf(ch_obj, queue, buf_info) <= 0)) {
+        LOGH("incoming buf id(%d) is older than expected buf id(%d), will discard it",
+                 buf_info->frame_idx, queue->expected_frame_id);
+        mm_channel_qbuf(ch_obj, buf_info->buf);
+        return 0;
+    }
+
+    /* comp */
+    pthread_mutex_lock(&queue->que.lock);
+    head = &queue->que.head.list;
+    /* get the last one in the queue which is possibly having no matching */
+    pos = head->next;
+
+    found_super_buf = 0;
+    unmatched_bundles = 0;
+    last_buf = NULL;          /* newest unmatched node older than this frame */
+    insert_before_buf = NULL; /* first node newer than this frame */
+    last_buf_ptr = NULL;
+
+    /* Walk the queue looking for a superbuf this buffer can join. */
+    while (pos != head) {
+        node = member_of(pos, cam_node_t, list);
+        super_buf = (mm_channel_queue_node_t*)node->data;
+
+        if (NULL != super_buf) {
+            if (super_buf->matched) {
+                /* find a matched super buf, move to next one */
+                pos = pos->next;
+                continue;
+            } else if (( buf_info->frame_idx == super_buf->frame_idx )
+                    /*Pick metadata greater than available frameID*/
+                    || ((queue->attr.priority == MM_CAMERA_SUPER_BUF_PRIORITY_LOW)
+                    && (super_buf->super_buf[buf_s_idx].frame_idx == 0)
+                    && (buf_info->buf->stream_type == CAM_STREAM_TYPE_METADATA)
+                    && (super_buf->frame_idx < buf_info->frame_idx))
+                    /*Pick available metadata closest to frameID*/
+                    || ((queue->attr.priority == MM_CAMERA_SUPER_BUF_PRIORITY_LOW)
+                    && (buf_info->buf->stream_type != CAM_STREAM_TYPE_METADATA)
+                    && (super_buf->super_buf[buf_s_idx].frame_idx == 0)
+                    && (super_buf->unmatched_meta_idx > buf_info->frame_idx))){
+                /*super buffer frame IDs matching OR In low priority bundling
+                metadata frameID greater than avialbale super buffer frameID  OR
+                metadata frame closest to incoming frameID will be bundled*/
+                found_super_buf = 1;
+                /* If we are filling into a 'meta only' superbuf, make sure to reset
+                the super_buf frame_idx so that missing streams in this superbuf
+                are filled as per matching frame id logic. Note that, in low priority
+                queue, only meta frame id need not match (closest suffices) but
+                the other streams in this superbuf should have same frame id. */
+                if (super_buf->unmatched_meta_idx > 0) {
+                    super_buf->unmatched_meta_idx = 0;
+                    super_buf->frame_idx = buf_info->frame_idx;
+                }
+                break;
+            } else {
+                /* No match at this node: record insertion anchors so the new
+                 * frame can later be placed in frame-id order. */
+                unmatched_bundles++;
+                if ( NULL == last_buf ) {
+                    if ( super_buf->frame_idx < buf_info->frame_idx ) {
+                        last_buf = pos;
+                    }
+                }
+                if ( NULL == insert_before_buf ) {
+                    if ( super_buf->frame_idx > buf_info->frame_idx ) {
+                        insert_before_buf = pos;
+                    }
+                }
+                pos = pos->next;
+            }
+        }
+    }
+
+    if ( found_super_buf ) {
+        /* Slot already filled for this stream: avoid overwriting memory. */
+        if(super_buf->super_buf[buf_s_idx].frame_idx != 0) {
+            //This can cause frame drop. We are overwriting same memory.
+            pthread_mutex_unlock(&queue->que.lock);
+            LOGW("Warning: frame is already in camera ZSL queue");
+            mm_channel_qbuf(ch_obj, buf_info->buf);
+            return 0;
+        }
+
+        /*Insert incoming buffer to super buffer*/
+        super_buf->super_buf[buf_s_idx] = *buf_info;
+
+        /* check if superbuf is all matched */
+        super_buf->matched = 1;
+        for (i=0; i < super_buf->num_of_bufs; i++) {
+            if (super_buf->super_buf[i].frame_idx == 0) {
+                super_buf->matched = 0;
+                break;
+            }
+        }
+
+        if (super_buf->matched) {
+            /* In flash bracketing, the next wanted frame is the LED-off one;
+             * otherwise advance by the configured post-frame skip. */
+            if(ch_obj->isFlashBracketingEnabled) {
+               queue->expected_frame_id =
+                   queue->expected_frame_id_without_led;
+               if (buf_info->frame_idx >=
+                       queue->expected_frame_id_without_led) {
+                   ch_obj->isFlashBracketingEnabled = FALSE;
+               }
+            } else {
+               queue->expected_frame_id = buf_info->frame_idx
+                                          + queue->attr.post_frame_skip;
+            }
+
+            super_buf->expected_frame = FALSE;
+
+            LOGD("curr = %d, skip = %d , Expected Frame ID: %d",
+                     buf_info->frame_idx,
+                    queue->attr.post_frame_skip, queue->expected_frame_id);
+
+            queue->match_cnt++;
+            if (ch_obj->bundle.superbuf_queue.attr.enable_frame_sync) {
+                pthread_mutex_lock(&fs_lock);
+                mm_frame_sync_add(buf_info->frame_idx, ch_obj);
+                pthread_mutex_unlock(&fs_lock);
+            }
+            /* Any older unmatched buffer need to be released */
+            if ( last_buf ) {
+                while ( last_buf != pos ) {
+                    node = member_of(last_buf, cam_node_t, list);
+                    super_buf = (mm_channel_queue_node_t*)node->data;
+                    if (NULL != super_buf) {
+                        for (i=0; i<super_buf->num_of_bufs; i++) {
+                            if (super_buf->super_buf[i].frame_idx != 0) {
+                                mm_channel_qbuf(ch_obj, super_buf->super_buf[i].buf);
+                            }
+                        }
+                        queue->que.size--;
+                        last_buf = last_buf->next;
+                        cam_list_del_node(&node->list);
+                        free(node);
+                        free(super_buf);
+                    } else {
+                        LOGE("Invalid superbuf in queue!");
+                        break;
+                    }
+                }
+            }
+        }else {
+            /* Not fully matched yet: remember if this is a diverted frame so
+             * the trimming logic will not drop it. */
+            if (ch_obj->diverted_frame_id == buf_info->frame_idx) {
+                super_buf->expected_frame = TRUE;
+                ch_obj->diverted_frame_id = 0;
+            }
+        }
+    } else {
+        if ((queue->attr.max_unmatched_frames < unmatched_bundles)
+                && ( NULL == last_buf )) {
+            /* incoming frame is older than the last bundled one */
+            mm_channel_qbuf(ch_obj, buf_info->buf);
+        } else {
+            last_buf_ptr = last_buf;
+
+            /* Loop to remove unmatched frames */
+            while ((queue->attr.max_unmatched_frames < unmatched_bundles)
+                    && (last_buf_ptr != NULL && last_buf_ptr != pos)) {
+                node = member_of(last_buf_ptr, cam_node_t, list);
+                super_buf = (mm_channel_queue_node_t*)node->data;
+                /* Skip expected (diverted/good) frames and the insertion
+                 * anchor itself while trimming. */
+                if (NULL != super_buf && super_buf->expected_frame == FALSE
+                        && (&node->list != insert_before_buf)) {
+                    for (i=0; i<super_buf->num_of_bufs; i++) {
+                        if (super_buf->super_buf[i].frame_idx != 0) {
+                            mm_channel_qbuf(ch_obj, super_buf->super_buf[i].buf);
+                        }
+                    }
+                    queue->que.size--;
+                    cam_list_del_node(&node->list);
+                    free(node);
+                    free(super_buf);
+                    unmatched_bundles--;
+                }
+                last_buf_ptr = last_buf_ptr->next;
+            }
+
+            /* Still over the limit: force-drop the node at last_buf. */
+            if (queue->attr.max_unmatched_frames < unmatched_bundles) {
+                node = member_of(last_buf, cam_node_t, list);
+                super_buf = (mm_channel_queue_node_t*)node->data;
+                for (i=0; i<super_buf->num_of_bufs; i++) {
+                    if (super_buf->super_buf[i].frame_idx != 0) {
+                        mm_channel_qbuf(ch_obj, super_buf->super_buf[i].buf);
+                    }
+                }
+                queue->que.size--;
+                cam_list_del_node(&node->list);
+                free(node);
+                free(super_buf);
+            }
+
+            /* insert the new frame at the appropriate position. */
+
+            mm_channel_queue_node_t *new_buf = NULL;
+            cam_node_t* new_node = NULL;
+
+            new_buf = (mm_channel_queue_node_t*)malloc(sizeof(mm_channel_queue_node_t));
+            new_node = (cam_node_t*)malloc(sizeof(cam_node_t));
+            if (NULL != new_buf && NULL != new_node) {
+                memset(new_buf, 0, sizeof(mm_channel_queue_node_t));
+                memset(new_node, 0, sizeof(cam_node_t));
+                new_node->data = (void *)new_buf;
+                new_buf->num_of_bufs = queue->num_streams;
+                new_buf->super_buf[buf_s_idx] = *buf_info;
+                new_buf->frame_idx = buf_info->frame_idx;
+
+                /* Mark diverted/good frames as expected so they survive the
+                 * unmatched-frame trimming above. */
+                if ((ch_obj->diverted_frame_id == buf_info->frame_idx)
+                        || (buf_info->frame_idx == queue->good_frame_id)) {
+                    new_buf->expected_frame = TRUE;
+                    ch_obj->diverted_frame_id = 0;
+                }
+
+                /* enqueue */
+                if ( insert_before_buf ) {
+                    cam_list_insert_before_node(&new_node->list, insert_before_buf);
+                } else {
+                    cam_list_add_tail_node(&new_node->list, &queue->que.head.list);
+                }
+                queue->que.size++;
+
+                /* Single-stream bundle: every frame is trivially matched. */
+                if(queue->num_streams == 1) {
+                    new_buf->matched = 1;
+                    new_buf->expected_frame = FALSE;
+                    queue->expected_frame_id = buf_info->frame_idx + queue->attr.post_frame_skip;
+                    queue->match_cnt++;
+                    if (ch_obj->bundle.superbuf_queue.attr.enable_frame_sync) {
+                        pthread_mutex_lock(&fs_lock);
+                        mm_frame_sync_add(buf_info->frame_idx, ch_obj);
+                        pthread_mutex_unlock(&fs_lock);
+                    }
+                }
+                /* In low priority queue, this will become a 'meta only' superbuf. Set the
+                unmatched_frame_idx so that the upcoming stream buffers (other than meta)
+                can be filled into this which are nearest to this idx. */
+                if ((queue->attr.priority == MM_CAMERA_SUPER_BUF_PRIORITY_LOW)
+                    && (buf_info->buf->stream_type == CAM_STREAM_TYPE_METADATA)) {
+                    new_buf->unmatched_meta_idx = buf_info->frame_idx;
+                }
+            } else {
+                /* No memory */
+                if (NULL != new_buf) {
+                    free(new_buf);
+                }
+                if (NULL != new_node) {
+                    free(new_node);
+                }
+                /* qbuf the new buf since we cannot enqueue */
+                mm_channel_qbuf(ch_obj, buf_info->buf);
+            }
+        }
+    }
+
+    pthread_mutex_unlock(&queue->que.lock);
+    LOGD("X");
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_dequeue_internal
+ *
+ * DESCRIPTION: internal implementation for dequeue from the superbuf queue
+ *
+ * PARAMETERS :
+ *   @queue   : superbuf queue
+ *   @matched_only : if dequeued buf should be matched
+ *   @ch_obj  : channel object
+ *
+ * RETURN     : ptr to a node from superbuf queue
+ *==========================================================================*/
+mm_channel_queue_node_t* mm_channel_superbuf_dequeue_internal(
+        mm_channel_queue_t * queue,
+        uint8_t matched_only, mm_channel_t *ch_obj)
+{
+    struct cam_list *head = &queue->que.head.list;
+    struct cam_list *pos = head->next;
+    cam_node_t *node = NULL;
+    mm_channel_queue_node_t *super_buf = NULL;
+
+    /* Empty queue: nothing to hand out. */
+    if (pos == head) {
+        return NULL;
+    }
+
+    node = member_of(pos, cam_node_t, list);
+    super_buf = (mm_channel_queue_node_t *)node->data;
+
+    /* Caller wants matched superbufs only, and the head one isn't. */
+    if ((NULL == super_buf) ||
+            ((matched_only == TRUE) && (super_buf->matched == FALSE))) {
+        return NULL;
+    }
+
+    /* Unlink the head node and update the queue accounting. */
+    cam_list_del_node(&node->list);
+    queue->que.size--;
+    if (super_buf->matched == TRUE) {
+        queue->match_cnt--;
+        if (ch_obj->bundle.superbuf_queue.attr.enable_frame_sync) {
+            pthread_mutex_lock(&fs_lock);
+            mm_frame_sync_remove(super_buf->frame_idx);
+            pthread_mutex_unlock(&fs_lock);
+        }
+    }
+    free(node);
+
+    return super_buf;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_dequeue_frame_internal
+ *
+ * DESCRIPTION: internal implementation for dequeue based on frame index
+ *                     from the superbuf queue
+ *
+ * PARAMETERS :
+ *   @queue       : superbuf queue
+ *   @frame_idx  : frame index to be dequeued
+ *
+ * RETURN     : ptr to a node from superbuf queue with matched frame index
+ *                : NULL if not found
+ *==========================================================================*/
+mm_channel_queue_node_t* mm_channel_superbuf_dequeue_frame_internal(
+        mm_channel_queue_t * queue, uint32_t frame_idx)
+{
+    mm_channel_queue_node_t *result = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    if (!queue) {
+        LOGE("queue is NULL");
+        return NULL;
+    }
+
+    head = &queue->que.head.list;
+    LOGL("Searching for match frame %d", frame_idx);
+    /* Linear scan for a matched superbuf carrying exactly frame_idx. */
+    for (pos = head->next; (pos != NULL) && (pos != head); pos = pos->next) {
+        cam_node_t *node = member_of(pos, cam_node_t, list);
+        mm_channel_queue_node_t *super_buf =
+                (mm_channel_queue_node_t *)node->data;
+
+        if (super_buf && super_buf->matched &&
+                (super_buf->frame_idx == frame_idx)) {
+            /* Hit: unlink the node and hand the superbuf to the caller. */
+            cam_list_del_node(&node->list);
+            queue->que.size--;
+            queue->match_cnt--;
+            LOGH("Found match frame %d", frame_idx);
+            free(node);
+            result = super_buf;
+            break;
+        }
+        LOGH("match frame not found %d", frame_idx);
+    }
+
+    return result;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_dequeue
+ *
+ * DESCRIPTION: dequeue from the superbuf queue
+ *
+ * PARAMETERS :
+ *   @queue   : superbuf queue
+ *   @ch_obj  : channel object
+ *
+ * RETURN     : ptr to a node from superbuf queue
+ *==========================================================================*/
+mm_channel_queue_node_t* mm_channel_superbuf_dequeue(
+        mm_channel_queue_t * queue, mm_channel_t *ch_obj)
+{
+    mm_channel_queue_node_t *dequeued;
+
+    /* Serialize with producers/consumers of the superbuf queue. */
+    pthread_mutex_lock(&queue->que.lock);
+    dequeued = mm_channel_superbuf_dequeue_internal(queue, TRUE, ch_obj);
+    pthread_mutex_unlock(&queue->que.lock);
+
+    return dequeued;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_bufdone_overflow
+ *
+ * DESCRIPTION: keep superbuf queue no larger than watermark set by upper layer
+ *              via channel attribute
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *   @queue   : superbuf queue
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_bufdone_overflow(mm_channel_t* my_obj,
+                                             mm_channel_queue_t * queue)
+{
+    mm_channel_queue_node_t *super_buf = NULL;
+    int32_t i;
+
+    /* Continuous notify mode keeps every frame; no trimming needed. */
+    if (MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS == queue->attr.notify_mode) {
+        return 0;
+    }
+
+    LOGD("before match_cnt=%d, water_mark=%d",
+          queue->match_cnt, queue->attr.water_mark);
+
+    /* Return matched superbufs above the watermark back to their streams. */
+    pthread_mutex_lock(&queue->que.lock);
+    while (queue->match_cnt > queue->attr.water_mark) {
+        super_buf = mm_channel_superbuf_dequeue_internal(queue, TRUE, my_obj);
+        if (NULL == super_buf) {
+            continue;
+        }
+        for (i = 0; i < super_buf->num_of_bufs; i++) {
+            if (NULL != super_buf->super_buf[i].buf) {
+                mm_channel_qbuf(my_obj, super_buf->super_buf[i].buf);
+            }
+        }
+        free(super_buf);
+    }
+    pthread_mutex_unlock(&queue->que.lock);
+
+    LOGD("after match_cnt=%d, water_mark=%d",
+          queue->match_cnt, queue->attr.water_mark);
+
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_skip
+ *
+ * DESCRIPTION: depends on the lookback configuration of the channel attribute,
+ *              unwanted superbufs will be removed from the superbuf queue.
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *   @queue   : superbuf queue
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_skip(mm_channel_t* my_obj,
+                                 mm_channel_queue_t * queue)
+{
+    mm_channel_queue_node_t *super_buf = NULL;
+    int32_t i;
+
+    /* Continuous notify mode delivers every frame; nothing to skip. */
+    if (MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS == queue->attr.notify_mode) {
+        return 0;
+    }
+
+    /* Drop matched superbufs beyond the configured look-back depth and
+     * return their buffers to the streams. */
+    pthread_mutex_lock(&queue->que.lock);
+    while (queue->match_cnt > queue->attr.look_back) {
+        super_buf = mm_channel_superbuf_dequeue_internal(queue, TRUE, my_obj);
+        if (NULL == super_buf) {
+            continue;
+        }
+        for (i = 0; i < super_buf->num_of_bufs; i++) {
+            if (NULL != super_buf->super_buf[i].buf) {
+                mm_channel_qbuf(my_obj, super_buf->super_buf[i].buf);
+            }
+        }
+        free(super_buf);
+    }
+    pthread_mutex_unlock(&queue->que.lock);
+
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_flush
+ *
+ * DESCRIPTION: flush the superbuf queue.
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *   @queue   : superbuf queue
+ *   @cam_type: flush only particular type (default flushes all)
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_flush(mm_channel_t* my_obj,
+        mm_channel_queue_t * queue, cam_stream_type_t cam_type)
+{
+    mm_channel_queue_node_t *super_buf = NULL;
+    int32_t i;
+
+    /* Drain every superbuf (matched or not). Buffers are returned only
+     * when cam_type is CAM_STREAM_TYPE_DEFAULT (flush all) or matches the
+     * buffer's own stream type. */
+    pthread_mutex_lock(&queue->que.lock);
+    while (NULL != (super_buf =
+            mm_channel_superbuf_dequeue_internal(queue, FALSE, my_obj))) {
+        for (i = 0; i < super_buf->num_of_bufs; i++) {
+            if (NULL == super_buf->super_buf[i].buf) {
+                continue;
+            }
+            if ((CAM_STREAM_TYPE_DEFAULT == cam_type) ||
+                    (cam_type == super_buf->super_buf[i].buf->stream_type)) {
+                mm_channel_qbuf(my_obj, super_buf->super_buf[i].buf);
+            }
+        }
+        free(super_buf);
+    }
+    pthread_mutex_unlock(&queue->que.lock);
+
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_proc_general_cmd
+ *
+ * DESCRIPTION: process general command
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *   @notify_mode : notification mode
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_proc_general_cmd(mm_channel_t *my_obj,
+                                      mm_camera_generic_cmd_t *p_gen_cmd)
+{
+    mm_camera_cmdcb_t *cmd_node;
+
+    LOGD("E");
+
+    cmd_node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+    if (NULL == cmd_node) {
+        LOGE("No memory for mm_camera_node_t");
+        LOGD("X");
+        return -1;
+    }
+
+    /* Package the general command for the channel's command thread. */
+    memset(cmd_node, 0, sizeof(mm_camera_cmdcb_t));
+    cmd_node->u.gen_cmd = *p_gen_cmd;
+    cmd_node->cmd_type = MM_CAMERA_CMD_TYPE_GENERAL;
+
+    /* enqueue to cmd thread */
+    cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), cmd_node);
+
+    /* wake up cmd thread */
+    cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+
+    LOGD("X");
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_flush_matched
+ *
+ * DESCRIPTION: flush matched buffers from the superbuf queue.
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *   @queue   : superbuf queue
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_flush_matched(mm_channel_t* my_obj,
+                                  mm_channel_queue_t * queue)
+{
+    mm_channel_queue_node_t *super_buf = NULL;
+    int32_t i;
+
+    /* Drain all fully-matched superbufs and return their buffers. */
+    pthread_mutex_lock(&queue->que.lock);
+    while (NULL != (super_buf =
+            mm_channel_superbuf_dequeue_internal(queue, TRUE, my_obj))) {
+        for (i = 0; i < super_buf->num_of_bufs; i++) {
+            if (NULL != super_buf->super_buf[i].buf) {
+                mm_channel_qbuf(my_obj, super_buf->super_buf[i].buf);
+            }
+        }
+        free(super_buf);
+    }
+    pthread_mutex_unlock(&queue->que.lock);
+
+    return 0;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : mm_frame_sync_reset
+ *
+ * DESCRIPTION: Reset Frame sync info
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void mm_frame_sync_reset() {
+    /* Zero the whole global frame-sync state (nodes, channel slots, count).
+     * NOTE(review): no locking here — callers appear to hold fs_lock
+     * (see register/unregister) — confirm all call sites do. */
+    memset(&fs, 0x0, sizeof(fs));
+    LOGD("Reset Done");
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_frame_sync_register_channel
+ *
+ * DESCRIPTION: Register Channel for frame sync
+ *
+ * PARAMETERS :
+ *   @ch_obj  : channel object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_frame_sync_register_channel(mm_channel_t *ch_obj) {
+    // Lock frame sync info
+    pthread_mutex_lock(&fs_lock);
+    /* Reject when the bundle is already full or the channel ptr is NULL. */
+    if ((fs.num_cam >= MAX_NUM_CAMERA_PER_BUNDLE) || (!ch_obj)) {
+        LOGE("Error!! num cam(%d) is out of range ",
+                 fs.num_cam);
+        pthread_mutex_unlock(&fs_lock);
+        return -1;
+    }
+    /* First registration starts from a clean frame-sync state. */
+    if (fs.num_cam == 0) {
+        LOGH("First channel registering!!");
+        mm_frame_sync_reset();
+    }
+    uint8_t i = 0;
+    /* Claim the first free slot and cache the channel's bundle callback. */
+    for (i = 0; i < MAX_NUM_CAMERA_PER_BUNDLE; i++) {
+        if (fs.ch_obj[i] == NULL) {
+            fs.ch_obj[i] = ch_obj;
+            fs.cb[i] = ch_obj->bundle.super_buf_notify_cb;
+            fs.num_cam++;
+            LOGD("DBG_FS index %d", i);
+            break;
+        }
+    }
+    /* Loop ran off the end => no free slot was found. */
+    if (i >= MAX_NUM_CAMERA_PER_BUNDLE) {
+        LOGH("X, DBG_FS Cannot register channel!!");
+        pthread_mutex_unlock(&fs_lock);
+        return -1;
+    }
+    LOGH("num_cam %d ", fs.num_cam);
+    pthread_mutex_unlock(&fs_lock);
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_frame_sync_unregister_channel
+ *
+ * DESCRIPTION: un-register Channel for frame sync
+ *
+ * PARAMETERS :
+ *   @ch_obj  : channel object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_frame_sync_unregister_channel(mm_channel_t *ch_obj) {
+    uint8_t i = 0;
+    // Lock frame sync info
+    pthread_mutex_lock(&fs_lock);
+    /* Nothing registered, or bad argument: nothing to unregister. */
+    if (!fs.num_cam || !ch_obj) {
+        LOGH("X, DBG_FS: channel not found  !!");
+        // Unlock frame sync info before bailing out
+        pthread_mutex_unlock(&fs_lock);
+        return -1;
+    }
+    /* Locate this channel's slot in the registration table. */
+    for (i = 0; i < MAX_NUM_CAMERA_PER_BUNDLE; i++) {
+        if (fs.ch_obj[i] == ch_obj) {
+            LOGD("found ch_obj at i (%d) ", i);
+            break;
+        }
+    }
+    if (i < MAX_NUM_CAMERA_PER_BUNDLE) {
+        LOGD("remove channel info ");
+        /* Clear the slot and its cached bundle callback. */
+        fs.ch_obj[i] = NULL;
+        fs.cb[i] = NULL;
+        fs.num_cam--;
+    } else {
+        LOGD("DBG_FS Channel not found ");
+    }
+    /* Last channel gone: wipe the remaining frame-sync nodes. */
+    if (fs.num_cam == 0) {
+        mm_frame_sync_reset();
+    }
+    LOGH("X, fs.num_cam %d", fs.num_cam);
+    pthread_mutex_unlock(&fs_lock);
+    return 0;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : mm_frame_sync_add
+ *
+ * DESCRIPTION: Add frame info into frame sync nodes
+ *
+ * PARAMETERS :
+ *   @frame_id  : frame id to be added
+ *   @ch_obj  : channel object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_frame_sync_add(uint32_t frame_id, mm_channel_t *ch_obj) {
+
+    LOGD("E, frame id %d ch_obj %p", frame_id, ch_obj);
+    /* frame id 0 doubles as "unused node" in the table, so it is invalid. */
+    if (!frame_id || !ch_obj) {
+        LOGH("X : Error, cannot add sync frame !!");
+        return -1;
+    }
+
+    /* NOTE(review): fs is read/written here without taking fs_lock —
+     * presumably callers serialize via mm_frame_sync_lock_queues(); confirm. */
+    int8_t ch_idx = -1;
+    uint8_t i = 0;
+    /* Map the channel object to its registered slot index. */
+    for (i = 0; i < MAX_NUM_CAMERA_PER_BUNDLE; i++) {
+        if (fs.ch_obj[i] == ch_obj) {
+            ch_idx = i;
+            LOGD("ch id %d ", ch_idx);
+            break;
+        }
+    }
+    if (ch_idx < 0) {
+        LOGH("X : DBG_FS ch not found!!");
+        return -1;
+    }
+    int8_t index = mm_frame_sync_find_frame_index(frame_id);
+    if ((index >= 0) && (index < MM_CAMERA_FRAME_SYNC_NODES)) {
+        /* Node for this frame id already exists: mark this camera's frame. */
+        fs.node[index].frame_valid[ch_idx] = 1;
+    } else if (index < 0) {
+        /* New frame id: take the next node in the circular array,
+         * overwriting whatever (possibly unmatched) entry was there. */
+        if (fs.pos >= MM_CAMERA_FRAME_SYNC_NODES) {
+            fs.pos = 0;
+        }
+        index = fs.pos;
+        memset(&fs.node[index], 0x00, sizeof(mm_channel_sync_node_t));
+        fs.pos++;
+        fs.node[index].frame_idx = frame_id;
+        fs.node[index].frame_valid[ch_idx] = 1;
+        /* With a single camera every frame is trivially matched. */
+        if (fs.num_cam == 1) {
+            LOGD("Single camera frame %d , matched ", frame_id);
+            fs.node[index].matched = 1;
+        }
+    }
+    /* A node becomes "matched" once every registered camera has
+     * contributed a frame with this id. */
+    uint8_t frames_valid = 0;
+    if (!fs.node[index].matched) {
+        for (i = 0; i < MAX_NUM_CAMERA_PER_BUNDLE; i++) {
+            if (fs.node[index].frame_valid[i]) {
+                frames_valid++;
+            }
+        }
+        if (frames_valid == fs.num_cam) {
+            fs.node[index].matched = 1;
+            LOGD("dual camera frame %d , matched ",
+                     frame_id);
+        }
+    }
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_frame_sync_remove
+ *
+ * DESCRIPTION: Remove frame info from frame sync nodes
+ *
+ * PARAMETERS :
+ *   @frame_id  : frame id to be removed
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_frame_sync_remove(uint32_t frame_id) {
+    int8_t index = -1;
+
+    LOGD("E, frame_id %d", frame_id);
+    /* 0 marks an unused node, so it can never be a valid frame id. */
+    if (!frame_id) {
+        LOGE("X, DBG_FS frame id invalid");
+        return -1;
+    }
+
+    /* Clear the node if present; "not found" is silently treated as
+     * success (frame may have been overwritten by the circular buffer). */
+    index = mm_frame_sync_find_frame_index(frame_id);
+    if ((index >= 0) && (index < MM_CAMERA_FRAME_SYNC_NODES)) {
+        LOGD("Removing sync frame %d", frame_id);
+        memset(&fs.node[index], 0x00, sizeof(mm_channel_sync_node_t));
+    }
+    LOGD("X ");
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_frame_sync_find_matched
+ *
+ * DESCRIPTION: Find a matched sync frame from the node array
+ *
+ * PARAMETERS :
+ *   @oldest  : If enabled, find oldest matched frame.,
+ *                  If not enabled, get the first matched frame found
+ *
+ * RETURN     : uint32_t frame index
+ *              0  -- if no matched frame is found
+ *              frame index -- if a matched frame is found
+ *==========================================================================*/
+uint32_t mm_frame_sync_find_matched(uint8_t oldest) {
+    LOGH("E, oldest %d ", oldest);
+    uint8_t i = 0;
+    uint32_t frame_idx = 0;
+    uint32_t curr_frame_idx = 0;
+    /* Scan all nodes; with oldest==0 stop at the first matched node,
+     * otherwise keep the minimum (numerically smallest) matched frame id. */
+    for (i = 0; i < MM_CAMERA_FRAME_SYNC_NODES; i++) {
+        if (fs.node[i].matched) {
+            curr_frame_idx = fs.node[i].frame_idx;
+            if (!frame_idx) {
+                frame_idx = curr_frame_idx;
+            }
+            if (!oldest) {
+                break;
+            } else if (frame_idx > curr_frame_idx) {
+                frame_idx = curr_frame_idx;
+            }
+        }
+    }
+    /* Returns 0 when no matched node exists (0 is never a valid frame id). */
+    LOGH("X, oldest %d frame idx %d", oldest, frame_idx);
+    return frame_idx;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_frame_sync_find_frame_index
+ *
+ * DESCRIPTION: Find sync frame index if present
+ *
+ * PARAMETERS :
+ *   @frame_id  : frame id to be searched
+ *
+ * RETURN     : int8_t type of status
+ *              -1  -- If desired frame not found
+ *              index: node array index if frame is found
+ *==========================================================================*/
+int8_t mm_frame_sync_find_frame_index(uint32_t frame_id) {
+
+    LOGD("E, frame_id %d", frame_id);
+    /* Linear search over the (small) node array; first hit wins.
+     * Callers must not pass frame_id 0 — zeroed (unused) nodes also
+     * carry frame_idx 0 and would falsely match. */
+    int8_t index = -1, i = 0;
+    for (i = 0; i < MM_CAMERA_FRAME_SYNC_NODES; i++) {
+        if (fs.node[i].frame_idx == frame_id) {
+            index = i;
+            break;
+        }
+    }
+    LOGD("X index :%d", index);
+    return index;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_frame_sync_lock_queues
+ *
+ * DESCRIPTION: Lock all channel queues present in node info
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void mm_frame_sync_lock_queues() {
+    uint8_t j = 0;
+    LOGD("E ")
+    /* Lock every registered channel's superbuf queue first, then fs_lock.
+     * mm_frame_sync_unlock_queues() must release in the reverse pattern. */
+    for (j = 0; j < MAX_NUM_CAMERA_PER_BUNDLE; j++) {
+        if (fs.ch_obj[j]) {
+            mm_channel_queue_t *ch_queue =
+                    &fs.ch_obj[j]->bundle.superbuf_queue;
+            /* NOTE(review): ch_queue is the address of an embedded member,
+             * so this null check can never fail — kept for symmetry. */
+            if (ch_queue) {
+                pthread_mutex_lock(&ch_queue->que.lock);
+                LOGL("Done locking fs.ch_obj[%d] ", j);
+            }
+        }
+    }
+    pthread_mutex_lock(&fs_lock);
+    LOGD("X ");
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_frame_sync_unlock_queues
+ *
+ * DESCRIPTION: Unlock all channel queues
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void mm_frame_sync_unlock_queues() {
+    // Unlock all queues
+    uint8_t j = 0;
+    LOGD("E ");
+    /* Release fs_lock first (it was taken last in
+     * mm_frame_sync_lock_queues), then each channel queue lock. */
+    pthread_mutex_unlock(&fs_lock);
+    LOGL("Done unlocking fs ");
+    for (j = 0; j < MAX_NUM_CAMERA_PER_BUNDLE; j++) {
+        if (fs.ch_obj[j]) {
+            mm_channel_queue_t *ch_queue =
+                    &fs.ch_obj[j]->bundle.superbuf_queue;
+            /* NOTE(review): always non-NULL (address of embedded member). */
+            if (ch_queue) {
+                pthread_mutex_unlock(&ch_queue->que.lock);
+                LOGL("Done unlocking fs.ch_obj[%d] ", j);
+            }
+        }
+    }
+    LOGD("X ");
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_node_qbuf
+ *
+ * DESCRIPTION: qbuf all buffers in a node
+ *
+ * PARAMETERS :
+ *   @ch_obj  : Channel info
+ *   @node    : node to qbuf
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void mm_channel_node_qbuf(mm_channel_t *ch_obj, mm_channel_queue_node_t *node) {
+    uint8_t i;
+    if (!ch_obj || !node) {
+        return;
+    }
+    /* Return every buffer of this super-buf node to the kernel.
+     * NOTE(review): unlike mm_channel_superbuf_flush_matched(), the
+     * individual buf pointers are not NULL-checked here — presumably
+     * callers only pass fully-populated nodes; confirm. */
+    for (i = 0; i < node->num_of_bufs; i++) {
+        mm_channel_qbuf(ch_obj, node->super_buf[i].buf);
+    }
+    return;
+}
diff --git a/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c b/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
new file mode 100644
index 0000000..cdd9ab9
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
@@ -0,0 +1,2257 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// To remove
+#include <cutils/properties.h>
+
+// System dependencies
+#include <pthread.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <stdlib.h>
+#include <linux/media.h>
+#include <media/msm_cam_sensor.h>
+#include <dlfcn.h>
+
+#define IOCTL_H <SYSTEM_HEADER_PREFIX/ioctl.h>
+#include IOCTL_H
+
+// Camera dependencies
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+static pthread_mutex_t g_intf_lock = PTHREAD_MUTEX_INITIALIZER;
+
+static mm_camera_ctrl_t g_cam_ctrl;
+
+static pthread_mutex_t g_handler_lock = PTHREAD_MUTEX_INITIALIZER;
+static uint16_t g_handler_history_count = 0; /* history count for handler */
+
+// 16th (starting from 0) bit tells its a BACK or FRONT camera
+#define CAM_SENSOR_FACING_MASK (1U<<16)
+// 24th (starting from 0) bit tells its a MAIN or AUX camera
+#define CAM_SENSOR_TYPE_MASK (1U<<24)
+// 25th (starting from 0) bit tells its YUV sensor or not
+#define CAM_SENSOR_FORMAT_MASK (1U<<25)
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_generate_handler
+ *
+ * DESCRIPTION: utility function to generate handler for camera/channel/stream
+ *
+ * PARAMETERS :
+ *   @index: index of the object to have handler
+ *
+ * RETURN     : uint32_t type of handle that uniquely identify the object
+ *==========================================================================*/
+uint32_t mm_camera_util_generate_handler(uint8_t index)
+{
+    uint32_t handler = 0;
+    pthread_mutex_lock(&g_handler_lock);
+    /* Monotonic 16-bit generation counter; skip 0 on wrap so a handle
+     * of 0 can always mean "invalid". */
+    g_handler_history_count++;
+    if (0 == g_handler_history_count) {
+        g_handler_history_count++;
+    }
+    handler = g_handler_history_count;
+    /* Layout: bits 8..23 = generation counter, bits 0..7 = object index. */
+    handler = (handler<<8) | index;
+    pthread_mutex_unlock(&g_handler_lock);
+    return handler;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_get_index_by_handler
+ *
+ * DESCRIPTION: utility function to get index from handle
+ *
+ * PARAMETERS :
+ *   @handler: object handle
+ *
+ * RETURN     : uint8_t type of index derived from handle
+ *==========================================================================*/
+uint8_t mm_camera_util_get_index_by_handler(uint32_t handler)
+{
+    /* Index lives in the low byte of the handle
+     * (see mm_camera_util_generate_handler). */
+    return (handler&0x000000ff);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_get_dev_name
+ *
+ * DESCRIPTION: utility function to get device name from camera handle
+ *
+ * PARAMETERS :
+ *   @cam_handle: camera handle
+ *
+ * RETURN     : char ptr to the device name stored in global variable
+ * NOTE       : caller should not free the char ptr
+ *==========================================================================*/
+const char *mm_camera_util_get_dev_name(uint32_t cam_handle)
+{
+    char *dev_name = NULL;
+    uint8_t cam_idx = mm_camera_util_get_index_by_handler(cam_handle);
+    /* Returns NULL for an out-of-range index; otherwise points into the
+     * global table — caller must not free it. */
+    if(cam_idx < MM_CAMERA_MAX_NUM_SENSORS) {
+        dev_name = g_cam_ctrl.video_dev_name[cam_idx];
+    }
+    return dev_name;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_get_camera_by_handler
+ *
+ * DESCRIPTION: utility function to get camera object from camera handle
+ *
+ * PARAMETERS :
+ *   @cam_handle: camera handle
+ *
+ * RETURN     : ptr to the camera object stored in global variable
+ * NOTE       : caller should not free the camera object ptr
+ *==========================================================================*/
+mm_camera_obj_t* mm_camera_util_get_camera_by_handler(uint32_t cam_handle)
+{
+    mm_camera_obj_t *cam_obj = NULL;
+    uint8_t cam_idx = mm_camera_util_get_index_by_handler(cam_handle);
+
+    /* The full handle (index + generation) must match my_hdl, so a stale
+     * handle from a previous open of the same slot resolves to NULL. */
+    if (cam_idx < MM_CAMERA_MAX_NUM_SENSORS &&
+        (NULL != g_cam_ctrl.cam_obj[cam_idx]) &&
+        (cam_handle == g_cam_ctrl.cam_obj[cam_idx]->my_hdl)) {
+        cam_obj = g_cam_ctrl.cam_obj[cam_idx];
+    }
+    return cam_obj;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_get_camera_by_session_id
+ *
+ * DESCRIPTION: utility function to get camera object from camera sessionID
+ *
+ * PARAMETERS :
+ *   @session_id: sessionid for which cam obj mapped
+ *
+ * RETURN     : ptr to the camera object stored in global variable
+ * NOTE       : caller should not free the camera object ptr
+ *==========================================================================*/
+mm_camera_obj_t* mm_camera_util_get_camera_by_session_id(uint32_t session_id)
+{
+   int cam_idx = 0;
+   mm_camera_obj_t *cam_obj = NULL;
+   /* Scans the whole table without breaking on a hit; if session ids were
+    * ever duplicated the LAST matching object would win. */
+   for (cam_idx = 0; cam_idx < MM_CAMERA_MAX_NUM_SENSORS; cam_idx++) {
+        if ((NULL != g_cam_ctrl.cam_obj[cam_idx]) &&
+                (session_id == (uint32_t)g_cam_ctrl.cam_obj[cam_idx]->sessionid)) {
+            LOGD("session id:%d match idx:%d\n", session_id, cam_idx);
+            cam_obj = g_cam_ctrl.cam_obj[cam_idx];
+        }
+    }
+    return cam_obj;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_query_capability
+ *
+ * DESCRIPTION: query camera capability
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_query_capability(uint32_t camera_handle)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    LOGD("E: camera_handler = %d ", camera_handle);
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        /* Lock hand-off: take cam_lock before releasing g_intf_lock so the
+         * object cannot be closed/freed in between; the callee is expected
+         * to release cam_lock (interface-layer convention). */
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_query_capability(my_obj);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    LOGD("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_set_parms
+ *
+ * DESCRIPTION: set parameters per camera
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @parms        : ptr to a param struct to be set to server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of parameters to be set
+ *              are already filled in by upper layer caller.
+ *==========================================================================*/
+static int32_t mm_camera_intf_set_parms(uint32_t camera_handle,
+                                        parm_buffer_t *parms)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        /* Hand off g_intf_lock -> cam_lock; callee releases cam_lock. */
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_set_parms(my_obj, parms);
+    } else {
+        /* Unknown/stale handle: report failure (-1). */
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_get_parms
+ *
+ * DESCRIPTION: get parameters per camera
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @parms        : ptr to a param struct to be get from server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Parameters to be get from server are already
+ *              filled in by upper layer caller. After this call, corresponding
+ *              fields of requested parameters will be filled in by server with
+ *              detailed information.
+ *==========================================================================*/
+static int32_t mm_camera_intf_get_parms(uint32_t camera_handle,
+                                        parm_buffer_t *parms)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        /* Hand off g_intf_lock -> cam_lock; callee releases cam_lock. */
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_get_parms(my_obj, parms);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_do_auto_focus
+ *
+ * DESCRIPTION: performing auto focus
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : if this call success, we will always assume there will
+ *              be an auto_focus event following up.
+ *==========================================================================*/
+static int32_t mm_camera_intf_do_auto_focus(uint32_t camera_handle)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        /* Hand off g_intf_lock -> cam_lock; callee releases cam_lock. */
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_do_auto_focus(my_obj);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_cancel_auto_focus
+ *
+ * DESCRIPTION: cancel auto focus
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_cancel_auto_focus(uint32_t camera_handle)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        /* Hand off g_intf_lock -> cam_lock; callee releases cam_lock. */
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_cancel_auto_focus(my_obj);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_prepare_snapshot
+ *
+ * DESCRIPTION: prepare hardware for snapshot
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @do_af_flag   : flag indicating if AF is needed
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_prepare_snapshot(uint32_t camera_handle,
+                                               int32_t do_af_flag)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        /* Hand off g_intf_lock -> cam_lock; callee releases cam_lock. */
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_prepare_snapshot(my_obj, do_af_flag);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_flush
+ *
+ * DESCRIPTION: flush the current camera state and buffers
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_flush(uint32_t camera_handle)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        /* Hand off g_intf_lock -> cam_lock; callee releases cam_lock. */
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_flush(my_obj);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_close
+ *
+ * DESCRIPTION: close a camera by its handle
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_close(uint32_t camera_handle)
+{
+    int32_t rc = -1;
+    /* Low byte of the handle is the slot index (same extraction as
+     * mm_camera_util_get_index_by_handler). */
+    uint8_t cam_idx = camera_handle & 0x00ff;
+    mm_camera_obj_t * my_obj = NULL;
+
+    LOGD("E: camera_handler = %d ", camera_handle);
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if (my_obj){
+        my_obj->ref_count--;
+
+        if(my_obj->ref_count > 0) {
+            /* still have reference to obj, return here */
+            LOGD("ref_count=%d\n", my_obj->ref_count);
+            pthread_mutex_unlock(&g_intf_lock);
+            rc = 0;
+        } else {
+            /* need close camera here as no other reference
+             * first empty g_cam_ctrl's referent to cam_obj */
+            /* Slot cleared while still holding g_intf_lock, so no other
+             * thread can resolve this handle to the dying object. */
+            g_cam_ctrl.cam_obj[cam_idx] = NULL;
+
+            pthread_mutex_lock(&my_obj->cam_lock);
+            pthread_mutex_unlock(&g_intf_lock);
+            /* mm_camera_close is expected to release cam_lock before the
+             * mutex is destroyed and the object freed. */
+            rc = mm_camera_close(my_obj);
+            pthread_mutex_destroy(&my_obj->cam_lock);
+            free(my_obj);
+        }
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_add_channel
+ *
+ * DESCRIPTION: add a channel
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @attr         : bundle attribute of the channel if needed
+ *   @channel_cb   : callback function for bundle data notify
+ *   @userdata     : user data ptr
+ *
+ * RETURN     : uint32_t type of channel handle
+ *              0  -- invalid channel handle, meaning the op failed
+ *              >0 -- successfully added a channel with a valid handle
+ * NOTE       : if no bundle data notify is needed, meaning each stream in the
+ *              channel will have its own stream data notify callback, then
+ *              attr, channel_cb, and userdata can be NULL. In this case,
+ *              no matching logic will be performed in channel for the bundling.
+ *==========================================================================*/
+static uint32_t mm_camera_intf_add_channel(uint32_t camera_handle,
+                                           mm_camera_channel_attr_t *attr,
+                                           mm_camera_buf_notify_t channel_cb,
+                                           void *userdata)
+{
+    /* 0 means "no channel created" (invalid handle). */
+    uint32_t ch_id = 0;
+    mm_camera_obj_t * my_obj = NULL;
+
+    LOGD("E camera_handler = %d", camera_handle);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        /* Hand off g_intf_lock -> cam_lock; callee releases cam_lock. */
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        ch_id = mm_camera_add_channel(my_obj, attr, channel_cb, userdata);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    LOGD("X ch_id = %d", ch_id);
+    return ch_id;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_del_channel
+ *
+ * DESCRIPTION: delete a channel by its handle
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : all streams in the channel should be stopped already before
+ *              this channel can be deleted.
+ *==========================================================================*/
+static int32_t mm_camera_intf_del_channel(uint32_t camera_handle,
+                                          uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    LOGD("E ch_id = %d", ch_id);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        /* Hand off g_intf_lock -> cam_lock; callee releases cam_lock. */
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_del_channel(my_obj, ch_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    LOGD("X");
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_get_bundle_info
+ *
+ * DESCRIPTION: query bundle info of the channel
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @bundle_info  : bundle info to be filled in
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : all streams in the channel should be stopped already before
+ *              this channel can be deleted.
+ *==========================================================================*/
+static int32_t mm_camera_intf_get_bundle_info(uint32_t camera_handle,
+                                              uint32_t ch_id,
+                                              cam_bundle_config_t *bundle_info)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    LOGD("E ch_id = %d", ch_id);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        /* Hand off g_intf_lock -> cam_lock; callee releases cam_lock. */
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_get_bundle_info(my_obj, ch_id, bundle_info);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    LOGD("X");
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_register_event_notify
+ *
+ * DESCRIPTION: register for event notify
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @evt_cb       : callback for event notify
+ *   @user_data    : user data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_register_event_notify(uint32_t camera_handle,
+                                                    mm_camera_event_notify_t evt_cb,
+                                                    void * user_data)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    LOGD("E ");
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        /* Hand off g_intf_lock -> cam_lock; callee releases cam_lock. */
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_register_event_notify(my_obj, evt_cb, user_data);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    LOGD("E rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_qbuf
+ *
+ * DESCRIPTION: enqueue buffer back to kernel
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @buf          : buf ptr to be enqueued
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_qbuf(uint32_t camera_handle,
+                                    uint32_t ch_id,
+                                    mm_camera_buf_def_t *buf)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_qbuf(my_obj, ch_id, buf);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    LOGD("X evt_type = %d",rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_get_queued_buf_count
+ *
+ * DESCRIPTION: returns the queued buffer count
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @stream_id : stream id
+ *
+ * RETURN     : int32_t - queued buffer count
+ *
+ *==========================================================================*/
+static int32_t mm_camera_intf_get_queued_buf_count(uint32_t camera_handle,
+        uint32_t ch_id, uint32_t stream_id)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_get_queued_buf_count(my_obj, ch_id, stream_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    LOGD("X queued buffer count = %d",rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_link_stream
+ *
+ * DESCRIPTION: link a stream into a new channel
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @stream_id    : stream id
+ *   @linked_ch_id : channel in which the stream will be linked
+ *
+ * RETURN     : int32_t type of stream handle
+ *              0  -- invalid stream handle, meaning the op failed
+ *              >0 -- successfully linked a stream with a valid handle
+ *==========================================================================*/
+static int32_t mm_camera_intf_link_stream(uint32_t camera_handle,
+        uint32_t ch_id,
+        uint32_t stream_id,
+        uint32_t linked_ch_id)
+{
+    uint32_t id = 0;
+    mm_camera_obj_t * my_obj = NULL;
+
+    LOGD("E handle = %u ch_id = %u",
+          camera_handle, ch_id);
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        id = mm_camera_link_stream(my_obj, ch_id, stream_id, linked_ch_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+
+    LOGD("X stream_id = %u", stream_id);
+    return (int32_t)id;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_add_stream
+ *
+ * DESCRIPTION: add a stream into a channel
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : uint32_t type of stream handle
+ *              0  -- invalid stream handle, meaning the op failed
+ *              >0 -- successfully added a stream with a valid handle
+ *==========================================================================*/
+static uint32_t mm_camera_intf_add_stream(uint32_t camera_handle,
+                                          uint32_t ch_id)
+{
+    uint32_t stream_id = 0;
+    mm_camera_obj_t * my_obj = NULL;
+
+    LOGD("E handle = %d ch_id = %d",
+          camera_handle, ch_id);
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        stream_id = mm_camera_add_stream(my_obj, ch_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    LOGD("X stream_id = %d", stream_id);
+    return stream_id;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_del_stream
+ *
+ * DESCRIPTION: delete a stream by its handle
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @stream_id    : stream handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : stream should be stopped already before it can be deleted.
+ *==========================================================================*/
+static int32_t mm_camera_intf_del_stream(uint32_t camera_handle,
+                                         uint32_t ch_id,
+                                         uint32_t stream_id)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    LOGD("E handle = %d ch_id = %d stream_id = %d",
+          camera_handle, ch_id, stream_id);
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_del_stream(my_obj, ch_id, stream_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    LOGD("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_config_stream
+ *
+ * DESCRIPTION: configure a stream
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @stream_id    : stream handle
+ *   @config       : stream configuration
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_config_stream(uint32_t camera_handle,
+                                            uint32_t ch_id,
+                                            uint32_t stream_id,
+                                            mm_camera_stream_config_t *config)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    LOGD("E handle = %d, ch_id = %d,stream_id = %d",
+          camera_handle, ch_id, stream_id);
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    LOGD("mm_camera_intf_config_stream stream_id = %d",stream_id);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_config_stream(my_obj, ch_id, stream_id, config);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    LOGD("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_start_channel
+ *
+ * DESCRIPTION: start a channel, which will start all streams in the channel
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_start_channel(uint32_t camera_handle,
+                                            uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_start_channel(my_obj, ch_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    LOGD("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_stop_channel
+ *
+ * DESCRIPTION: stop a channel, which will stop all streams in the channel
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_stop_channel(uint32_t camera_handle,
+                                           uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_stop_channel(my_obj, ch_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    LOGD("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_request_super_buf
+ *
+ * DESCRIPTION: for burst mode in bundle, reuqest certain amount of matched
+ *              frames from superbuf queue
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id             : channel handle
+ *   @buf                : request buffer info
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_request_super_buf(uint32_t camera_handle,
+        uint32_t ch_id, mm_camera_req_buf_t *buf)
+{
+    int32_t rc = -1;
+    LOGD("E camera_handler = %d,ch_id = %d",
+          camera_handle, ch_id);
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj && buf) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_request_super_buf (my_obj, ch_id, buf);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    LOGD("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_cancel_super_buf_request
+ *
+ * DESCRIPTION: for burst mode in bundle, cancel the reuqest for certain amount
+ *              of matched frames from superbuf queue
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_cancel_super_buf_request(uint32_t camera_handle,
+                                                       uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    LOGD("E camera_handler = %d,ch_id = %d",
+          camera_handle, ch_id);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_cancel_super_buf_request(my_obj, ch_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    LOGD("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_flush_super_buf_queue
+ *
+ * DESCRIPTION: flush out all frames in the superbuf queue
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @frame_idx    : frame index
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_flush_super_buf_queue(uint32_t camera_handle,
+                                                    uint32_t ch_id, uint32_t frame_idx)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    LOGD("E camera_handler = %d,ch_id = %d",
+          camera_handle, ch_id);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_flush_super_buf_queue(my_obj, ch_id, frame_idx);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    LOGD("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_start_zsl_snapshot
+ *
+ * DESCRIPTION: Starts zsl snapshot
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_start_zsl_snapshot(uint32_t camera_handle,
+        uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    LOGD("E camera_handler = %d,ch_id = %d",
+          camera_handle, ch_id);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_start_zsl_snapshot_ch(my_obj, ch_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    LOGD("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_stop_zsl_snapshot
+ *
+ * DESCRIPTION: Stops zsl snapshot
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_stop_zsl_snapshot(uint32_t camera_handle,
+        uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    LOGD("E camera_handler = %d,ch_id = %d",
+          camera_handle, ch_id);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_stop_zsl_snapshot_ch(my_obj, ch_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    LOGD("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_configure_notify_mode
+ *
+ * DESCRIPTION: Configures channel notification mode
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @notify_mode  : notification mode
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_configure_notify_mode(uint32_t camera_handle,
+                                                    uint32_t ch_id,
+                                                    mm_camera_super_buf_notify_mode_t notify_mode)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    LOGD("E camera_handler = %d,ch_id = %d",
+          camera_handle, ch_id);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_config_channel_notify(my_obj, ch_id, notify_mode);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    LOGD("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_map_buf
+ *
+ * DESCRIPTION: mapping camera buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @buf_type     : type of buffer to be mapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_CAPABILITY
+ *                   CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+ *   @fd           : file descriptor of the buffer
+ *   @size         : size of the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_map_buf(uint32_t camera_handle,
+    uint8_t buf_type, int fd, size_t size, void *buffer)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_map_buf(my_obj, buf_type, fd, size, buffer);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_map_bufs
+ *
+ * DESCRIPTION: mapping camera buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @buf_type     : type of buffer to be mapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_CAPABILITY
+ *                   CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_map_bufs(uint32_t camera_handle,
+        const cam_buf_map_type_list *buf_map_list)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_map_bufs(my_obj, buf_map_list);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_unmap_buf
+ *
+ * DESCRIPTION: unmapping camera buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @buf_type     : type of buffer to be unmapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_CAPABILITY
+ *                   CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_unmap_buf(uint32_t camera_handle,
+                                        uint8_t buf_type)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_unmap_buf(my_obj, buf_type);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_set_stream_parms
+ *
+ * DESCRIPTION: set parameters per stream
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @s_id         : stream handle
+ *   @parms        : ptr to a param struct to be set to server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of parameters to be set
+ *              are already filled in by upper layer caller.
+ *==========================================================================*/
+static int32_t mm_camera_intf_set_stream_parms(uint32_t camera_handle,
+                                               uint32_t ch_id,
+                                               uint32_t s_id,
+                                               cam_stream_parm_buffer_t *parms)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    LOGD("E camera_handle = %d,ch_id = %d,s_id = %d",
+          camera_handle, ch_id, s_id);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_set_stream_parms(my_obj, ch_id, s_id, parms);
+    }else{
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    LOGD("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_get_stream_parms
+ *
+ * DESCRIPTION: get parameters per stream
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @s_id         : stream handle
+ *   @parms        : ptr to a param struct to be get from server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Parameters to be get from server are already
+ *              filled in by upper layer caller. After this call, corresponding
+ *              fields of requested parameters will be filled in by server with
+ *              detailed information.
+ *==========================================================================*/
+static int32_t mm_camera_intf_get_stream_parms(uint32_t camera_handle,
+                                               uint32_t ch_id,
+                                               uint32_t s_id,
+                                               cam_stream_parm_buffer_t *parms)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    LOGD("E camera_handle = %d,ch_id = %d,s_id = %d",
+          camera_handle, ch_id, s_id);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_get_stream_parms(my_obj, ch_id, s_id, parms);
+    }else{
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+
+    LOGD("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_map_stream_buf
+ *
+ * DESCRIPTION: mapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @s_id         : stream handle
+ *   @buf_type     : type of buffer to be mapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @buf_idx      : index of buffer within the stream buffers, only valid if
+ *                   buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx    : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plean_idx is the
+ *                   index to plane (0..num_of_planes)
+ *   @fd           : file descriptor of the buffer
+ *   @size         : size of the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_map_stream_buf(uint32_t camera_handle,
+        uint32_t ch_id, uint32_t stream_id, uint8_t buf_type,
+        uint32_t buf_idx, int32_t plane_idx, int fd,
+        size_t size, void *buffer)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    LOGD("E camera_handle = %d, ch_id = %d, s_id = %d, buf_idx = %d, plane_idx = %d",
+          camera_handle, ch_id, stream_id, buf_idx, plane_idx);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_map_stream_buf(my_obj, ch_id, stream_id,
+                                      buf_type, buf_idx, plane_idx,
+                                      fd, size, buffer);
+    }else{
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+
+    LOGD("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_map_stream_bufs
+ *
+ * DESCRIPTION: mapping stream buffers via domain socket to server
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @buf_map_list : list of buffers to be mapped
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_map_stream_bufs(uint32_t camera_handle,
+                                              uint32_t ch_id,
+                                              const cam_buf_map_type_list *buf_map_list)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    LOGD("E camera_handle = %d, ch_id = %d",
+          camera_handle, ch_id);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_map_stream_bufs(my_obj, ch_id, buf_map_list);
+    }else{
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+
+    LOGD("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_unmap_stream_buf
+ *
+ * DESCRIPTION: unmapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @s_id         : stream handle
+ *   @buf_type     : type of buffer to be unmapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @buf_idx      : index of buffer within the stream buffers, only valid if
+ *                   buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx    : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plean_idx is the
+ *                   index to plane (0..num_of_planes)
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_unmap_stream_buf(uint32_t camera_handle,
+                                               uint32_t ch_id,
+                                               uint32_t stream_id,
+                                               uint8_t buf_type,
+                                               uint32_t buf_idx,
+                                               int32_t plane_idx)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    LOGD("E camera_handle = %d, ch_id = %d, s_id = %d, buf_idx = %d, plane_idx = %d",
+          camera_handle, ch_id, stream_id, buf_idx, plane_idx);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_unmap_stream_buf(my_obj, ch_id, stream_id,
+                                        buf_type, buf_idx, plane_idx);
+    }else{
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+
+    LOGD("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_get_session_id
+ *
+ * DESCRIPTION: retrieve the session ID from the kernel for this HWI instance
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @sessionid: session id to be retrieved from server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : if this call succeeds, we will get a valid session id.
+ *==========================================================================*/
+static int32_t mm_camera_intf_get_session_id(uint32_t camera_handle,
+                                                       uint32_t* sessionid)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        *sessionid = my_obj->sessionid;
+        pthread_mutex_unlock(&my_obj->cam_lock);
+        rc = 0;
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_sync_related_sensors
+ *
+ * DESCRIPTION: send related-camera linking information to the server
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @related_cam_info: pointer to the related cam info to be sent to the server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : if this call succeeds, we will get linking established in back end
+ *==========================================================================*/
+static int32_t mm_camera_intf_sync_related_sensors(uint32_t camera_handle,
+                              cam_sync_related_sensors_event_info_t* related_cam_info)
+{
+    mm_camera_obj_t *my_obj;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+    if (NULL == my_obj) {
+        pthread_mutex_unlock(&g_intf_lock);
+        return -1;
+    }
+
+    /* cam_lock is taken before g_intf_lock is released; the callee is
+     * expected to release cam_lock (per the convention used by the
+     * other wrappers in this file — confirm against mm_camera.c). */
+    pthread_mutex_lock(&my_obj->cam_lock);
+    pthread_mutex_unlock(&g_intf_lock);
+    return mm_camera_sync_related_sensors(my_obj, related_cam_info);
+}
+
+/*===========================================================================
+ * FUNCTION   : get_sensor_info
+ *
+ * DESCRIPTION: get sensor info like facing(back/front), mount angle,
+ *              main/aux type and output format (YUV/RAW) for each sensor
+ *              by enumerating msm_config media entities, and store the
+ *              results into g_cam_ctrl's parallel arrays.
+ *
+ * PARAMETERS : none (fills global g_cam_ctrl)
+ *
+ * RETURN     : void
+ *==========================================================================*/
+void get_sensor_info()
+{
+    int rc = 0;
+    int dev_fd = -1;
+    struct media_device_info mdev_info;
+    int num_media_devices = 0;
+    size_t num_cameras = 0;
+
+    LOGD("E");
+    while (1) {
+        char dev_name[32];
+        snprintf(dev_name, sizeof(dev_name), "/dev/media%d", num_media_devices);
+        dev_fd = open(dev_name, O_RDWR | O_NONBLOCK);
+        if (dev_fd < 0) {
+            LOGD("Done discovering media devices\n");
+            break;
+        }
+        num_media_devices++;
+        memset(&mdev_info, 0, sizeof(mdev_info));
+        rc = ioctl(dev_fd, MEDIA_IOC_DEVICE_INFO, &mdev_info);
+        if (rc < 0) {
+            LOGE("Error: ioctl media_dev failed: %s\n", strerror(errno));
+            close(dev_fd);
+            dev_fd = -1;
+            num_cameras = 0;
+            break;
+        }
+
+        /* Only the msm_config media device carries sensor entities */
+        if(strncmp(mdev_info.model,  MSM_CONFIGURATION_NAME, sizeof(mdev_info.model)) != 0) {
+            close(dev_fd);
+            dev_fd = -1;
+            continue;
+        }
+
+        unsigned int num_entities = 1;
+        while (1) {
+            struct media_entity_desc entity;
+            uint32_t temp;
+            uint32_t mount_angle;
+            uint32_t facing;
+            int32_t type = 0;
+            uint8_t is_yuv;
+
+            memset(&entity, 0, sizeof(entity));
+            entity.id = num_entities++;
+            rc = ioctl(dev_fd, MEDIA_IOC_ENUM_ENTITIES, &entity);
+            if (rc < 0) {
+                LOGD("Done enumerating media entities\n");
+                rc = 0;
+                break;
+            }
+            if(entity.type == MEDIA_ENT_T_V4L2_SUBDEV &&
+                entity.group_id == MSM_CAMERA_SUBDEV_SENSOR) {
+                /* BUGFIX: bound-check before writing the g_cam_ctrl arrays;
+                 * the video-node enumeration in get_num_of_cameras() has the
+                 * same guard, but this loop was missing it. */
+                if (num_cameras >= MM_CAMERA_MAX_NUM_SENSORS) {
+                    LOGW("Maximum number of camera reached %u",
+                            (unsigned int)num_cameras);
+                    break;
+                }
+                /* entity.flags encoding (per msm sensor driver):
+                 * bits 8..15 = mount angle / 90, low bits = facing/type/format */
+                temp = entity.flags >> 8;
+                mount_angle = (temp & 0xFF) * 90;
+                facing = ((entity.flags & CAM_SENSOR_FACING_MASK) ?
+                        CAMERA_FACING_FRONT:CAMERA_FACING_BACK);
+                /* TODO: Need to revisit this logic if front AUX is available. */
+                if ((unsigned int)facing == CAMERA_FACING_FRONT) {
+                    type = CAM_TYPE_STANDALONE;
+                } else if (entity.flags & CAM_SENSOR_TYPE_MASK) {
+                    type = CAM_TYPE_AUX;
+                } else {
+                    type = CAM_TYPE_MAIN;
+                }
+                is_yuv = ((entity.flags & CAM_SENSOR_FORMAT_MASK) ?
+                        CAM_SENSOR_YUV:CAM_SENSOR_RAW);
+                LOGL("index = %u flag = %x mount_angle = %u "
+                        "facing = %u type: %u is_yuv = %u\n",
+                        (unsigned int)num_cameras, (unsigned int)temp,
+                        (unsigned int)mount_angle, (unsigned int)facing,
+                        (unsigned int)type, (uint8_t)is_yuv);
+                g_cam_ctrl.info[num_cameras].facing = (int)facing;
+                g_cam_ctrl.info[num_cameras].orientation = (int)mount_angle;
+                g_cam_ctrl.cam_type[num_cameras] = type;
+                g_cam_ctrl.is_yuv[num_cameras] = is_yuv;
+                LOGD("dev_info[id=%zu,name='%s']\n",
+                         num_cameras, g_cam_ctrl.video_dev_name[num_cameras]);
+                num_cameras++;
+                continue;
+            }
+        }
+        close(dev_fd);
+        dev_fd = -1;
+    }
+
+    LOGD("num_cameras=%d\n", g_cam_ctrl.num_cam);
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : sort_camera_info
+ *
+ * DESCRIPTION: sort camera info to keep back cameras idx is smaller than front cameras idx
+ *
+ * PARAMETERS : number of cameras
+ *
+ * RETURN     :
+ *==========================================================================*/
+void sort_camera_info(int num_cam)
+{
+    int idx = 0, i;
+    int8_t is_yuv_aux_cam_exposed = 0;
+    char prop[PROPERTY_VALUE_MAX];
+    /* Staging copies of g_cam_ctrl's five parallel per-camera arrays;
+     * entries are gathered in the desired exposure order, then copied
+     * back wholesale at the end. */
+    struct camera_info temp_info[MM_CAMERA_MAX_NUM_SENSORS];
+    cam_sync_type_t temp_type[MM_CAMERA_MAX_NUM_SENSORS];
+    cam_sync_mode_t temp_mode[MM_CAMERA_MAX_NUM_SENSORS];
+    uint8_t temp_is_yuv[MM_CAMERA_MAX_NUM_SENSORS];
+    char temp_dev_name[MM_CAMERA_MAX_NUM_SENSORS][MM_CAMERA_DEV_NAME_LEN];
+
+    memset(temp_info, 0, sizeof(temp_info));
+    memset(temp_dev_name, 0, sizeof(temp_dev_name));
+    memset(temp_type, 0, sizeof(temp_type));
+    memset(temp_mode, 0, sizeof(temp_mode));
+    memset(temp_is_yuv, 0, sizeof(temp_is_yuv));
+
+    // Signifies whether YUV AUX camera has to be exposed as physical camera
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.aux.yuv", prop, "0");
+    is_yuv_aux_cam_exposed = atoi(prop);
+    LOGI("YUV Aux camera exposed %d",is_yuv_aux_cam_exposed);
+
+    /* Order of the camera exposed is
+    Back main, Front main, Back Aux and then Front Aux.
+    It is because that lot of 3rd party cameras apps
+    blindly assume 0th is Back and 1st is front */
+
+    /* Firstly save the main back cameras info */
+    for (i = 0; i < num_cam; i++) {
+        if ((g_cam_ctrl.info[i].facing == CAMERA_FACING_BACK) &&
+            (g_cam_ctrl.cam_type[i] != CAM_TYPE_AUX)) {
+            temp_info[idx] = g_cam_ctrl.info[i];
+            temp_type[idx] = g_cam_ctrl.cam_type[i];
+            temp_mode[idx] = g_cam_ctrl.cam_mode[i];
+            temp_is_yuv[idx] = g_cam_ctrl.is_yuv[i];
+            LOGD("Found Back Main Camera: i: %d idx: %d", i, idx);
+            memcpy(temp_dev_name[idx++],g_cam_ctrl.video_dev_name[i],
+                MM_CAMERA_DEV_NAME_LEN);
+        }
+    }
+
+    /* Save the main front cameras info */
+    for (i = 0; i < num_cam; i++) {
+        if ((g_cam_ctrl.info[i].facing == CAMERA_FACING_FRONT) &&
+            (g_cam_ctrl.cam_type[i] != CAM_TYPE_AUX)) {
+            temp_info[idx] = g_cam_ctrl.info[i];
+            temp_type[idx] = g_cam_ctrl.cam_type[i];
+            temp_mode[idx] = g_cam_ctrl.cam_mode[i];
+            temp_is_yuv[idx] = g_cam_ctrl.is_yuv[i];
+            LOGD("Found Front Main Camera: i: %d idx: %d", i, idx);
+            memcpy(temp_dev_name[idx++],g_cam_ctrl.video_dev_name[i],
+                    MM_CAMERA_DEV_NAME_LEN);
+        }
+    }
+
+    /* Back AUX: expose YUV AUX camera if persist.camera.aux.yuv is set to 1.
+    Otherwise expose AUX camera only if it is not YUV. */
+    for (i = 0; i < num_cam; i++) {
+        if ((g_cam_ctrl.info[i].facing == CAMERA_FACING_BACK) &&
+                (g_cam_ctrl.cam_type[i] == CAM_TYPE_AUX) &&
+                (is_yuv_aux_cam_exposed || !(g_cam_ctrl.is_yuv[i]))) {
+            temp_info[idx] = g_cam_ctrl.info[i];
+            temp_type[idx] = g_cam_ctrl.cam_type[i];
+            temp_mode[idx] = g_cam_ctrl.cam_mode[i];
+            temp_is_yuv[idx] = g_cam_ctrl.is_yuv[i];
+            LOGD("Found back Aux Camera: i: %d idx: %d", i, idx);
+            memcpy(temp_dev_name[idx++],g_cam_ctrl.video_dev_name[i],
+                MM_CAMERA_DEV_NAME_LEN);
+        }
+    }
+
+    /* Front AUX: same exposure rule as back AUX above.
+    (NOTE(review): per the TODO in get_sensor_info, front sensors are
+    currently classified STANDALONE, so this loop may match nothing.) */
+    for (i = 0; i < num_cam; i++) {
+        if ((g_cam_ctrl.info[i].facing == CAMERA_FACING_FRONT) &&
+                (g_cam_ctrl.cam_type[i] == CAM_TYPE_AUX) &&
+                (is_yuv_aux_cam_exposed || !(g_cam_ctrl.is_yuv[i]))) {
+            temp_info[idx] = g_cam_ctrl.info[i];
+            temp_type[idx] = g_cam_ctrl.cam_type[i];
+            temp_mode[idx] = g_cam_ctrl.cam_mode[i];
+            temp_is_yuv[idx] = g_cam_ctrl.is_yuv[i];
+            LOGD("Found Front Aux Camera: i: %d idx: %d", i, idx);
+            memcpy(temp_dev_name[idx++],g_cam_ctrl.video_dev_name[i],
+                MM_CAMERA_DEV_NAME_LEN);
+        }
+    }
+
+    /* idx can be smaller than num_cam when hidden YUV AUX cameras were
+     * filtered out; it can never exceed num_cam since the four category
+     * predicates above are mutually exclusive. */
+    if (idx <= num_cam) {
+        memcpy(g_cam_ctrl.info, temp_info, sizeof(temp_info));
+        memcpy(g_cam_ctrl.cam_type, temp_type, sizeof(temp_type));
+        memcpy(g_cam_ctrl.cam_mode, temp_mode, sizeof(temp_mode));
+        memcpy(g_cam_ctrl.is_yuv, temp_is_yuv, sizeof(temp_is_yuv));
+        memcpy(g_cam_ctrl.video_dev_name, temp_dev_name, sizeof(temp_dev_name));
+        //Set num cam based on the cameras exposed finally via dual/aux properties.
+        g_cam_ctrl.num_cam = idx;
+        for (i = 0; i < idx; i++) {
+            LOGI("Camera id: %d facing: %d, type: %d is_yuv: %d",
+                i, g_cam_ctrl.info[i].facing, g_cam_ctrl.cam_type[i], g_cam_ctrl.is_yuv[i]);
+        }
+    }
+    LOGI("Number of cameras %d sorted %d", num_cam, idx);
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : get_num_of_cameras
+ *
+ * DESCRIPTION: get number of cameras. Waits for sensor probe to finish,
+ *              enumerates camera video nodes, then collects per-sensor
+ *              info and sorts the exposure order.
+ *
+ * PARAMETERS : none (fills global g_cam_ctrl)
+ *
+ * RETURN     : number of cameras supported
+ *==========================================================================*/
+uint8_t get_num_of_cameras()
+{
+    int rc = 0;
+    int dev_fd = -1;
+    struct media_device_info mdev_info;
+    int num_media_devices = 0;
+    int8_t num_cameras = 0;
+    char subdev_name[32];
+    int32_t sd_fd = -1;
+    struct sensor_init_cfg_data cfg;
+    char prop[PROPERTY_VALUE_MAX];
+
+    LOGD("E");
+
+    /* No cameras are exposed while the device is in the decrypt phase */
+    property_get("vold.decrypt", prop, "0");
+    int decrypt = atoi(prop);
+    if (decrypt == 1)
+     return 0;
+    pthread_mutex_lock(&g_intf_lock);
+
+    memset (&g_cam_ctrl, 0, sizeof (g_cam_ctrl));
+    /* BUGFIX: subdev_name was read below even when no sensor_init entity
+     * was found; keep it a valid (empty) string so open() fails cleanly
+     * instead of operating on uninitialized stack memory. */
+    subdev_name[0] = '\0';
+#ifndef DAEMON_PRESENT
+    if (mm_camera_load_shim_lib() < 0) {
+        LOGE ("Failed to module shim library");
+        /* BUGFIX: release g_intf_lock before returning; the early return
+         * previously left the interface mutex held forever. */
+        pthread_mutex_unlock(&g_intf_lock);
+        return 0;
+    }
+#endif /* DAEMON_PRESENT */
+
+    /* Pass 1: find the sensor_init subdev on the msm_config media device */
+    while (1) {
+        uint32_t num_entities = 1U;
+        char dev_name[32];
+
+        snprintf(dev_name, sizeof(dev_name), "/dev/media%d", num_media_devices);
+        dev_fd = open(dev_name, O_RDWR | O_NONBLOCK);
+        if (dev_fd < 0) {
+            LOGD("Done discovering media devices\n");
+            break;
+        }
+        num_media_devices++;
+        /* BUGFIX: zero mdev_info before the ioctl (the second discovery
+         * loop below already did this; the first one did not). */
+        memset(&mdev_info, 0, sizeof(mdev_info));
+        rc = ioctl(dev_fd, MEDIA_IOC_DEVICE_INFO, &mdev_info);
+        if (rc < 0) {
+            LOGE("Error: ioctl media_dev failed: %s\n", strerror(errno));
+            close(dev_fd);
+            dev_fd = -1;
+            break;
+        }
+
+        if (strncmp(mdev_info.model, MSM_CONFIGURATION_NAME,
+          sizeof(mdev_info.model)) != 0) {
+            close(dev_fd);
+            dev_fd = -1;
+            continue;
+        }
+
+        while (1) {
+            struct media_entity_desc entity;
+            memset(&entity, 0, sizeof(entity));
+            entity.id = num_entities++;
+            LOGD("entity id %d", entity.id);
+            rc = ioctl(dev_fd, MEDIA_IOC_ENUM_ENTITIES, &entity);
+            if (rc < 0) {
+                LOGD("Done enumerating media entities");
+                rc = 0;
+                break;
+            }
+            LOGD("entity name %s type %d group id %d",
+                entity.name, entity.type, entity.group_id);
+            if (entity.type == MEDIA_ENT_T_V4L2_SUBDEV &&
+                entity.group_id == MSM_CAMERA_SUBDEV_SENSOR_INIT) {
+                /* BUGFIX: size the write by the destination buffer, not by
+                 * the unrelated dev_name buffer. */
+                snprintf(subdev_name, sizeof(subdev_name), "/dev/%s", entity.name);
+                break;
+            }
+        }
+        close(dev_fd);
+        dev_fd = -1;
+    }
+
+    /* Open sensor_init subdev and block until sensor probing is done */
+    sd_fd = open(subdev_name, O_RDWR);
+    if (sd_fd < 0) {
+        LOGE("Open sensor_init subdev failed");
+        /* BUGFIX: release g_intf_lock before returning (was returning
+         * with the mutex held, deadlocking every later interface call). */
+        pthread_mutex_unlock(&g_intf_lock);
+        return FALSE;
+    }
+
+    cfg.cfgtype = CFG_SINIT_PROBE_WAIT_DONE;
+    cfg.cfg.setting = NULL;
+    if (ioctl(sd_fd, VIDIOC_MSM_SENSOR_INIT_CFG, &cfg) < 0) {
+        LOGE("failed");
+    }
+    close(sd_fd);
+    dev_fd = -1;
+
+
+    /* Pass 2: enumerate the camera video device nodes */
+    num_media_devices = 0;
+    while (1) {
+        uint32_t num_entities = 1U;
+        char dev_name[32];
+
+        snprintf(dev_name, sizeof(dev_name), "/dev/media%d", num_media_devices);
+        dev_fd = open(dev_name, O_RDWR | O_NONBLOCK);
+        if (dev_fd < 0) {
+            LOGD("Done discovering media devices: %s\n", strerror(errno));
+            break;
+        }
+        num_media_devices++;
+        memset(&mdev_info, 0, sizeof(mdev_info));
+        rc = ioctl(dev_fd, MEDIA_IOC_DEVICE_INFO, &mdev_info);
+        if (rc < 0) {
+            LOGE("Error: ioctl media_dev failed: %s\n", strerror(errno));
+            close(dev_fd);
+            dev_fd = -1;
+            num_cameras = 0;
+            break;
+        }
+
+        if(strncmp(mdev_info.model, MSM_CAMERA_NAME, sizeof(mdev_info.model)) != 0) {
+            close(dev_fd);
+            dev_fd = -1;
+            continue;
+        }
+
+        while (1) {
+            struct media_entity_desc entity;
+            memset(&entity, 0, sizeof(entity));
+            entity.id = num_entities++;
+            rc = ioctl(dev_fd, MEDIA_IOC_ENUM_ENTITIES, &entity);
+            if (rc < 0) {
+                LOGD("Done enumerating media entities\n");
+                rc = 0;
+                break;
+            }
+            if(entity.type == MEDIA_ENT_T_DEVNODE_V4L && entity.group_id == QCAMERA_VNODE_GROUP_ID) {
+                strlcpy(g_cam_ctrl.video_dev_name[num_cameras],
+                     entity.name, sizeof(entity.name));
+                LOGI("dev_info[id=%d,name='%s']\n",
+                    (int)num_cameras, g_cam_ctrl.video_dev_name[num_cameras]);
+                num_cameras++;
+                break;
+            }
+        }
+        close(dev_fd);
+        dev_fd = -1;
+        if (num_cameras >= MM_CAMERA_MAX_NUM_SENSORS) {
+            LOGW("Maximum number of camera reached %d", num_cameras);
+            break;
+        }
+    }
+    g_cam_ctrl.num_cam = num_cameras;
+
+    get_sensor_info();
+    sort_camera_info(g_cam_ctrl.num_cam);
+    /* unlock the mutex */
+    pthread_mutex_unlock(&g_intf_lock);
+    LOGI("num_cameras=%d\n", (int)g_cam_ctrl.num_cam);
+    return(uint8_t)g_cam_ctrl.num_cam;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_process_advanced_capture
+ *
+ * DESCRIPTION: Configures channel advanced capture mode
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @type         : advanced capture type
+ *   @trigger      : 1 for start and 0 for cancel/stop
+ *   @in_value     : input capture configuration
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_process_advanced_capture(uint32_t camera_handle,
+        uint32_t ch_id, mm_camera_advanced_capture_t type,
+        int8_t trigger, void *in_value)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t *my_obj;
+
+    LOGD("E camera_handler = %d,ch_id = %d",
+          camera_handle, ch_id);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+    if (NULL == my_obj) {
+        pthread_mutex_unlock(&g_intf_lock);
+        LOGD("X ");
+        return rc;
+    }
+
+    /* hand off under cam_lock; callee releases it (codebase convention) */
+    pthread_mutex_lock(&my_obj->cam_lock);
+    pthread_mutex_unlock(&g_intf_lock);
+    rc = mm_camera_channel_advanced_capture(my_obj, ch_id, type,
+            (uint32_t)trigger, in_value);
+    LOGD("X ");
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_register_stream_buf_cb
+ *
+ * DESCRIPTION: Register special callback for stream buffer
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @stream_id    : stream handle
+ *   @buf_cb       : callback function
+ *   @cb_type      : SYNC/ASYNC
+ *   @userdata     : userdata pointer
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_register_stream_buf_cb(uint32_t camera_handle,
+        uint32_t ch_id, uint32_t stream_id, mm_camera_buf_notify_t buf_cb,
+        mm_camera_stream_cb_type cb_type, void *userdata)
+{
+    /* BUGFIX: rc was initialized to 0, so an unknown camera_handle fell
+     * through the else-branch and reported success. Every sibling wrapper
+     * in this file initializes rc to -1; do the same here. */
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    LOGD("E handle = %u ch_id = %u",
+          camera_handle, ch_id);
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        /* cam_lock taken before g_intf_lock is dropped; callee releases it */
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_reg_stream_buf_cb(my_obj, ch_id, stream_id,
+                buf_cb, cb_type, userdata);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : get_cam_info
+ *
+ * DESCRIPTION: return the cached camera_info for a camera id and, if
+ *              requested, its sync type (main/aux).
+ *
+ * PARAMETERS :
+ *   @camera_id: camera index; caller must ensure it is < g_cam_ctrl.num_cam
+ *   @pCamType : out param for the camera sync type; may be NULL
+ *
+ * RETURN     : pointer into g_cam_ctrl.info (not owned by the caller)
+ *==========================================================================*/
+struct camera_info *get_cam_info(uint32_t camera_id, cam_sync_type_t *pCamType)
+{
+    /* Robustness: tolerate a NULL out-pointer instead of crashing */
+    if (pCamType != NULL) {
+        *pCamType = g_cam_ctrl.cam_type[camera_id];
+    }
+    return &g_cam_ctrl.info[camera_id];
+}
+
+/*===========================================================================
+ * FUNCTION   : is_yuv_sensor
+ *
+ * DESCRIPTION: report whether the sensor at camera_id outputs YUV (vs RAW),
+ *              as cached by get_sensor_info().
+ *
+ * PARAMETERS :
+ *   @camera_id: camera index; caller must ensure it is a valid index
+ *
+ * RETURN     : non-zero if YUV, 0 if RAW
+ *==========================================================================*/
+uint8_t is_yuv_sensor(uint32_t camera_id)
+{
+    return g_cam_ctrl.is_yuv[camera_id];
+}
+
+/* camera ops v-table: function-pointer table handed to clients through
+ * mm_camera_vtbl_t by camera_open(); every entry forwards to the matching
+ * mm_camera_intf_* wrapper in this file. */
+static mm_camera_ops_t mm_camera_ops = {
+    /* capability / lifecycle / event registration */
+    .query_capability = mm_camera_intf_query_capability,
+    .register_event_notify = mm_camera_intf_register_event_notify,
+    .close_camera = mm_camera_intf_close,
+    /* parameter get/set and focus control */
+    .set_parms = mm_camera_intf_set_parms,
+    .get_parms = mm_camera_intf_get_parms,
+    .do_auto_focus = mm_camera_intf_do_auto_focus,
+    .cancel_auto_focus = mm_camera_intf_cancel_auto_focus,
+    .prepare_snapshot = mm_camera_intf_prepare_snapshot,
+    .start_zsl_snapshot = mm_camera_intf_start_zsl_snapshot,
+    .stop_zsl_snapshot = mm_camera_intf_stop_zsl_snapshot,
+    /* buffer mapping */
+    .map_buf = mm_camera_intf_map_buf,
+    .map_bufs = mm_camera_intf_map_bufs,
+    .unmap_buf = mm_camera_intf_unmap_buf,
+    /* channel management */
+    .add_channel = mm_camera_intf_add_channel,
+    .delete_channel = mm_camera_intf_del_channel,
+    .get_bundle_info = mm_camera_intf_get_bundle_info,
+    /* stream management */
+    .add_stream = mm_camera_intf_add_stream,
+    .link_stream = mm_camera_intf_link_stream,
+    .delete_stream = mm_camera_intf_del_stream,
+    .config_stream = mm_camera_intf_config_stream,
+    .qbuf = mm_camera_intf_qbuf,
+    .get_queued_buf_count = mm_camera_intf_get_queued_buf_count,
+    .map_stream_buf = mm_camera_intf_map_stream_buf,
+    .map_stream_bufs = mm_camera_intf_map_stream_bufs,
+    .unmap_stream_buf = mm_camera_intf_unmap_stream_buf,
+    .set_stream_parms = mm_camera_intf_set_stream_parms,
+    .get_stream_parms = mm_camera_intf_get_stream_parms,
+    .start_channel = mm_camera_intf_start_channel,
+    .stop_channel = mm_camera_intf_stop_channel,
+    /* super-buffer (bundled frame) control */
+    .request_super_buf = mm_camera_intf_request_super_buf,
+    .cancel_super_buf_request = mm_camera_intf_cancel_super_buf_request,
+    .flush_super_buf_queue = mm_camera_intf_flush_super_buf_queue,
+    .configure_notify_mode = mm_camera_intf_configure_notify_mode,
+    /* advanced capture, dual-camera session linking, misc */
+    .process_advanced_capture = mm_camera_intf_process_advanced_capture,
+    .get_session_id = mm_camera_intf_get_session_id,
+    .sync_related_sensors = mm_camera_intf_sync_related_sensors,
+    .flush = mm_camera_intf_flush,
+    .register_stream_buf_cb = mm_camera_intf_register_stream_buf_cb
+};
+
+/*===========================================================================
+ * FUNCTION   : camera_open
+ *
+ * DESCRIPTION: open a camera by camera index
+ *
+ * PARAMETERS :
+ *   @camera_idx  : camera index. should within range of 0 to num_of_cameras
+ *   @camera_vtbl : ptr to a virtual table containing camera handle and operation table.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              non-zero error code -- failure
+ *==========================================================================*/
+int32_t camera_open(uint8_t camera_idx, mm_camera_vtbl_t **camera_vtbl)
+{
+    int32_t rc = 0;
+    mm_camera_obj_t *cam_obj = NULL;
+
+#ifdef QCAMERA_REDEFINE_LOG
+    mm_camera_set_dbg_log_properties();
+#endif
+
+    LOGD("E camera_idx = %d\n", camera_idx);
+    if (camera_idx >= g_cam_ctrl.num_cam) {
+        LOGE("Invalid camera_idx (%d)", camera_idx);
+        return -EINVAL;
+    }
+
+    pthread_mutex_lock(&g_intf_lock);
+    /* opened already: bump the reference count and reuse the object */
+    if(NULL != g_cam_ctrl.cam_obj[camera_idx]) {
+        /* Add reference */
+        g_cam_ctrl.cam_obj[camera_idx]->ref_count++;
+        pthread_mutex_unlock(&g_intf_lock);
+        /* BUGFIX: log said "opened alreadyn" — the trailing newline escape
+         * was mistyped. */
+        LOGD("opened already\n");
+        *camera_vtbl = &g_cam_ctrl.cam_obj[camera_idx]->vtbl;
+        return rc;
+    }
+
+    cam_obj = (mm_camera_obj_t *)malloc(sizeof(mm_camera_obj_t));
+    if(NULL == cam_obj) {
+        pthread_mutex_unlock(&g_intf_lock);
+        LOGE("no mem");
+        return -EINVAL;
+    }
+
+    /* initialize camera obj */
+    memset(cam_obj, 0, sizeof(mm_camera_obj_t));
+    cam_obj->ctrl_fd = -1;
+    cam_obj->ds_fd = -1;
+    cam_obj->ref_count++;
+    cam_obj->my_hdl = mm_camera_util_generate_handler(camera_idx);
+    cam_obj->vtbl.camera_handle = cam_obj->my_hdl; /* set handler */
+    cam_obj->vtbl.ops = &mm_camera_ops;
+    pthread_mutex_init(&cam_obj->cam_lock, NULL);
+    /* unlock global interface lock, if not, in dual camera use case,
+      * current open will block operation of another opened camera obj*/
+    pthread_mutex_lock(&cam_obj->cam_lock);
+    pthread_mutex_unlock(&g_intf_lock);
+
+    /* NOTE(review): mm_camera_open is assumed to release cam_lock before
+     * returning (same convention as the other wrappers) — otherwise the
+     * destroy below would act on a locked mutex; confirm in mm_camera.c. */
+    rc = mm_camera_open(cam_obj);
+
+    pthread_mutex_lock(&g_intf_lock);
+    if (rc != 0) {
+        LOGE("mm_camera_open err = %d", rc);
+        pthread_mutex_destroy(&cam_obj->cam_lock);
+        g_cam_ctrl.cam_obj[camera_idx] = NULL;
+        free(cam_obj);
+        cam_obj = NULL;
+        pthread_mutex_unlock(&g_intf_lock);
+        *camera_vtbl = NULL;
+        return rc;
+    } else {
+        LOGD("Open succeded\n");
+        g_cam_ctrl.cam_obj[camera_idx] = cam_obj;
+        pthread_mutex_unlock(&g_intf_lock);
+        *camera_vtbl = &cam_obj->vtbl;
+        return 0;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_load_shim_lib
+ *
+ * DESCRIPTION: dlopen the shim layer library, resolve its module-init
+ *              entry point, and invoke it with our shim ops table.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : status of load shim library (-1 on load/resolve failure,
+ *              otherwise the module-init return value)
+ *==========================================================================*/
+int32_t mm_camera_load_shim_lib()
+{
+    void *qdaemon_lib;
+    const char *error;
+
+    LOGD("E");
+    qdaemon_lib = dlopen(SHIMLAYER_LIB, RTLD_NOW);
+    if (NULL == qdaemon_lib) {
+        error = dlerror();
+        LOGE("dlopen failed with error %s", error ? error : "");
+        return -1;
+    }
+
+    /* resolve the module-init symbol into the file-scope function pointer */
+    *(void **)&mm_camera_shim_module_init =
+            dlsym(qdaemon_lib, "mct_shimlayer_process_module_init");
+    if (NULL == mm_camera_shim_module_init) {
+        error = dlerror();
+        LOGE("dlsym failed with error code %s", error ? error: "");
+        dlclose(qdaemon_lib);
+        return -1;
+    }
+
+    /* library handle is intentionally kept open for the process lifetime */
+    return mm_camera_shim_module_init(&g_cam_ctrl.cam_shim_ops);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_module_open_session
+ *
+ * DESCRIPTION: wrapper function to call shim layer API to open session.
+ *
+ * PARAMETERS :
+ *   @sessionid  : sessionID to open session
+ *   @evt_cb     : Event callback function
+ *
+ * RETURN     : cam_status_t type of status
+ *              0  -- success
+ *              non-zero error code -- failure (-1 if the shim op is unset)
+ *==========================================================================*/
+cam_status_t mm_camera_module_open_session(int sessionid,
+        mm_camera_shim_event_handler_func evt_cb)
+{
+    /* shim ops table is only populated after mm_camera_load_shim_lib() */
+    if (NULL == g_cam_ctrl.cam_shim_ops.mm_camera_shim_open_session) {
+        return -1;
+    }
+    return g_cam_ctrl.cam_shim_ops.mm_camera_shim_open_session(
+            sessionid, evt_cb);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_module_close_session
+ *
+ * DESCRIPTION: wrapper function to call shim layer API to close session
+ *
+ * PARAMETERS :
+ *   @session  : sessionID of the session to close
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              non-zero error code -- failure (-1 if the shim op is unset)
+ *==========================================================================*/
+int32_t mm_camera_module_close_session(int session)
+{
+    /* shim ops table is only populated after mm_camera_load_shim_lib() */
+    if (NULL == g_cam_ctrl.cam_shim_ops.mm_camera_shim_close_session) {
+        return -1;
+    }
+    return g_cam_ctrl.cam_shim_ops.mm_camera_shim_close_session(session);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_module_send_cmd
+ *
+ * DESCRIPTION: wrapper function to call shim layer API to forward a command
+ *              packet to the backend
+ *              (header previously copy-pasted from mm_camera_module_open_session)
+ *
+ * PARAMETERS :
+ *   @event  : shim command packet to send
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              non-zero error code -- failure (-1 if the shim op is unset)
+ *==========================================================================*/
+int32_t mm_camera_module_send_cmd(cam_shim_packet_t *event)
+{
+    int32_t rc = -1;
+    if(g_cam_ctrl.cam_shim_ops.mm_camera_shim_send_cmd) {
+        rc = g_cam_ctrl.cam_shim_ops.mm_camera_shim_send_cmd(event);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_module_event_handler
+ *
+ * DESCRIPTION: call back function for shim layer; routes backend events for
+ *              a session to the owning camera object's event queue.
+ *
+ * PARAMETERS :
+ *   @session_id : session the event belongs to
+ *   @event      : event payload from the backend (must not be NULL)
+ *
+ * RETURN     : TRUE on success, FALSE on bad input / unknown session
+ *==========================================================================*/
+int mm_camera_module_event_handler(uint32_t session_id, cam_event_t *event)
+{
+    mm_camera_event_t evt;
+
+    if (!event) {
+        LOGE("null event");
+        return FALSE;
+    }
+
+    LOGD("session_id:%d, cmd:0x%x", session_id, event->server_event_type);
+    /* struct copy; the memset that preceded this assignment was a dead
+     * store (immediately overwritten) and has been removed */
+    evt = *event;
+
+    mm_camera_obj_t *my_obj =
+         mm_camera_util_get_camera_by_session_id(session_id);
+    if (!my_obj) {
+        LOGE("my_obj:%p", my_obj);
+        return FALSE;
+    }
+    switch( evt.server_event_type) {
+       case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
+       case CAM_EVENT_TYPE_CAC_DONE:
+       case CAM_EVENT_TYPE_DAEMON_DIED:
+       case CAM_EVENT_TYPE_INT_TAKE_JPEG:
+       case CAM_EVENT_TYPE_INT_TAKE_RAW:
+           mm_camera_enqueue_evt(my_obj, &evt);
+           break;
+       default:
+           LOGE("cmd:%x from shim layer is not handled", evt.server_event_type);
+           break;
+   }
+   return TRUE;
+}
+
diff --git a/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_sock.c b/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_sock.c
new file mode 100644
index 0000000..85a5d3b
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_sock.c
@@ -0,0 +1,294 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <stdio.h>
+#include <stdlib.h>
+#include <errno.h>
+#include <string.h>
+
+// Camera dependencies
+#include "mm_camera_dbg.h"
+#include "mm_camera_sock.h"
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_socket_create
+ *
+ * DESCRIPTION: opens a domain socket tied to camera ID and socket type
+ *  @cam_id   : camera ID
+ *  @sock_type: socket type, TCP/UDP
+ *
+ * RETURN     : fd related to the domain socket
+ *==========================================================================*/
+int mm_camera_socket_create(int cam_id, mm_camera_sock_type_t sock_type)
+{
+    /* Opens an AF_UNIX socket and connects it to the per-camera daemon
+     * socket path (QCAMERA_DUMP_FRM_LOCATION/cam_socket<cam_id>).
+     * Returns the connected fd, or a negative value on failure. */
+    int socket_fd;
+    mm_camera_sock_addr_t sock_addr;
+    int sktype;
+    int rc;
+
+    /* Map the interface socket type onto the BSD socket type. */
+    switch (sock_type)
+    {
+      case MM_CAMERA_SOCK_TYPE_UDP:
+        sktype = SOCK_DGRAM;
+        break;
+      case MM_CAMERA_SOCK_TYPE_TCP:
+        sktype = SOCK_STREAM;
+        break;
+      default:
+        LOGE("unknown socket type =%d", sock_type);
+        return -1;
+    }
+    socket_fd = socket(AF_UNIX, sktype, 0);
+    if (socket_fd < 0) {
+        LOGE("error create socket fd =%d", socket_fd);
+        return socket_fd;
+    }
+
+    memset(&sock_addr, 0, sizeof(sock_addr));
+    sock_addr.addr_un.sun_family = AF_UNIX;
+    snprintf(sock_addr.addr_un.sun_path,
+             UNIX_PATH_MAX, QCAMERA_DUMP_FRM_LOCATION"cam_socket%d", cam_id);
+    rc = connect(socket_fd, &sock_addr.addr, sizeof(sock_addr.addr_un));
+    if (0 != rc) {
+      /* Log the failing descriptor *before* invalidating it; the original
+       * set socket_fd = -1 first, so the error trace always showed -1. */
+      LOGE("socket_fd=%d %s ", socket_fd, strerror(errno));
+      close(socket_fd);
+      socket_fd = -1;
+    }
+
+    LOGD("socket_fd=%d %s", socket_fd,
+        sock_addr.addr_un.sun_path);
+    return socket_fd;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_socket_close
+ *
+ * DESCRIPTION:  close domain socket by its fd
+ *   @fd      : file descriptor for the domain socket to be closed
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void mm_camera_socket_close(int fd)
+{
+    /* Close only valid descriptors; negative fds are silently ignored. */
+    if (fd < 0) {
+        return;
+    }
+    close(fd);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_socket_sendmsg
+ *
+ * DESCRIPTION:  send msg through domain socket
+ *   @fd      : socket fd
+ *   @msg     : pointer to msg to be sent over domain socket
+ *   @sendfd  : file descriptors to be sent
+ *
+ * RETURN     : the total bytes of sent msg
+ *==========================================================================*/
+int mm_camera_socket_sendmsg(
+  int fd,
+  void *msg,
+  size_t buf_size,
+  int sendfd)
+{
+    struct msghdr msgh;
+    struct iovec iov[1];
+    struct cmsghdr * cmsghp = NULL;
+    /* Ancillary-data buffer sized for exactly one file descriptor. */
+    char control[CMSG_SPACE(sizeof(int))];
+
+    if (msg == NULL) {
+      LOGD("msg is NULL");
+      return -1;
+    }
+    memset(&msgh, 0, sizeof(msgh));
+    msgh.msg_name = NULL;
+    msgh.msg_namelen = 0;
+
+    /* The payload travels in a single iovec entry. */
+    iov[0].iov_base = msg;
+    iov[0].iov_len = buf_size;
+    msgh.msg_iov = iov;
+    msgh.msg_iovlen = 1;
+    LOGD("iov_len=%llu",
+            (unsigned long long int)iov[0].iov_len);
+
+    msgh.msg_control = NULL;
+    msgh.msg_controllen = 0;
+
+    /* if sendfd is valid, we need to pass it through control msg */
+    if( sendfd >= 0) {
+      msgh.msg_control = control;
+      msgh.msg_controllen = sizeof(control);
+      cmsghp = CMSG_FIRSTHDR(&msgh);
+      if (cmsghp != NULL) {
+        LOGD("Got ctrl msg pointer");
+        /* SCM_RIGHTS: the kernel duplicates sendfd into the receiving
+         * process when the message is delivered. */
+        cmsghp->cmsg_level = SOL_SOCKET;
+        cmsghp->cmsg_type = SCM_RIGHTS;
+        cmsghp->cmsg_len = CMSG_LEN(sizeof(int));
+        *((int *)CMSG_DATA(cmsghp)) = sendfd;
+        LOGD("cmsg data=%d", *((int *) CMSG_DATA(cmsghp)));
+      } else {
+        LOGD("ctrl msg NULL");
+        return -1;
+      }
+    }
+
+    /* Returns sendmsg()'s byte count, or -1 on failure (errno set). */
+    return sendmsg(fd, &(msgh), 0);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_socket_bundle_sendmsg
+ *
+ * DESCRIPTION:  send msg through domain socket
+ *   @fd      : socket fd
+ *   @msg     : pointer to msg to be sent over domain socket
+ *   @sendfds : file descriptors to be sent
+ *   @numfds  : num of file descriptors to be sent
+ *
+ * RETURN     : the total bytes of sent msg
+ *==========================================================================*/
+int mm_camera_socket_bundle_sendmsg(
+  int fd,
+  void *msg,
+  size_t buf_size,
+  int sendfds[CAM_MAX_NUM_BUFS_PER_STREAM],
+  int numfds)
+{
+    /* Sends msg over the domain socket, attaching numfds descriptors
+     * from sendfds as one SCM_RIGHTS control message.
+     * Returns sendmsg()'s byte count, or -1 on invalid arguments. */
+    struct msghdr msgh;
+    struct iovec iov[1];
+    struct cmsghdr * cmsghp = NULL;
+    int *fds_ptr = NULL;
+
+    if (msg == NULL) {
+      LOGD("msg is NULL");
+      return -1;
+    }
+    /* Validate numfds before it sizes the VLA below: a negative count
+     * makes the array length undefined behavior, and a count above
+     * CAM_MAX_NUM_BUFS_PER_STREAM would overread sendfds in the memcpy. */
+    if ((numfds < 0) || (numfds > CAM_MAX_NUM_BUFS_PER_STREAM)) {
+      LOGE("invalid numfds=%d", numfds);
+      return -1;
+    }
+    char control[CMSG_SPACE(sizeof(int) * (size_t)numfds)];
+
+    memset(&msgh, 0, sizeof(msgh));
+    msgh.msg_name = NULL;
+    msgh.msg_namelen = 0;
+
+    /* The payload travels in a single iovec entry. */
+    iov[0].iov_base = msg;
+    iov[0].iov_len = buf_size;
+    msgh.msg_iov = iov;
+    msgh.msg_iovlen = 1;
+    LOGD("iov_len=%llu",
+            (unsigned long long int)iov[0].iov_len);
+
+    msgh.msg_control = NULL;
+    msgh.msg_controllen = 0;
+
+    /* if numfds is valid, we need to pass it through control msg */
+    if (numfds > 0) {
+      msgh.msg_control = control;
+      msgh.msg_controllen = sizeof(control);
+      cmsghp = CMSG_FIRSTHDR(&msgh);
+      if (cmsghp != NULL) {
+        /* SCM_RIGHTS: all numfds descriptors ride in one control block. */
+        cmsghp->cmsg_level = SOL_SOCKET;
+        cmsghp->cmsg_type = SCM_RIGHTS;
+        cmsghp->cmsg_len = CMSG_LEN(sizeof(int) * numfds);
+
+        fds_ptr = (int*) CMSG_DATA(cmsghp);
+        memcpy(fds_ptr, sendfds, sizeof(int) * numfds);
+      } else {
+        LOGE("ctrl msg NULL");
+        return -1;
+      }
+    }
+
+    return sendmsg(fd, &(msgh), 0);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_socket_recvmsg
+ *
+ * DESCRIPTION:  receive msg from domain socket.
+ *   @fd      : socket fd
+ *   @msg     : pointer to mm_camera_sock_msg_packet_t to hold incoming msg,
+ *              need be allocated by the caller
+ *   @buf_size: the size of the buf that holds incoming msg
+ *   @rcvdfd  : pointer to hold recvd file descriptor if not NULL.
+ *
+ * RETURN     : the total bytes of received msg
+ *==========================================================================*/
+int mm_camera_socket_recvmsg(
+  int fd,
+  void *msg,
+  uint32_t buf_size,
+  int *rcvdfd)
+{
+    /* Receives one message into msg (caller-allocated, buf_size bytes);
+     * if an SCM_RIGHTS control message carrying a single fd accompanies
+     * it, the fd is stored through rcvdfd (when non-NULL, else -1).
+     * Returns the received byte count, or <= 0 on failure. */
+    struct msghdr msgh;
+    struct iovec iov[1];
+    struct cmsghdr *cmsghp = NULL;
+    char control[CMSG_SPACE(sizeof(int))];
+    int rcvd_fd = -1;
+    int rcvd_len = 0;
+
+    /* buf_size is unsigned, so the original "<= 0" reduces to "== 0". */
+    if ( (msg == NULL) || (buf_size == 0) ) {
+      LOGE("msg buf is NULL");
+      return -1;
+    }
+
+    memset(&msgh, 0, sizeof(msgh));
+    msgh.msg_name = NULL;
+    msgh.msg_namelen = 0;
+    msgh.msg_control = control;
+    msgh.msg_controllen = sizeof(control);
+
+    iov[0].iov_base = msg;
+    iov[0].iov_len = buf_size;
+    msgh.msg_iov = iov;
+    msgh.msg_iovlen = 1;
+
+    if ( (rcvd_len = recvmsg(fd, &(msgh), 0)) <= 0) {
+      LOGE("recvmsg failed");
+      return rcvd_len;
+    }
+
+    /* msg_controllen is size_t on Linux; %zu matches (original used %zd). */
+    LOGD("msg_ctrl %p len %zu", msgh.msg_control,
+        msgh.msg_controllen);
+
+    /* Accept only a single-fd SCM_RIGHTS control message. */
+    if( ((cmsghp = CMSG_FIRSTHDR(&msgh)) != NULL) &&
+        (cmsghp->cmsg_len == CMSG_LEN(sizeof(int))) ) {
+      if (cmsghp->cmsg_level == SOL_SOCKET &&
+        cmsghp->cmsg_type == SCM_RIGHTS) {
+        LOGD("CtrlMsg is valid");
+        rcvd_fd = *((int *) CMSG_DATA(cmsghp));
+        LOGD("Receieved fd=%d", rcvd_fd);
+      } else {
+        /* Original passed no argument for %d (undefined behavior);
+         * supply __LINE__ as evidently intended. */
+        LOGE("Unexpected Control Msg. Line=%d", __LINE__);
+      }
+    }
+
+    if (rcvdfd) {
+      *rcvdfd = rcvd_fd;
+    }
+
+    return rcvd_len;
+}
diff --git a/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c b/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
new file mode 100644
index 0000000..fd90e83
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
@@ -0,0 +1,4703 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <stdlib.h>
+#include <pthread.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <media/msm_media_info.h>
+#define TIME_H <SYSTEM_HEADER_PREFIX/time.h>
+#include TIME_H
+#define IOCTL_H <SYSTEM_HEADER_PREFIX/ioctl.h>
+#include IOCTL_H
+
+// Camera dependencies
+#include "cam_semaphore.h"
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+/* internal function decalre */
+int32_t mm_stream_qbuf(mm_stream_t *my_obj,
+                       mm_camera_buf_def_t *buf);
+int32_t mm_stream_set_ext_mode(mm_stream_t * my_obj);
+int32_t mm_stream_set_fmt(mm_stream_t * my_obj);
+int32_t mm_stream_sync_info(mm_stream_t *my_obj);
+int32_t mm_stream_init_bufs(mm_stream_t * my_obj);
+int32_t mm_stream_deinit_bufs(mm_stream_t * my_obj);
+int32_t mm_stream_request_buf(mm_stream_t * my_obj);
+int32_t mm_stream_unreg_buf(mm_stream_t * my_obj);
+int32_t mm_stream_release(mm_stream_t *my_obj);
+int32_t mm_stream_set_parm(mm_stream_t *my_obj,
+                           cam_stream_parm_buffer_t *value);
+int32_t mm_stream_get_parm(mm_stream_t *my_obj,
+                           cam_stream_parm_buffer_t *value);
+int32_t mm_stream_do_action(mm_stream_t *my_obj,
+                            void *in_value);
+int32_t mm_stream_streamon(mm_stream_t *my_obj);
+int32_t mm_stream_streamoff(mm_stream_t *my_obj);
+int32_t mm_stream_read_msm_frame(mm_stream_t * my_obj,
+                                 mm_camera_buf_info_t* buf_info,
+                                 uint8_t num_planes);
+int32_t mm_stream_read_user_buf(mm_stream_t * my_obj,
+        mm_camera_buf_info_t* buf_info);
+int32_t mm_stream_write_user_buf(mm_stream_t * my_obj,
+        mm_camera_buf_def_t *buf);
+
+int32_t mm_stream_config(mm_stream_t *my_obj,
+                         mm_camera_stream_config_t *config);
+int32_t mm_stream_reg_buf(mm_stream_t * my_obj);
+int32_t mm_stream_buf_done(mm_stream_t * my_obj,
+                           mm_camera_buf_def_t *frame);
+int32_t mm_stream_get_queued_buf_count(mm_stream_t * my_obj);
+
+int32_t mm_stream_calc_offset(mm_stream_t *my_obj);
+int32_t mm_stream_calc_offset_preview(cam_stream_info_t *stream_info,
+                                      cam_dimension_t *dim,
+                                      cam_padding_info_t *padding,
+                                      cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_post_view(cam_format_t fmt,
+                                      cam_dimension_t *dim,
+                                      cam_stream_buf_plane_info_t *buf_planes);
+
+int32_t mm_stream_calc_offset_snapshot(cam_format_t fmt,
+                                       cam_dimension_t *dim,
+                                       cam_padding_info_t *padding,
+                                       cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_raw(cam_format_t fmt,
+                                  cam_dimension_t *dim,
+                                  cam_padding_info_t *padding,
+                                  cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_video(cam_format_t fmt,
+        cam_dimension_t *dim,
+        cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_metadata(cam_dimension_t *dim,
+                                       cam_padding_info_t *padding,
+                                       cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_postproc(cam_stream_info_t *stream_info,
+                                       cam_padding_info_t *padding,
+                                       cam_stream_buf_plane_info_t *plns);
+uint32_t mm_stream_calc_lcm(int32_t num1, int32_t num2);
+
+
+/* state machine function declare */
+int32_t mm_stream_fsm_inited(mm_stream_t * my_obj,
+                             mm_stream_evt_type_t evt,
+                             void * in_val,
+                             void * out_val);
+int32_t mm_stream_fsm_acquired(mm_stream_t * my_obj,
+                               mm_stream_evt_type_t evt,
+                               void * in_val,
+                               void * out_val);
+int32_t mm_stream_fsm_cfg(mm_stream_t * my_obj,
+                          mm_stream_evt_type_t evt,
+                          void * in_val,
+                          void * out_val);
+int32_t mm_stream_fsm_buffed(mm_stream_t * my_obj,
+                             mm_stream_evt_type_t evt,
+                             void * in_val,
+                             void * out_val);
+int32_t mm_stream_fsm_reg(mm_stream_t * my_obj,
+                          mm_stream_evt_type_t evt,
+                          void * in_val,
+                          void * out_val);
+int32_t mm_stream_fsm_active(mm_stream_t * my_obj,
+                             mm_stream_evt_type_t evt,
+                             void * in_val,
+                             void * out_val);
+uint32_t mm_stream_get_v4l2_fmt(cam_format_t fmt);
+
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_notify_channel
+ *
+ * DESCRIPTION: function to notify channel object on received buffer
+ *
+ * PARAMETERS :
+ *   @ch_obj  : channel object
+ *   @buf_info: ptr to struct storing buffer information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              0> -- failure
+ *==========================================================================*/
+int32_t mm_stream_notify_channel(struct mm_channel* ch_obj,
+        mm_camera_buf_info_t *buf_info)
+{
+    /* Hand the received buffer to the channel's command thread by
+     * enqueueing a DATA_CB node and posting its semaphore.
+     * Returns 0 on success, -ENODEV / -ENOMEM on failure. */
+    mm_camera_cmdcb_t *cmd_node;
+
+    if ((NULL == ch_obj) || (NULL == buf_info)) {
+        LOGD("Invalid channel/buffer");
+        return -ENODEV;
+    }
+
+    cmd_node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+    if (NULL == cmd_node) {
+        LOGE("No memory for mm_camera_node_t");
+        return -ENOMEM;
+    }
+
+    memset(cmd_node, 0, sizeof(mm_camera_cmdcb_t));
+    cmd_node->cmd_type = MM_CAMERA_CMD_TYPE_DATA_CB;
+    cmd_node->u.buf = *buf_info;
+
+    /* Queue for the channel command thread, then wake it up. */
+    cam_queue_enq(&(ch_obj->cmd_thread.cmd_queue), cmd_node);
+    cam_sem_post(&(ch_obj->cmd_thread.cmd_sem));
+
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_handle_rcvd_buf
+ *
+ * DESCRIPTION: function to handle newly received stream buffer
+ *
+ * PARAMETERS :
+ *   @cam_obj : stream object
+ *   @buf_info: ptr to struct storing buffer information
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void mm_stream_handle_rcvd_buf(mm_stream_t *my_obj,
+                               mm_camera_buf_info_t *buf_info,
+                               uint8_t has_cb)
+{
+    /* Fan a newly received buffer out to: the bundling channel, any linked
+     * channel (with an extra ref), and - when has_cb is set - this stream's
+     * own command thread for app data callbacks. */
+    int32_t rc = 0;
+    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+          my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    /* enqueue to super buf thread */
+    if (my_obj->is_bundled) {
+        rc = mm_stream_notify_channel(my_obj->ch_obj, buf_info);
+        if (rc < 0) {
+            LOGE("Unable to notify channel");
+        }
+    }
+
+    /* buf_lock guards the per-buffer refcount against concurrent
+     * buf_done / dispatch paths. */
+    pthread_mutex_lock(&my_obj->buf_lock);
+    if(my_obj->is_linked) {
+        /* need to add into super buf for linking, add ref count */
+        my_obj->buf_status[buf_info->buf->buf_idx].buf_refcnt++;
+
+        rc = mm_stream_notify_channel(my_obj->linked_obj, buf_info);
+        if (rc < 0) {
+            LOGE("Unable to notify channel");
+        }
+    }
+    pthread_mutex_unlock(&my_obj->buf_lock);
+
+    /* cmd_lock protects cmd_thread.is_active; only enqueue the data CB
+     * while the command thread is running. */
+    pthread_mutex_lock(&my_obj->cmd_lock);
+    if(has_cb && my_obj->cmd_thread.is_active) {
+        mm_camera_cmdcb_t* node = NULL;
+
+        /* send cam_sem_post to wake up cmd thread to dispatch dataCB */
+        node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+        if (NULL != node) {
+            memset(node, 0, sizeof(mm_camera_cmdcb_t));
+            node->cmd_type = MM_CAMERA_CMD_TYPE_DATA_CB;
+            node->u.buf = *buf_info;
+
+            /* enqueue to cmd thread */
+            cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+            /* wake up cmd thread */
+            cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+        } else {
+            LOGE("No memory for mm_camera_node_t");
+        }
+    }
+    pthread_mutex_unlock(&my_obj->cmd_lock);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_dispatch_sync_data
+ *
+ * DESCRIPTION: dispatch stream buffer to registered users on poll thread
+ *
+ * PARAMETERS :
+ *   @cmd_cb  : ptr storing stream buffer information
+ *   @userdata: user data ptr (stream object)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_stream_dispatch_sync_data(mm_stream_t * my_obj,
+         mm_stream_data_cb_t *buf_cb, mm_camera_buf_info_t *buf_info)
+{
+    /* Invoke one SYNC-type registered callback inline (on the poll thread)
+     * with a single-buffer super buffer; decrement and clear finite-count
+     * callbacks once exhausted. */
+    mm_camera_super_buf_t super_buf;
+
+    if (NULL == my_obj || buf_info == NULL ||
+            buf_cb == NULL) {
+        return;
+    }
+
+    memset(&super_buf, 0, sizeof(mm_camera_super_buf_t));
+    super_buf.num_bufs = 1;
+    super_buf.bufs[0] = buf_info->buf;
+    super_buf.camera_handle = my_obj->ch_obj->cam_obj->my_hdl;
+    super_buf.ch_id = my_obj->ch_obj->my_hdl;
+    /* buf_cb was already NULL-checked by the guard above; the original
+     * redundantly re-tested it here. cb_count != 0 covers both the
+     * infinite (<0) and finite (>0) cases. */
+    if ((buf_cb->cb_type == MM_CAMERA_STREAM_CB_TYPE_SYNC)
+            && (buf_cb->cb_count != 0)) {
+        /* callback */
+        buf_cb->cb(&super_buf, buf_cb->user_data);
+
+        /* if >0, reduce count by 1 every time we called CB until reaches 0
+             * when count reach 0, reset the buf_cb to have no CB */
+        if (buf_cb->cb_count > 0) {
+            buf_cb->cb_count--;
+            if (0 == buf_cb->cb_count) {
+                buf_cb->cb = NULL;
+                buf_cb->user_data = NULL;
+            }
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_data_notify
+ *
+ * DESCRIPTION: callback to handle data notify from kernel
+ *
+ * PARAMETERS :
+ *   @user_data : user data ptr (stream object)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_stream_data_notify(void* user_data)
+{
+    /* Poll-thread callback for a kernel data-ready notification: dequeue
+     * the frame, fire SYNC callbacks immediately, account ref counts for
+     * bundling and ASYNC callbacks, then hand off for dispatch. */
+    mm_stream_t *my_obj = (mm_stream_t*)user_data;
+    int32_t i, rc;
+    uint8_t has_cb = 0, length = 0;
+    mm_camera_buf_info_t buf_info;
+
+    if (NULL == my_obj) {
+        return;
+    }
+
+    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+          my_obj->my_hdl, my_obj->fd, my_obj->state);
+    if (MM_STREAM_STATE_ACTIVE != my_obj->state) {
+        /* this Cb will only received in active_stream_on state
+         * if not so, return here */
+        LOGE("ERROR!! Wrong state (%d) to receive data notify!",
+                    my_obj->state);
+        return;
+    }
+
+    /* Batch-mode frames are read as a single plane; otherwise read the
+     * full per-format plane count. */
+    if (my_obj->stream_info->streaming_mode == CAM_STREAMING_MODE_BATCH) {
+        length = 1;
+    } else {
+        length = my_obj->frame_offset.num_planes;
+    }
+
+    memset(&buf_info, 0, sizeof(mm_camera_buf_info_t));
+    rc = mm_stream_read_msm_frame(my_obj, &buf_info,
+        (uint8_t)length);
+    if (rc != 0) {
+        return;
+    }
+    uint32_t idx = buf_info.buf->buf_idx;
+
+    /* cb_lock protects the registered-callback table during iteration. */
+    pthread_mutex_lock(&my_obj->cb_lock);
+    for (i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+        if(NULL != my_obj->buf_cb[i].cb) {
+            if (my_obj->buf_cb[i].cb_type == MM_CAMERA_STREAM_CB_TYPE_SYNC) {
+                /*For every SYNC callback, send data*/
+                mm_stream_dispatch_sync_data(my_obj,
+                        &my_obj->buf_cb[i], &buf_info);
+            } else {
+                /* for every ASYNC CB, need ref count */
+                has_cb = 1;
+            }
+        }
+    }
+    pthread_mutex_unlock(&my_obj->cb_lock);
+
+    pthread_mutex_lock(&my_obj->buf_lock);
+    /* update buffer location */
+    my_obj->buf_status[idx].in_kernel = 0;
+
+    /* update buf ref count */
+    if (my_obj->is_bundled) {
+        /* need to add into super buf since bundled, add ref count */
+        my_obj->buf_status[idx].buf_refcnt++;
+    }
+    /* One extra ref when at least one ASYNC callback will consume it;
+     * released by mm_stream_buf_done after dispatch. */
+    my_obj->buf_status[idx].buf_refcnt =
+        (uint8_t)(my_obj->buf_status[idx].buf_refcnt + has_cb);
+    pthread_mutex_unlock(&my_obj->buf_lock);
+
+    mm_stream_handle_rcvd_buf(my_obj, &buf_info, has_cb);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_dispatch_app_data
+ *
+ * DESCRIPTION: dispatch stream buffer to registered users
+ *
+ * PARAMETERS :
+ *   @cmd_cb  : ptr storing stream buffer information
+ *   @userdata: user data ptr (stream object)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_stream_dispatch_app_data(mm_camera_cmdcb_t *cmd_cb,
+                                        void* user_data)
+{
+    /* Command-thread handler: deliver one buffer to every registered
+     * non-SYNC callback, taking a ref per delivery, then drop the ref
+     * added when the buffer was received. */
+    int i;
+    mm_stream_t * my_obj = (mm_stream_t *)user_data;
+    mm_camera_buf_info_t* buf_info = NULL;
+    mm_camera_super_buf_t super_buf;
+
+    if (NULL == my_obj) {
+        return;
+    }
+    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+          my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    if (MM_CAMERA_CMD_TYPE_DATA_CB != cmd_cb->cmd_type) {
+        LOGE("Wrong cmd_type (%d) for dataCB",
+                    cmd_cb->cmd_type);
+        return;
+    }
+
+    /* Wrap the single buffer in a super buffer for the app callback. */
+    buf_info = &cmd_cb->u.buf;
+    memset(&super_buf, 0, sizeof(mm_camera_super_buf_t));
+    super_buf.num_bufs = 1;
+    super_buf.bufs[0] = buf_info->buf;
+    super_buf.camera_handle = my_obj->ch_obj->cam_obj->my_hdl;
+    super_buf.ch_id = my_obj->ch_obj->my_hdl;
+
+    /* cb_lock held across the whole loop so callbacks cannot be
+     * unregistered mid-dispatch; buf_lock is taken briefly per ref. */
+    pthread_mutex_lock(&my_obj->cb_lock);
+    for(i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+        if(NULL != my_obj->buf_cb[i].cb
+                && (my_obj->buf_cb[i].cb_type !=
+                MM_CAMERA_STREAM_CB_TYPE_SYNC)) {
+            if (my_obj->buf_cb[i].cb_count != 0) {
+                /* if <0, means infinite CB
+                 * if >0, means CB for certain times
+                 * both case we need to call CB */
+
+                /* increase buf ref cnt */
+                pthread_mutex_lock(&my_obj->buf_lock);
+                my_obj->buf_status[buf_info->buf->buf_idx].buf_refcnt++;
+                pthread_mutex_unlock(&my_obj->buf_lock);
+
+                /* callback */
+                my_obj->buf_cb[i].cb(&super_buf,
+                                     my_obj->buf_cb[i].user_data);
+            }
+
+            /* if >0, reduce count by 1 every time we called CB until reaches 0
+             * when count reach 0, reset the buf_cb to have no CB */
+            if (my_obj->buf_cb[i].cb_count > 0) {
+                my_obj->buf_cb[i].cb_count--;
+                if (0 == my_obj->buf_cb[i].cb_count) {
+                    my_obj->buf_cb[i].cb = NULL;
+                    my_obj->buf_cb[i].user_data = NULL;
+                }
+            }
+        }
+    }
+    pthread_mutex_unlock(&my_obj->cb_lock);
+
+    /* do buf_done since we increased refcnt by one when has_cb */
+    mm_stream_buf_done(my_obj, buf_info->buf);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_fsm_fn
+ *
+ * DESCRIPTION: stream finite state machine entry function. Depends on stream
+ *              state, incoming event will be handled differently.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a stream object
+ *   @evt      : stream event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_fn(mm_stream_t *my_obj,
+                         mm_stream_evt_type_t evt,
+                         void * in_val,
+                         void * out_val)
+{
+    /* Stream state-machine entry point: routes the event to the handler
+     * for the stream's current state. Returns 0 on success, -1 on
+     * failure or when the state does not handle events. */
+    int32_t ret = -1;
+
+    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+          my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    switch (my_obj->state) {
+    case MM_STREAM_STATE_INITED:
+        ret = mm_stream_fsm_inited(my_obj, evt, in_val, out_val);
+        break;
+    case MM_STREAM_STATE_ACQUIRED:
+        ret = mm_stream_fsm_acquired(my_obj, evt, in_val, out_val);
+        break;
+    case MM_STREAM_STATE_CFG:
+        ret = mm_stream_fsm_cfg(my_obj, evt, in_val, out_val);
+        break;
+    case MM_STREAM_STATE_BUFFED:
+        ret = mm_stream_fsm_buffed(my_obj, evt, in_val, out_val);
+        break;
+    case MM_STREAM_STATE_REG:
+        ret = mm_stream_fsm_reg(my_obj, evt, in_val, out_val);
+        break;
+    case MM_STREAM_STATE_ACTIVE:
+        ret = mm_stream_fsm_active(my_obj, evt, in_val, out_val);
+        break;
+    case MM_STREAM_STATE_NOTUSED:
+        LOGD("Not handling evt in unused state");
+        break;
+    default:
+        LOGD("Not a valid state (%d)", my_obj->state);
+        break;
+    }
+    LOGD("X rc =%d",ret);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_fsm_inited
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in INITED
+ *              state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a stream object
+ *   @evt      : stream event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_inited(mm_stream_t *my_obj,
+                             mm_stream_evt_type_t evt,
+                             void * in_val,
+                             void * out_val)
+{
+    /* INITED-state handler: only MM_STREAM_EVT_ACQUIRE is valid here.
+     * Acquiring opens the camera device node and sets the V4L2 extended
+     * mode; on success the stream transitions to ACQUIRED. */
+    int32_t rc = 0;
+    char dev_name[MM_CAMERA_DEV_NAME_LEN];
+    const char *dev_name_value = NULL;
+    if (NULL == my_obj) {
+      LOGE("NULL camera object\n");
+      return -1;
+    }
+
+    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+          my_obj->my_hdl, my_obj->fd, my_obj->state);
+    switch(evt) {
+    case MM_STREAM_EVT_ACQUIRE:
+        if ((NULL == my_obj->ch_obj) || (NULL == my_obj->ch_obj->cam_obj)) {
+            LOGE("NULL channel or camera obj\n");
+            rc = -1;
+            break;
+        }
+
+        /* Resolve the /dev node name from the owning camera handle. */
+        dev_name_value = mm_camera_util_get_dev_name(my_obj->ch_obj->cam_obj->my_hdl);
+        if (NULL == dev_name_value) {
+            LOGE("NULL device name\n");
+            rc = -1;
+            break;
+        }
+
+        snprintf(dev_name, sizeof(dev_name), "/dev/%s",
+                 dev_name_value);
+
+        /* Non-blocking open: frame reads are driven by the poll thread. */
+        my_obj->fd = open(dev_name, O_RDWR | O_NONBLOCK);
+        if (my_obj->fd < 0) {
+            LOGE("open dev returned %d\n", my_obj->fd);
+            rc = -1;
+            break;
+        }
+        LOGD("open dev fd = %d\n", my_obj->fd);
+        rc = mm_stream_set_ext_mode(my_obj);
+        if (0 == rc) {
+            my_obj->state = MM_STREAM_STATE_ACQUIRED;
+        } else {
+            /* failed setting ext_mode
+             * close fd */
+            close(my_obj->fd);
+            my_obj->fd = -1;
+            break;
+        }
+        break;
+    default:
+        LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+                    my_obj->state, evt, in_val, out_val);
+        break;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_fsm_acquired
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in AQUIRED
+ *              state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a stream object
+ *   @evt      : stream event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_acquired(mm_stream_t *my_obj,
+                               mm_stream_evt_type_t evt,
+                               void * in_val,
+                               void * out_val)
+{
+    /* ACQUIRED-state handler: supports stream configuration (-> CFG),
+     * release (-> NOTUSED), and stream parameter set/get. */
+    int32_t rc = 0;
+
+    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+          my_obj->my_hdl, my_obj->fd, my_obj->state);
+    switch(evt) {
+    case MM_STREAM_EVT_SET_FMT:
+        {
+            mm_camera_stream_config_t *config =
+                (mm_camera_stream_config_t *)in_val;
+
+            rc = mm_stream_config(my_obj, config);
+
+            /* change state to configed */
+            /* NOTE(review): the state moves to CFG even when
+             * mm_stream_config fails (rc != 0) -- confirm whether
+             * callers rely on this unconditional transition. */
+            my_obj->state = MM_STREAM_STATE_CFG;
+
+            break;
+        }
+    case MM_STREAM_EVT_RELEASE:
+        rc = mm_stream_release(my_obj);
+        /* change state to not used */
+         my_obj->state = MM_STREAM_STATE_NOTUSED;
+        break;
+    case MM_STREAM_EVT_SET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_set_parm(my_obj, payload->parms);
+        }
+        break;
+    case MM_STREAM_EVT_GET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_get_parm(my_obj, payload->parms);
+        }
+        break;
+    default:
+        LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+                    my_obj->state, evt, in_val, out_val);
+    }
+    LOGD("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_fsm_cfg
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in CONFIGURED
+ *              state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a stream object
+ *   @evt      : stream event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_cfg(mm_stream_t * my_obj,
+                          mm_stream_evt_type_t evt,
+                          void * in_val,
+                          void * out_val)
+{
+    int32_t rc = 0;
+    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+          my_obj->my_hdl, my_obj->fd, my_obj->state);
+    switch(evt) {
+    case MM_STREAM_EVT_SET_FMT:
+        {
+            mm_camera_stream_config_t *config =
+                (mm_camera_stream_config_t *)in_val;
+
+            rc = mm_stream_config(my_obj, config);
+
+            /* change state to configed */
+            my_obj->state = MM_STREAM_STATE_CFG;
+
+            break;
+        }
+    case MM_STREAM_EVT_RELEASE:
+        rc = mm_stream_release(my_obj);
+        my_obj->state = MM_STREAM_STATE_NOTUSED;
+        break;
+    case MM_STREAM_EVT_SET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_set_parm(my_obj, payload->parms);
+        }
+        break;
+    case MM_STREAM_EVT_GET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_get_parm(my_obj, payload->parms);
+        }
+        break;
+    case MM_STREAM_EVT_GET_BUF:
+        rc = mm_stream_init_bufs(my_obj);
+        /* change state to buff allocated */
+        if(0 == rc) {
+            my_obj->state = MM_STREAM_STATE_BUFFED;
+        }
+        break;
+    default:
+        LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+                    my_obj->state, evt, in_val, out_val);
+    }
+    LOGD("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_fsm_buffed
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in BUFFED
+ *              state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a stream object
+ *   @evt      : stream event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_buffed(mm_stream_t * my_obj,
+                             mm_stream_evt_type_t evt,
+                             void * in_val,
+                             void * out_val)
+{
+    int32_t rc = 0;
+    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+          my_obj->my_hdl, my_obj->fd, my_obj->state);
+    switch(evt) {
+    case MM_STREAM_EVT_PUT_BUF:
+        rc = mm_stream_deinit_bufs(my_obj);
+        /* change state to configed */
+        my_obj->state = MM_STREAM_STATE_CFG;
+        break;
+    case MM_STREAM_EVT_REG_BUF:
+        rc = mm_stream_reg_buf(my_obj);
+        /* change state to regged */
+        if(0 == rc) {
+            my_obj->state = MM_STREAM_STATE_REG;
+        }
+        break;
+    case MM_STREAM_EVT_SET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_set_parm(my_obj, payload->parms);
+        }
+        break;
+    case MM_STREAM_EVT_GET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_get_parm(my_obj, payload->parms);
+        }
+        break;
+    default:
+        LOGW("invalid state (%d) for evt (%d), in(%p), out(%p)",
+                    my_obj->state, evt, in_val, out_val);
+    }
+    LOGD("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_fsm_reg
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in REGGED
+ *              state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a stream object
+ *   @evt      : stream event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
int32_t mm_stream_fsm_reg(mm_stream_t * my_obj,
                          mm_stream_evt_type_t evt,
                          void * in_val,
                          void * out_val)
{
    int32_t rc = 0;
    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
          my_obj->my_hdl, my_obj->fd, my_obj->state);

    switch(evt) {
    case MM_STREAM_EVT_UNREG_BUF:
        rc = mm_stream_unreg_buf(my_obj);

        /* change state to buffed */
        my_obj->state = MM_STREAM_STATE_BUFFED;
        break;
    case MM_STREAM_EVT_START:
        {
            uint8_t has_cb = 0;
            uint8_t i;
            /* launch cmd thread if CB is not null */
            /* Only ASYNC callbacks need the app-data dispatch thread;
             * SYNC callbacks (slot 0) are not dispatched through it. */
            pthread_mutex_lock(&my_obj->cb_lock);
            for (i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
                if((NULL != my_obj->buf_cb[i].cb) &&
                        (my_obj->buf_cb[i].cb_type != MM_CAMERA_STREAM_CB_TYPE_SYNC)) {
                    has_cb = 1;
                    break;
                }
            }
            pthread_mutex_unlock(&my_obj->cb_lock);

            pthread_mutex_lock(&my_obj->cmd_lock);
            if (has_cb) {
                /* NOTE(review): the launch return value is not checked; a
                 * failed thread launch would go unnoticed here — confirm
                 * whether that is intentional. */
                snprintf(my_obj->cmd_thread.threadName, THREAD_NAME_SIZE, "CAM_StrmAppData");
                mm_camera_cmd_thread_launch(&my_obj->cmd_thread,
                                            mm_stream_dispatch_app_data,
                                            (void *)my_obj);
            }
            pthread_mutex_unlock(&my_obj->cmd_lock);

            /* Transition to ACTIVE before issuing stream-on; rolled back
             * below (together with the cmd thread) if stream-on fails. */
            my_obj->state = MM_STREAM_STATE_ACTIVE;
            rc = mm_stream_streamon(my_obj);
            if (0 != rc) {
                /* failed stream on, need to release cmd thread if it's launched */
                pthread_mutex_lock(&my_obj->cmd_lock);
                if (has_cb) {
                    mm_camera_cmd_thread_release(&my_obj->cmd_thread);
                }
                pthread_mutex_unlock(&my_obj->cmd_lock);
                my_obj->state = MM_STREAM_STATE_REG;
                break;
            }
        }
        break;
    case MM_STREAM_EVT_SET_PARM:
        {
            mm_evt_paylod_set_get_stream_parms_t *payload =
                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
            rc = mm_stream_set_parm(my_obj, payload->parms);
        }
        break;
    case MM_STREAM_EVT_GET_PARM:
        {
            mm_evt_paylod_set_get_stream_parms_t *payload =
                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
            rc = mm_stream_get_parm(my_obj, payload->parms);
        }
        break;
    default:
        LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
                    my_obj->state, evt, in_val, out_val);
    }
    LOGD("X rc = %d", rc);
    return rc;
}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_fsm_active
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in ACTIVE
+ *              state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a stream object
+ *   @evt      : stream event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_active(mm_stream_t * my_obj,
+                             mm_stream_evt_type_t evt,
+                             void * in_val,
+                             void * out_val)
+{
+    int32_t rc = 0;
+    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+          my_obj->my_hdl, my_obj->fd, my_obj->state);
+    switch(evt) {
+    case MM_STREAM_EVT_QBUF:
+        rc = mm_stream_buf_done(my_obj, (mm_camera_buf_def_t *)in_val);
+        break;
+    case MM_STREAM_EVT_GET_QUEUED_BUF_COUNT:
+        rc = mm_stream_get_queued_buf_count(my_obj);
+        break;
+    case MM_STREAM_EVT_STOP:
+        {
+            uint8_t has_cb = 0;
+            uint8_t i;
+            rc = mm_stream_streamoff(my_obj);
+
+            pthread_mutex_lock(&my_obj->cb_lock);
+            for (i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+                if(NULL != my_obj->buf_cb[i].cb
+                        && my_obj->buf_cb[i].cb_type != MM_CAMERA_STREAM_CB_TYPE_SYNC) {
+                    has_cb = 1;
+                    break;
+                }
+            }
+            pthread_mutex_unlock(&my_obj->cb_lock);
+
+            pthread_mutex_lock(&my_obj->cmd_lock);
+            if (has_cb) {
+                mm_camera_cmd_thread_release(&my_obj->cmd_thread);
+            }
+            pthread_mutex_unlock(&my_obj->cmd_lock);
+            my_obj->state = MM_STREAM_STATE_REG;
+        }
+        break;
+    case MM_STREAM_EVT_SET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_set_parm(my_obj, payload->parms);
+        }
+        break;
+    case MM_STREAM_EVT_GET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_get_parm(my_obj, payload->parms);
+        }
+        break;
+    case MM_STREAM_EVT_DO_ACTION:
+        rc = mm_stream_do_action(my_obj, in_val);
+        break;
+    default:
+        LOGE("invalid state (%d) for evt (%d), in(%p), out(%p)",
+                    my_obj->state, evt, in_val, out_val);
+    }
+    LOGD("X rc = %d", rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_map_buf_ops
+ *
+ * DESCRIPTION: ops for mapping stream buffer via domain socket to server.
+ *              This function will be passed to upper layer as part of ops table
+ *              to be used by upper layer when allocating stream buffers and mapping
+ *              buffers to server via domain socket.
+ *
+ * PARAMETERS :
+ *   @frame_idx    : index of buffer within the stream buffers, only valid if
+ *                   buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx    : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plane_idx is the
+ *                   index to plane (0..num_of_planes)
+ *   @fd           : file descriptor of the buffer
+ *   @size         : size of the buffer
+ *   @userdata     : user data ptr (stream object)
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_stream_map_buf_ops(uint32_t frame_idx,
+        int32_t plane_idx, int fd, size_t size,
+        void *buffer, cam_mapping_buf_type type,
+        void *userdata)
+{
+    mm_stream_t *my_obj = (mm_stream_t *)userdata;
+    return mm_stream_map_buf(my_obj,
+            type, frame_idx, plane_idx, fd, size, buffer);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_bundled_map_buf_ops
+ *
+ * DESCRIPTION: ops for mapping bundled stream buffers via domain socket to server.
+ *              This function will be passed to upper layer as part of ops table
+ *              to be used by upper layer when allocating stream buffers and mapping
+ *              buffers to server via domain socket.
+ *
+ * PARAMETERS :
+ *   @buf_map_list : list of buffer mapping information
+ *   @userdata     : user data ptr (stream object)
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_stream_bundled_map_buf_ops(
+        const cam_buf_map_type_list *buf_map_list,
+        void *userdata)
+{
+    mm_stream_t *my_obj = (mm_stream_t *)userdata;
+    return mm_stream_map_bufs(my_obj,
+                              buf_map_list);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_unmap_buf_ops
+ *
+ * DESCRIPTION: ops for unmapping stream buffer via domain socket to server.
+ *              This function will be passed to upper layer as part of ops table
+ *              to be used by upper layer when allocating stream buffers and unmapping
+ *              buffers to server via domain socket.
+ *
+ * PARAMETERS :
+ *   @frame_idx    : index of buffer within the stream buffers, only valid if
+ *                   buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx    : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plane_idx is the
+ *                   index to plane (0..num_of_planes)
+ *   @userdata     : user data ptr (stream object)
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_stream_unmap_buf_ops(uint32_t frame_idx,
+                                       int32_t plane_idx,
+                                       cam_mapping_buf_type type,
+                                       void *userdata)
+{
+    mm_stream_t *my_obj = (mm_stream_t *)userdata;
+    return mm_stream_unmap_buf(my_obj,
+                               type,
+                               frame_idx,
+                               plane_idx);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_config
+ *
+ * DESCRIPTION: configure a stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @config       : stream configuration
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_config(mm_stream_t *my_obj,
+                         mm_camera_stream_config_t *config)
+{
+    int32_t rc = 0;
+    int32_t cb_index = 0;
+
+    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+          my_obj->my_hdl, my_obj->fd, my_obj->state);
+    my_obj->stream_info = config->stream_info;
+    my_obj->buf_num = (uint8_t) config->stream_info->num_bufs;
+    my_obj->mem_vtbl = config->mem_vtbl;
+    my_obj->padding_info = config->padding_info;
+
+    if (config->stream_cb_sync != NULL) {
+        /* SYNC callback is always placed at index 0*/
+        my_obj->buf_cb[cb_index].cb = config->stream_cb_sync;
+        my_obj->buf_cb[cb_index].user_data = config->userdata;
+        my_obj->buf_cb[cb_index].cb_count = -1; /* infinite by default */
+        my_obj->buf_cb[cb_index].cb_type = MM_CAMERA_STREAM_CB_TYPE_SYNC;
+        cb_index++;
+    }
+    my_obj->buf_cb[cb_index].cb = config->stream_cb;
+    my_obj->buf_cb[cb_index].user_data = config->userdata;
+    my_obj->buf_cb[cb_index].cb_count = -1; /* infinite by default */
+    my_obj->buf_cb[cb_index].cb_type = MM_CAMERA_STREAM_CB_TYPE_ASYNC;
+
+    rc = mm_stream_sync_info(my_obj);
+    if (rc == 0) {
+        rc = mm_stream_set_fmt(my_obj);
+        if (rc < 0) {
+            LOGE("mm_stream_set_fmt failed %d",
+                     rc);
+        }
+    }
+
+    my_obj->map_ops.map_ops = mm_stream_map_buf_ops;
+    my_obj->map_ops.bundled_map_ops = mm_stream_bundled_map_buf_ops;
+    my_obj->map_ops.unmap_ops = mm_stream_unmap_buf_ops;
+    my_obj->map_ops.userdata = my_obj;
+
+    if(my_obj->mem_vtbl.set_config_ops != NULL) {
+        my_obj->mem_vtbl.set_config_ops(&my_obj->map_ops,
+                my_obj->mem_vtbl.user_data);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_release
+ *
+ * DESCRIPTION: release a stream resource
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
int32_t mm_stream_release(mm_stream_t *my_obj)
{
    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
          my_obj->my_hdl, my_obj->fd, my_obj->state);

    /* Clear per-buffer status bookkeeping under the buffer lock. */
    pthread_mutex_lock(&my_obj->buf_lock);
    memset(my_obj->buf_status, 0, sizeof(my_obj->buf_status));
    pthread_mutex_unlock(&my_obj->buf_lock);

    /* close fd */
    if (my_obj->fd >= 0) {
#ifndef DAEMON_PRESENT
        /* Daemon-less path: tell the camera module (via the shim layer)
         * to delete the server-side stream before closing the device. */
        int32_t rc = 0;
        cam_shim_packet_t *shim_cmd;
        cam_shim_cmd_data shim_cmd_data;
        mm_camera_obj_t *cam_obj = my_obj->ch_obj->cam_obj;

        memset(&shim_cmd_data, 0, sizeof(shim_cmd_data));
        shim_cmd_data.command = MSM_CAMERA_PRIV_DEL_STREAM;
        shim_cmd_data.stream_id = my_obj->server_stream_id;
        shim_cmd_data.value = NULL;
        shim_cmd = mm_camera_create_shim_cmd_packet(CAM_SHIM_SET_PARM,
                cam_obj->sessionid, &shim_cmd_data);
        rc = mm_camera_module_send_cmd(shim_cmd);
        if (rc < 0) {
            /* best-effort: deletion failure is logged, release continues */
            LOGE("failed to DELETE STREAM");
        }
        mm_camera_destroy_shim_cmd_packet(shim_cmd);
#endif /* DAEMON_PRESENT */
        close(my_obj->fd);
    }

    /* destroy mutex */
    pthread_cond_destroy(&my_obj->buf_cond);
    pthread_mutex_destroy(&my_obj->buf_lock);
    pthread_mutex_destroy(&my_obj->cb_lock);
    pthread_mutex_destroy(&my_obj->cmd_lock);

    /* reset stream obj */
    /* The memset zeroes the whole object (state becomes NOTUSED == 0);
     * the fd sentinel is restored afterwards so the slot reads as
     * "no open device". */
    memset(my_obj, 0, sizeof(mm_stream_t));
    my_obj->fd = -1;

    return 0;
}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_streamon
+ *
+ * DESCRIPTION: stream on a stream. sending v4l2 request to kernel
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_streamon(mm_stream_t *my_obj)
+{
+    int32_t rc = 0;
+    int8_t i;
+    enum v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+
+    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+          my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    pthread_mutex_lock(&my_obj->buf_lock);
+    for (i = 0; i < my_obj->buf_num; i++) {
+        if ((my_obj->buf_status[i].map_status == 0) &&
+                (my_obj->buf_status[i].in_kernel)) {
+            LOGD("waiting for mapping to done: strm fd = %d",
+                     my_obj->fd);
+            struct timespec ts;
+            clock_gettime(CLOCK_REALTIME, &ts);
+            ts.tv_sec += WAIT_TIMEOUT;
+            rc = pthread_cond_timedwait(&my_obj->buf_cond, &my_obj->buf_lock, &ts);
+            if (rc == ETIMEDOUT) {
+                LOGE("Timed out. Abort stream-on \n");
+                rc = -1;
+            }
+            break;
+        } else if (my_obj->buf_status[i].map_status < 0) {
+            LOGD("Buffer mapping failed. Abort Stream On");
+            rc = -1;
+            break;
+        }
+    }
+    pthread_mutex_unlock(&my_obj->buf_lock);
+
+    if (rc < 0) {
+        /* remove fd from data poll thread in case of failure */
+        mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0],
+                my_obj->my_hdl, mm_camera_sync_call);
+        return rc;
+    }
+    mm_camera_obj_t *cam_obj = my_obj->ch_obj->cam_obj;
+    LOGD("E, my_handle = 0x%x, fd = %d, state = %d session_id:%d stream_id:%d",
+            my_obj->my_hdl, my_obj->fd, my_obj->state, cam_obj->sessionid,
+            my_obj->server_stream_id);
+
+    rc = ioctl(my_obj->fd, VIDIOC_STREAMON, &buf_type);
+    if (rc < 0 && my_obj->stream_info->num_bufs != 0) {
+        LOGE("ioctl VIDIOC_STREAMON failed: rc=%d, errno %d",
+                rc, errno);
+        goto error_case;
+    }
+
+#ifndef DAEMON_PRESENT
+    cam_shim_packet_t *shim_cmd;
+    cam_shim_cmd_data shim_cmd_data;
+
+    memset(&shim_cmd_data, 0, sizeof(shim_cmd_data));
+    shim_cmd_data.command = MSM_CAMERA_PRIV_STREAM_ON;
+    shim_cmd_data.stream_id = my_obj->server_stream_id;
+    shim_cmd_data.value = NULL;
+    shim_cmd = mm_camera_create_shim_cmd_packet(CAM_SHIM_SET_PARM,
+            cam_obj->sessionid, &shim_cmd_data);
+    rc = mm_camera_module_send_cmd(shim_cmd);
+    mm_camera_destroy_shim_cmd_packet(shim_cmd);
+    if (rc < 0) {
+        LOGE("Module StreamON failed: rc=%d", rc);
+        ioctl(my_obj->fd, VIDIOC_STREAMOFF, &buf_type);
+        goto error_case;
+    }
+#endif
+    LOGD("X rc = %d",rc);
+    return rc;
+error_case:
+     /* remove fd from data poll thread in case of failure */
+     mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0],
+             my_obj->my_hdl, mm_camera_sync_call);
+
+    LOGD("X rc = %d",rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_streamoff
+ *
+ * DESCRIPTION: stream off a stream. sending v4l2 request to kernel
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_streamoff(mm_stream_t *my_obj)
+{
+    int32_t rc = 0;
+    enum v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+          my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    /* step1: remove fd from data poll thread */
+    rc = mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0],
+            my_obj->my_hdl, mm_camera_sync_call);
+    if (rc < 0) {
+        /* The error might be due to async update. In this case
+         * wait for all updates to complete before proceeding. */
+        rc = mm_camera_poll_thread_commit_updates(&my_obj->ch_obj->poll_thread[0]);
+        if (rc < 0) {
+            LOGE("Poll sync failed %d", rc);
+            rc = 0;
+        }
+    }
+
+#ifndef DAEMON_PRESENT
+    cam_shim_packet_t *shim_cmd;
+    cam_shim_cmd_data shim_cmd_data;
+    mm_camera_obj_t *cam_obj = my_obj->ch_obj->cam_obj;
+
+    memset(&shim_cmd_data, 0, sizeof(shim_cmd_data));
+    shim_cmd_data.command = MSM_CAMERA_PRIV_STREAM_OFF;
+    shim_cmd_data.stream_id = my_obj->server_stream_id;
+    shim_cmd_data.value = NULL;
+    shim_cmd = mm_camera_create_shim_cmd_packet(CAM_SHIM_SET_PARM,
+            cam_obj->sessionid, &shim_cmd_data);
+
+    rc |= mm_camera_module_send_cmd(shim_cmd);
+    mm_camera_destroy_shim_cmd_packet(shim_cmd);
+    if (rc < 0) {
+        LOGE("Module StreamOFF failed: rc=%d", rc)
+    }
+#endif
+
+    /* step2: stream off */
+    rc |= ioctl(my_obj->fd, VIDIOC_STREAMOFF, &buf_type);
+    if (rc < 0) {
+        LOGE("STREAMOFF ioctl failed: %s", strerror(errno));
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_write_user_buf
+ *
+ * DESCRIPTION: dequeue a stream buffer from user buffer queue and fill internal structure
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @buf     : ptr to a struct storing buffer information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
int32_t mm_stream_write_user_buf(mm_stream_t * my_obj,
        mm_camera_buf_def_t *buf)
{
    int32_t rc = 0, i;
    int32_t index = -1, count = 0;
    struct msm_camera_user_buf_cont_t *cont_buf = NULL;

    /* USERPTR path: the buffer already carries a batch container. Drop
     * one reference; when the refcount reaches zero, copy the plane
     * indices into the kernel container and queue the batch back. */
    if (buf->buf_type == CAM_STREAM_BUF_TYPE_USERPTR) {
        pthread_mutex_lock(&my_obj->buf_lock);
        my_obj->buf_status[buf->buf_idx].buf_refcnt--;
        if (0 == my_obj->buf_status[buf->buf_idx].buf_refcnt) {
            /* lock is released early here; qbuf below runs unlocked */
            pthread_mutex_unlock(&my_obj->buf_lock);
            cont_buf = (struct msm_camera_user_buf_cont_t *)my_obj->buf[buf->buf_idx].buffer;
            cont_buf->buf_cnt = my_obj->buf[buf->buf_idx].user_buf.bufs_used;
            for (i = 0; i < (int32_t)cont_buf->buf_cnt; i++) {
                cont_buf->buf_idx[i] = my_obj->buf[buf->buf_idx].user_buf.buf_idx[i];
            }
            rc = mm_stream_qbuf(my_obj, buf);
            if(rc < 0) {
                LOGE("mm_camera_stream_qbuf(idx=%d) err=%d\n",
                            buf->buf_idx, rc);
            } else {
                /* invalidate the staged indices and mark the batch as
                 * owned by the kernel */
                for (i = 0; i < (int32_t)cont_buf->buf_cnt; i++) {
                    my_obj->buf[buf->buf_idx].user_buf.buf_idx[i] = -1;
                }
                my_obj->buf_status[buf->buf_idx].in_kernel = 1;
                my_obj->buf[buf->buf_idx].user_buf.buf_in_use = 1;
            }
        } else {
            LOGD("<DEBUG> : ref count pending count :%d idx = %d",
                 my_obj->buf_status[buf->buf_idx].buf_refcnt, buf->buf_idx);
            pthread_mutex_unlock(&my_obj->buf_lock);
        }
        return rc;
    }

    /* Non-USERPTR path: pick (or continue filling) a batch buffer. If no
     * batch is currently being staged, find the first one that is neither
     * in the kernel nor in use. */
    if ((my_obj->cur_buf_idx < 0)
            || (my_obj->cur_buf_idx >= my_obj->buf_num)) {
        for (i = 0; i < my_obj->buf_num; i++) {
            if ((my_obj->buf_status[i].in_kernel)
                    || (my_obj->buf[i].user_buf.buf_in_use)) {
                continue;
            }

            my_obj->cur_buf_idx = index = i;
            break;
        }
    } else {
        index = my_obj->cur_buf_idx;
    }

    if (index == -1) {
        LOGE("No Free batch buffer");
        rc = -1;
        return rc;
    }

    //Insert Buffer to Batch structure.
    /* NOTE(review): 'count' is initialized to 0 and never advanced, so
     * every staged frame index is written to slot 0 of the batch below —
     * confirm whether a running index (e.g. cur_bufs_staged) was
     * intended here. */
    my_obj->buf[index].user_buf.buf_idx[count] = buf->buf_idx;
    my_obj->cur_bufs_staged++;

    LOGD("index = %d filled = %d used = %d",
            index,
            my_obj->cur_bufs_staged,
            my_obj->buf[index].user_buf.bufs_used);

    /* Once the batch holds bufs_used entries, decrement its refcount and,
     * at zero, queue the full batch to the kernel. */
    if (my_obj->cur_bufs_staged
            == my_obj->buf[index].user_buf.bufs_used){
        pthread_mutex_lock(&my_obj->buf_lock);
        my_obj->buf_status[index].buf_refcnt--;
        if (0 == my_obj->buf_status[index].buf_refcnt) {
            /* lock is released early here; qbuf below runs unlocked */
            pthread_mutex_unlock(&my_obj->buf_lock);
            cont_buf = (struct msm_camera_user_buf_cont_t *)my_obj->buf[index].buffer;
            cont_buf->buf_cnt = my_obj->buf[index].user_buf.bufs_used;
            for (i = 0; i < (int32_t)cont_buf->buf_cnt; i++) {
                cont_buf->buf_idx[i] = my_obj->buf[index].user_buf.buf_idx[i];
            }
            rc = mm_stream_qbuf(my_obj, &my_obj->buf[index]);
            if(rc < 0) {
                LOGE("mm_camera_stream_qbuf(idx=%d) err=%d\n",
                            index, rc);
            } else {
                /* batch handed to kernel: reset staging state */
                for (i = 0; i < (int32_t)cont_buf->buf_cnt; i++) {
                    my_obj->buf[index].user_buf.buf_idx[i] = -1;
                }
                my_obj->buf_status[index].in_kernel = 1;
                my_obj->buf[index].user_buf.buf_in_use = 1;
                my_obj->cur_bufs_staged = 0;
                my_obj->cur_buf_idx = -1;
            }
        }else{
            LOGD("<DEBUG> : ref count pending count :%d idx = %d",
                 my_obj->buf_status[index].buf_refcnt, index);
            pthread_mutex_unlock(&my_obj->buf_lock);
        }
    }

    return rc;
}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_read_user_buf
+ *
+ * DESCRIPTION: dequeue a stream buffer from user buffer queue and fill internal structure
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @buf_info     : ptr to a struct storing buffer information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
int32_t mm_stream_read_user_buf(mm_stream_t * my_obj,
        mm_camera_buf_info_t* buf_info)
{
    int32_t rc = 0, i;
    mm_camera_buf_def_t *stream_buf  = NULL;
    struct msm_camera_user_buf_cont_t *user_buf = NULL;
    nsecs_t interval_nsec = 0, frame_ts = 0, timeStamp = 0;
    int ts_delta = 0;
    uint32_t frameID = 0;

    /* The dequeued batch container must be the same one we handed to the
     * kernel for this buffer index; anything else means corruption. */
    user_buf = (struct msm_camera_user_buf_cont_t *)buf_info->buf->buffer;

    if(user_buf != my_obj->buf[buf_info->buf->buf_idx].buffer) {
        LOGD("Buffer modified. ERROR");
        rc = -1;
        return rc;
    }

    /* Derive the frame ID of the first plane buffer in this batch from
     * the batch-level frame index. */
    if (buf_info->buf->frame_idx == 1) {
        frameID = buf_info->buf->frame_idx;
    }else {
        frameID = (buf_info->buf->frame_idx - 1) * user_buf->buf_cnt;
    }

    timeStamp = (nsecs_t)(buf_info->buf->ts.tv_sec) *
            1000000000LL + buf_info->buf->ts.tv_nsec;

    /* Non-monotonic timestamps are rejected: requeue the batch and bail
     * out (rc stays 0 — best-effort drop, not an error to the caller). */
    if (timeStamp <= my_obj->prev_timestamp) {
        LOGE("TimeStamp received less than expected");
        mm_stream_qbuf(my_obj, buf_info->buf);
        return rc;
    } else if (my_obj->prev_timestamp == 0
            || (my_obj->prev_frameID != buf_info->buf->frame_idx + 1)) {
        /* For first frame or in case a batch was dropped: synthesize the
         * previous timestamp from the nominal frame interval. */
        interval_nsec = ((my_obj->stream_info->user_buf_info.frameInterval) * 1000000);
        my_obj->prev_timestamp = (timeStamp - (nsecs_t)(user_buf->buf_cnt * interval_nsec));
    } else {
        /* Normal case: spread the inter-batch delta evenly across the
         * plane buffers in the batch. */
        ts_delta = timeStamp - my_obj->prev_timestamp;
        interval_nsec = (nsecs_t)(ts_delta / user_buf->buf_cnt);
        LOGD("Timestamp delta = %d timestamp = %lld", ts_delta, timeStamp);
    }

    /* Assign each plane buffer an interpolated timestamp and a sequential
     * frame index within the batch. */
    for (i = 0; i < (int32_t)user_buf->buf_cnt; i++) {
        buf_info->buf->user_buf.buf_idx[i] = user_buf->buf_idx[i];
        stream_buf = &my_obj->plane_buf[user_buf->buf_idx[i]];
        stream_buf->frame_idx = frameID + i;

        frame_ts  = (i * interval_nsec) + my_obj->prev_timestamp;

        stream_buf->ts.tv_sec  = (frame_ts / 1000000000LL);
        stream_buf->ts.tv_nsec = (frame_ts - (stream_buf->ts.tv_sec * 1000000000LL));
        stream_buf->is_uv_subsampled = buf_info->buf->is_uv_subsampled;

        LOGD("buf_index %d, frame_idx %d, stream type %d, timestamp = %lld",
                 stream_buf->buf_idx, stream_buf->frame_idx,
                my_obj->stream_info->stream_type, frame_ts);
    }

    /* The batch itself is stamped with the previous (batch-start)
     * timestamp, split back into sec/nsec. */
    buf_info->buf->ts.tv_sec  = (my_obj->prev_timestamp / 1000000000LL);
    buf_info->buf->ts.tv_nsec = (my_obj->prev_timestamp -
            (buf_info->buf->ts.tv_sec * 1000000000LL));

    buf_info->buf->user_buf.bufs_used = user_buf->buf_cnt;
    buf_info->buf->user_buf.buf_in_use = 1;

    my_obj->prev_timestamp = timeStamp;
    my_obj->prev_frameID = buf_info->buf->frame_idx;

    LOGD("X rc = %d",rc);
    return rc;
}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_read_msm_frame
+ *
+ * DESCRIPTION: dequeue a stream buffer from kernel queue
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @buf_info     : ptr to a struct storing buffer information
+ *   @num_planes   : number of planes in the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_read_msm_frame(mm_stream_t * my_obj,
+                                 mm_camera_buf_info_t* buf_info,
+                                 uint8_t num_planes)
+{
+    int32_t rc = 0;
+    struct v4l2_buffer vb;
+    struct v4l2_plane planes[VIDEO_MAX_PLANES];
+    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+          my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    /* Prepare a multi-planar USERPTR dequeue request. */
+    memset(&vb,  0,  sizeof(vb));
+    vb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    vb.memory = V4L2_MEMORY_USERPTR;
+    vb.m.planes = &planes[0];
+    vb.length = num_planes;
+
+    rc = ioctl(my_obj->fd, VIDIOC_DQBUF, &vb);
+    if (0 > rc) {
+        LOGE("VIDIOC_DQBUF ioctl call failed on stream type %d (rc=%d): %s",
+             my_obj->stream_info->stream_type, rc, strerror(errno));
+    } else {
+        /* Once the last queued buffer has been dequeued there is nothing
+         * left to poll for, so take this stream fd off the poll thread. */
+        pthread_mutex_lock(&my_obj->buf_lock);
+        my_obj->queued_buffer_count--;
+        if (0 == my_obj->queued_buffer_count) {
+            LOGH("Stoping poll on stream %p type: %d",
+                my_obj, my_obj->stream_info->stream_type);
+            mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0],
+                my_obj->my_hdl, mm_camera_async_call);
+            LOGH("Stopped poll on stream %p type: %d",
+                my_obj, my_obj->stream_info->stream_type);
+        }
+        pthread_mutex_unlock(&my_obj->buf_lock);
+        /* Copy the dequeue result into our per-buffer bookkeeping. */
+        uint32_t idx = vb.index;
+        buf_info->buf = &my_obj->buf[idx];
+        buf_info->frame_idx = vb.sequence;
+        buf_info->stream_id = my_obj->my_hdl;
+
+        buf_info->buf->stream_id = my_obj->my_hdl;
+        buf_info->buf->buf_idx = idx;
+        buf_info->buf->frame_idx = vb.sequence;
+        /* v4l2 timestamps are in usec; internal timestamps are in nsec. */
+        buf_info->buf->ts.tv_sec  = vb.timestamp.tv_sec;
+        buf_info->buf->ts.tv_nsec = vb.timestamp.tv_usec * 1000;
+        buf_info->buf->flags = vb.flags;
+
+        LOGH("VIDIOC_DQBUF buf_index %d, frame_idx %d, stream type %d, rc %d,"
+                "queued: %d, buf_type = %d flags = %d",
+             vb.index, buf_info->buf->frame_idx,
+            my_obj->stream_info->stream_type, rc,
+            my_obj->queued_buffer_count, buf_info->buf->buf_type,
+            buf_info->buf->flags);
+
+        /* Driver reports UV subsampling through the reserved field. */
+        buf_info->buf->is_uv_subsampled =
+            (vb.reserved == V4L2_PIX_FMT_NV14 || vb.reserved == V4L2_PIX_FMT_NV41);
+
+        /* Batch-mode container buffers need per-frame ids/timestamps. */
+        if(buf_info->buf->buf_type == CAM_STREAM_BUF_TYPE_USERPTR) {
+            mm_stream_read_user_buf(my_obj, buf_info);
+        }
+
+        /* Make CPU caches coherent before the frame is consumed. Note that
+         * rc is overwritten here, so a cache-op failure is what is returned. */
+        if ( NULL != my_obj->mem_vtbl.clean_invalidate_buf ) {
+            rc = my_obj->mem_vtbl.clean_invalidate_buf(idx,
+                my_obj->mem_vtbl.user_data);
+            if (0 > rc) {
+                LOGE("Clean invalidate cache failed on buffer index: %d",
+                     idx);
+            }
+        } else {
+            LOGE("Clean invalidate cache op not supported");
+        }
+    }
+
+    LOGD("X rc = %d",rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_set_parm
+ *
+ * DESCRIPTION: set parameters per stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @in_value     : ptr to a param struct to be set to server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of parameters to be set
+ *              are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_stream_set_parm(mm_stream_t *my_obj,
+                           cam_stream_parm_buffer_t *in_value)
+{
+    int32_t value = 0;
+    int32_t rc;
+
+    if (NULL == in_value) {
+        return -1;
+    }
+
+    /* Parameter payload itself travels via the mapped parm buffer; the
+     * s_ctrl only kicks the server to process it. */
+    rc = mm_camera_util_s_ctrl(my_obj->ch_obj->cam_obj,
+            my_obj->server_stream_id, my_obj->fd,
+            CAM_PRIV_STREAM_PARM, &value);
+    if (rc < 0) {
+        LOGE("Failed to set stream parameter type = %d", in_value->type);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_get_parm
+ *
+ * DESCRIPTION: get parameters per stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @in_value     : ptr to a param struct to be get from server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of parameters to be get
+ *              are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_stream_get_parm(mm_stream_t *my_obj,
+                           cam_stream_parm_buffer_t *in_value)
+{
+    int32_t value = 0;
+
+    if (NULL == in_value) {
+        return -1;
+    }
+
+    /* Results come back through the mapped parm buffer; the g_ctrl only
+     * asks the server to fill it in. */
+    return mm_camera_util_g_ctrl(my_obj->ch_obj->cam_obj,
+            my_obj->server_stream_id, my_obj->fd,
+            CAM_PRIV_STREAM_PARM, &value);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_do_action
+ *
+ * DESCRIPTION: request server to perform stream based actions
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @in_value     : ptr to a struct of actions to be performed by the server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the action struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of actions to be performed
+ *              are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_stream_do_action(mm_stream_t *my_obj,
+                            void *in_value)
+{
+    int32_t value = 0;
+
+    if (NULL == in_value) {
+        return -1;
+    }
+
+    /* The action payload travels via the pre-mapped buffer; the s_ctrl
+     * merely triggers the server to execute it. */
+    return mm_camera_util_s_ctrl(my_obj->ch_obj->cam_obj,
+            my_obj->server_stream_id, my_obj->fd,
+            CAM_PRIV_STREAM_PARM, &value);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_set_ext_mode
+ *
+ * DESCRIPTION: set stream extended mode to server via v4l2 ioctl
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Server will return a server stream id that uniquely identify
+ *              this stream on server side. Later on communication to server
+ *              per stream should use this server stream id.
+ *==========================================================================*/
+int32_t mm_stream_set_ext_mode(mm_stream_t * my_obj)
+{
+    int32_t rc = 0;
+    struct v4l2_streamparm s_parm;
+    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+          my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    memset(&s_parm, 0, sizeof(s_parm));
+    s_parm.type =  V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+
+    /* The driver returns the server-side stream id in extendedmode. */
+    rc = ioctl(my_obj->fd, VIDIOC_S_PARM, &s_parm);
+    LOGD("stream fd=%d, rc=%d, extended_mode=%d",
+         my_obj->fd, rc, s_parm.parm.capture.extendedmode);
+
+    if (rc == 0) {
+        my_obj->server_stream_id = s_parm.parm.capture.extendedmode;
+#ifndef DAEMON_PRESENT
+        /* No camera daemon: announce the new stream directly to the
+         * camera module via a shim command instead. */
+        cam_shim_packet_t *shim_cmd;
+        cam_shim_cmd_data shim_cmd_data;
+        mm_camera_obj_t *cam_obj = my_obj->ch_obj->cam_obj;
+
+        memset(&shim_cmd_data, 0, sizeof(shim_cmd_data));
+        shim_cmd_data.command = MSM_CAMERA_PRIV_NEW_STREAM;
+        shim_cmd_data.stream_id = my_obj->server_stream_id;
+        shim_cmd_data.value = NULL;
+        shim_cmd = mm_camera_create_shim_cmd_packet(CAM_SHIM_SET_PARM,
+                cam_obj->sessionid, &shim_cmd_data);
+        rc = mm_camera_module_send_cmd(shim_cmd);
+        mm_camera_destroy_shim_cmd_packet(shim_cmd);
+#endif /* DAEMON_PRESENT */
+    } else {
+        LOGE("VIDIOC_S_PARM  extendedmode error");
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_qbuf
+ *
+ * DESCRIPTION: enqueue buffer back to kernel queue for furture use
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @buf          : ptr to a struct storing buffer information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_qbuf(mm_stream_t *my_obj, mm_camera_buf_def_t *buf)
+{
+    int32_t rc = 0;
+    uint32_t length = 0;
+    struct v4l2_buffer buffer;
+    struct v4l2_plane planes[VIDEO_MAX_PLANES];
+    LOGD("E, my_handle = 0x%x, fd = %d, state = %d, stream type = %d",
+          my_obj->my_hdl, my_obj->fd, my_obj->state,
+         my_obj->stream_info->stream_type);
+
+    if (buf->buf_type == CAM_STREAM_BUF_TYPE_USERPTR) {
+        /* Batch container: a single plane carrying the container fd. */
+        LOGD("USERPTR num_buf = %d, idx = %d",
+                buf->user_buf.bufs_used, buf->buf_idx);
+        memset(&planes, 0, sizeof(planes));
+        planes[0].length = my_obj->stream_info->user_buf_info.size;
+        planes[0].m.userptr = buf->fd;
+        length = 1;
+    } else {
+        /* Regular buffer: hand the kernel its full plane description. */
+        memcpy(planes, buf->planes_buf.planes, sizeof(planes));
+        length = buf->planes_buf.num_planes;
+    }
+
+    memset(&buffer, 0, sizeof(buffer));
+    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    buffer.memory = V4L2_MEMORY_USERPTR;
+    buffer.index = (__u32)buf->buf_idx;
+    buffer.m.planes = &planes[0];
+    buffer.length = (__u32)length;
+
+    /* Invalidate CPU caches before giving the buffer to the hardware. */
+    if ( NULL != my_obj->mem_vtbl.invalidate_buf ) {
+        rc = my_obj->mem_vtbl.invalidate_buf(buffer.index,
+                                             my_obj->mem_vtbl.user_data);
+        if ( 0 > rc ) {
+            LOGE("Cache invalidate failed on buffer index: %d",
+                       buffer.index);
+            return rc;
+        }
+    } else {
+        LOGE("Cache invalidate op not added");
+    }
+
+    /* First queued buffer: start polling this stream fd so dequeues can
+     * be noticed by the data poll thread. */
+    pthread_mutex_lock(&my_obj->buf_lock);
+    my_obj->queued_buffer_count++;
+    if (1 == my_obj->queued_buffer_count) {
+        /* Add fd to data poll thread */
+        LOGH("Starting poll on stream %p type: %d",
+            my_obj,my_obj->stream_info->stream_type);
+        rc = mm_camera_poll_thread_add_poll_fd(&my_obj->ch_obj->poll_thread[0],
+            my_obj->my_hdl, my_obj->fd, mm_stream_data_notify, (void*)my_obj,
+            mm_camera_async_call);
+        if (0 > rc) {
+            LOGE("Add poll on stream %p type: %d fd error (rc=%d)",
+                 my_obj, my_obj->stream_info->stream_type, rc);
+        } else {
+            LOGH("Started poll on stream %p type: %d",
+                my_obj, my_obj->stream_info->stream_type);
+        }
+    }
+    pthread_mutex_unlock(&my_obj->buf_lock);
+
+    /* QBUF is issued outside the lock; on failure roll back the count and,
+     * if this was the only queued buffer, stop polling again. */
+    rc = ioctl(my_obj->fd, VIDIOC_QBUF, &buffer);
+    pthread_mutex_lock(&my_obj->buf_lock);
+    if (0 > rc) {
+        LOGE("VIDIOC_QBUF ioctl call failed on stream type %d (rc=%d): %s",
+             my_obj->stream_info->stream_type, rc, strerror(errno));
+        my_obj->queued_buffer_count--;
+        if (0 == my_obj->queued_buffer_count) {
+            /* Remove fd from data poll in case of failing
+             * first buffer queuing attempt */
+            LOGH("Stoping poll on stream %p type: %d",
+                my_obj, my_obj->stream_info->stream_type);
+            mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0],
+                my_obj->my_hdl, mm_camera_async_call);
+            LOGH("Stopped poll on stream %p type: %d",
+                my_obj, my_obj->stream_info->stream_type);
+        }
+    } else {
+        LOGH("VIDIOC_QBUF buf_index %d, frame_idx %d stream type %d, rc %d,"
+                " queued: %d, buf_type = %d",
+                 buffer.index, buf->frame_idx, my_obj->stream_info->stream_type, rc,
+                my_obj->queued_buffer_count, buf->buf_type);
+    }
+    pthread_mutex_unlock(&my_obj->buf_lock);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_request_buf
+ *
+ * DESCRIPTION: This function let kernel know the amount of buffers need to
+ *              be registered via v4l2 ioctl.
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_request_buf(mm_stream_t * my_obj)
+{
+    struct v4l2_requestbuffers bufreq;
+    uint8_t buf_num = my_obj->buf_num;
+    int32_t rc;
+
+    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+          my_obj->my_hdl, my_obj->fd, my_obj->state);
+    LOGD("buf_num = %d, stream type = %d",
+          buf_num, my_obj->stream_info->stream_type);
+
+    /* Reject counts beyond what the stream bookkeeping can track. */
+    if (buf_num > MM_CAMERA_MAX_NUM_FRAMES) {
+        LOGE("buf num %d > max limit %d\n",
+                    buf_num, MM_CAMERA_MAX_NUM_FRAMES);
+        return -1;
+    }
+
+    /* Ask the kernel to allocate bookkeeping for buf_num USERPTR buffers. */
+    memset(&bufreq, 0, sizeof(bufreq));
+    bufreq.count  = buf_num;
+    bufreq.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    bufreq.memory = V4L2_MEMORY_USERPTR;
+
+    rc = ioctl(my_obj->fd, VIDIOC_REQBUFS, &bufreq);
+    if (rc < 0) {
+      LOGE("fd=%d, ioctl VIDIOC_REQBUFS failed: rc=%d, errno %d",
+            my_obj->fd, rc, errno);
+    }
+
+    LOGD("X rc = %d",rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_need_wait_for_mapping
+ *
+ * DESCRIPTION: Utility function to determine whether to wait for mapping
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *
+ * RETURN     : int8_t whether wait is necessary
+ *              0  -- no wait
+ *              1 -- wait
+ *==========================================================================*/
+int8_t mm_stream_need_wait_for_mapping(mm_stream_t * my_obj)
+{
+    int8_t wait = 0;
+    uint32_t idx;
+
+    for (idx = 0; idx < my_obj->buf_num; idx++) {
+        if (my_obj->buf_status[idx].map_status < 0) {
+            /* A mapping already failed: never wait, callers must not block
+             * on a condition that will not be signalled. */
+            return 0;
+        }
+        if ((0 == my_obj->buf_status[idx].map_status)
+                && my_obj->buf_status[idx].in_kernel) {
+            /* Buffer queued to kernel but not yet mapped: keep waiting
+             * (unless a later buffer turns out to have failed). */
+            wait = 1;
+        }
+    }
+
+    return wait;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_map_buf
+ *
+ * DESCRIPTION: mapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @buf_type     : type of buffer to be mapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @frame_idx    : index of buffer within the stream buffers, only valid if
+ *                   buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx    : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plean_idx is the
+ *                   index to plane (0..num_of_planes)
+ *   @fd           : file descriptor of the buffer
+ *   @size         : size of the buffer
+ *   @buffer       : buffer pointer sent along with the fd in the mapping
+ *                   packet (presumably the buffer's virtual address —
+ *                   confirm against the packet consumer)
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_map_buf(mm_stream_t * my_obj,
+        uint8_t buf_type, uint32_t frame_idx,
+        int32_t plane_idx, int32_t fd,
+        size_t size, void *buffer)
+{
+    int32_t rc = 0;
+    if (NULL == my_obj || NULL == my_obj->ch_obj || NULL == my_obj->ch_obj->cam_obj) {
+        LOGE("NULL obj of stream/channel/camera");
+        return -1;
+    }
+
+    /* Build the FD-mapping request describing this buffer to the server. */
+    cam_sock_packet_t packet;
+    memset(&packet, 0, sizeof(cam_sock_packet_t));
+    packet.msg_type = CAM_MAPPING_TYPE_FD_MAPPING;
+    packet.payload.buf_map.type = buf_type;
+    packet.payload.buf_map.fd = fd;
+    packet.payload.buf_map.size = size;
+    packet.payload.buf_map.stream_id = my_obj->server_stream_id;
+    packet.payload.buf_map.frame_idx = frame_idx;
+    packet.payload.buf_map.plane_idx = plane_idx;
+    packet.payload.buf_map.buffer = buffer;
+    LOGD("mapping buf_type %d, stream_id %d, frame_idx %d, fd %d, size %d",
+             buf_type, my_obj->server_stream_id, frame_idx, fd, size);
+
+#ifdef DAEMON_PRESENT
+    /* Daemon present: ship the packet (and fd) over the domain socket. */
+    rc = mm_camera_util_sendmsg(my_obj->ch_obj->cam_obj,
+                                &packet, sizeof(cam_sock_packet_t), fd);
+#else
+    /* No daemon: deliver the same payload through the shim layer. */
+    cam_shim_packet_t *shim_cmd;
+    shim_cmd = mm_camera_create_shim_cmd_packet(CAM_SHIM_REG_BUF,
+            my_obj->ch_obj->cam_obj->sessionid, &packet);
+    rc = mm_camera_module_send_cmd(shim_cmd);
+    mm_camera_destroy_shim_cmd_packet(shim_cmd);
+#endif
+    /* For stream buffers (and batch-mode user buffers) record mapping
+     * status and wake anyone waiting for all mappings to complete. */
+    if ((buf_type == CAM_MAPPING_BUF_TYPE_STREAM_BUF)
+            || ((buf_type
+            == CAM_MAPPING_BUF_TYPE_STREAM_USER_BUF)
+            && (my_obj->stream_info != NULL)
+            && (my_obj->stream_info->streaming_mode
+            == CAM_STREAMING_MODE_BATCH))) {
+        pthread_mutex_lock(&my_obj->buf_lock);
+        if (rc < 0) {
+            my_obj->buf_status[frame_idx].map_status = -1;
+            LOGE("fail status =%d", my_obj->buf_status[frame_idx].map_status);
+        } else {
+            my_obj->buf_status[frame_idx].map_status = 1;
+        }
+        /* Signal also on failure: need_wait_for_mapping returns 0 then. */
+        if (mm_stream_need_wait_for_mapping(my_obj) == 0) {
+            LOGD("Buffer mapping Done: Signal strm fd = %d",
+                     my_obj->fd);
+            pthread_cond_signal(&my_obj->buf_cond);
+        }
+        pthread_mutex_unlock(&my_obj->buf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_map_bufs
+ *
+ * DESCRIPTION: mapping stream buffers via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @buf_map_list : list of buffer objects to map
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+
+int32_t mm_stream_map_bufs(mm_stream_t * my_obj,
+                           const cam_buf_map_type_list *buf_map_list)
+{
+    if (NULL == my_obj || NULL == my_obj->ch_obj || NULL == my_obj->ch_obj->cam_obj) {
+        LOGE("NULL obj of stream/channel/camera");
+        return -1;
+    }
+
+    /* Build one bundled mapping packet for all buffers in the list. */
+    cam_sock_packet_t packet;
+    memset(&packet, 0, sizeof(cam_sock_packet_t));
+    packet.msg_type = CAM_MAPPING_TYPE_FD_BUNDLED_MAPPING;
+
+    memcpy(&packet.payload.buf_map_list, buf_map_list,
+           sizeof(packet.payload.buf_map_list));
+
+    int sendfds[CAM_MAX_NUM_BUFS_PER_STREAM];
+    uint32_t numbufs = packet.payload.buf_map_list.length;
+    if (numbufs < 1) {
+      LOGD("No buffers, suppressing the mapping command");
+      return 0;
+    }
+
+    /* Stamp the server stream id on each entry and collect the fds that
+     * must travel with the socket message. */
+    uint32_t i;
+    for (i = 0; i < numbufs; i++) {
+        packet.payload.buf_map_list.buf_maps[i].stream_id = my_obj->server_stream_id;
+        sendfds[i] = packet.payload.buf_map_list.buf_maps[i].fd;
+    }
+
+    /* Mark unused slots with an invalid fd. */
+    for (i = numbufs; i < CAM_MAX_NUM_BUFS_PER_STREAM; i++) {
+        packet.payload.buf_map_list.buf_maps[i].fd = -1;
+        sendfds[i] = -1;
+    }
+
+#ifdef DAEMON_PRESENT
+    /* Daemon present: send packet plus fds over the domain socket. */
+    int32_t ret = mm_camera_util_bundled_sendmsg(my_obj->ch_obj->cam_obj,
+            &packet, sizeof(cam_sock_packet_t), sendfds, numbufs);
+#else
+    /* No daemon: deliver the same payload through the shim layer. */
+    cam_shim_packet_t *shim_cmd;
+    shim_cmd = mm_camera_create_shim_cmd_packet(CAM_SHIM_REG_BUF,
+            my_obj->ch_obj->cam_obj->sessionid, &packet);
+    int32_t ret = mm_camera_module_send_cmd(shim_cmd);
+    mm_camera_destroy_shim_cmd_packet(shim_cmd);
+#endif
+    /* For stream buffers (and batch-mode user buffers) record per-buffer
+     * mapping status and wake waiters once all mappings have resolved. */
+    if ((numbufs > 0) && ((buf_map_list->buf_maps[0].type
+            == CAM_MAPPING_BUF_TYPE_STREAM_BUF)
+            || ((buf_map_list->buf_maps[0].type ==
+            CAM_MAPPING_BUF_TYPE_STREAM_USER_BUF)
+            && (my_obj->stream_info != NULL)
+            && (my_obj->stream_info->streaming_mode
+            == CAM_STREAMING_MODE_BATCH)))) {
+        pthread_mutex_lock(&my_obj->buf_lock);
+        for (i = 0; i < numbufs; i++) {
+           if (ret < 0) {
+               my_obj->buf_status[i].map_status = -1;
+           } else {
+               my_obj->buf_status[i].map_status = 1;
+           }
+        }
+
+        /* Signal also on failure: need_wait_for_mapping returns 0 then. */
+        if (mm_stream_need_wait_for_mapping(my_obj) == 0) {
+            LOGD("Buffer mapping Done: Signal strm fd = %d",
+                     my_obj->fd);
+            pthread_cond_signal(&my_obj->buf_cond);
+        }
+        pthread_mutex_unlock(&my_obj->buf_lock);
+    }
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_unmap_buf
+ *
+ * DESCRIPTION: unmapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @buf_type     : type of buffer to be unmapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @frame_idx    : index of buffer within the stream buffers, only valid if
+ *                   buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx    : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plean_idx is the
+ *                   index to plane (0..num_of_planes)
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_unmap_buf(mm_stream_t * my_obj,
+                            uint8_t buf_type,
+                            uint32_t frame_idx,
+                            int32_t plane_idx)
+{
+    int32_t ret;
+    if (NULL == my_obj || NULL == my_obj->ch_obj || NULL == my_obj->ch_obj->cam_obj) {
+        LOGE("NULL obj of stream/channel/camera");
+        return -1;
+    }
+    /* Build the FD-unmapping request for this buffer. */
+    cam_sock_packet_t packet;
+    memset(&packet, 0, sizeof(cam_sock_packet_t));
+    packet.msg_type = CAM_MAPPING_TYPE_FD_UNMAPPING;
+    packet.payload.buf_unmap.type = buf_type;
+    packet.payload.buf_unmap.stream_id = my_obj->server_stream_id;
+    packet.payload.buf_unmap.frame_idx = frame_idx;
+    packet.payload.buf_unmap.plane_idx = plane_idx;
+#ifdef DAEMON_PRESENT
+    /* Daemon present: send over the domain socket (no fd attached). */
+    ret = mm_camera_util_sendmsg(my_obj->ch_obj->cam_obj,
+            &packet, sizeof(cam_sock_packet_t), -1);
+#else
+    /* No daemon: deliver the same payload through the shim layer. */
+    cam_shim_packet_t *shim_cmd;
+    shim_cmd = mm_camera_create_shim_cmd_packet(CAM_SHIM_REG_BUF,
+            my_obj->ch_obj->cam_obj->sessionid, &packet);
+    ret = mm_camera_module_send_cmd(shim_cmd);
+    mm_camera_destroy_shim_cmd_packet(shim_cmd);
+#endif
+    /* Reset the mapping status regardless of the send result. */
+    pthread_mutex_lock(&my_obj->buf_lock);
+    my_obj->buf_status[frame_idx].map_status = 0;
+    pthread_mutex_unlock(&my_obj->buf_lock);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_init_bufs
+ *
+ * DESCRIPTION: initialize stream buffers needed. This function will request
+ *              buffers needed from upper layer through the mem ops table passed
+ *              during configuration stage.
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_init_bufs(mm_stream_t * my_obj)
+{
+    int32_t i, rc = 0;
+    uint8_t *reg_flags = NULL;
+    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+          my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    /* deinit buf if it's not NULL*/
+    if (NULL != my_obj->buf) {
+        mm_stream_deinit_bufs(my_obj);
+    }
+
+    /* Ask the upper layer for buffers; reg_flags is allocated by get_bufs
+     * and owned (freed) by this function. */
+    rc = my_obj->mem_vtbl.get_bufs(&my_obj->frame_offset,
+                                   &my_obj->buf_num,
+                                   &reg_flags,
+                                   &my_obj->buf,
+                                   &my_obj->map_ops,
+                                   my_obj->mem_vtbl.user_data);
+
+    if (0 != rc) {
+        LOGE("Error get buf, rc = %d\n", rc);
+        return rc;
+    }
+
+    /* Tag every buffer with this stream's handle/type and record whether
+     * it should be queued to the kernel at registration time. */
+    for (i = 0; i < my_obj->buf_num; i++) {
+        my_obj->buf_status[i].initial_reg_flag = reg_flags[i];
+        my_obj->buf[i].stream_id = my_obj->my_hdl;
+        my_obj->buf[i].stream_type = my_obj->stream_info->stream_type;
+
+        if (my_obj->buf[i].buf_type == CAM_STREAM_BUF_TYPE_USERPTR) {
+            my_obj->buf[i].user_buf.bufs_used =
+                    (int8_t)my_obj->stream_info->user_buf_info.frame_buf_cnt;
+            my_obj->buf[i].user_buf.buf_in_use = reg_flags[i];
+        }
+    }
+
+    /* Batch mode: set up the flat array of per-frame plane buffers that
+     * back the container buffers. */
+    if (my_obj->stream_info->streaming_mode == CAM_STREAMING_MODE_BATCH) {
+        my_obj->plane_buf = my_obj->buf[0].user_buf.plane_buf;
+        if (my_obj->plane_buf != NULL) {
+            my_obj->plane_buf_num =
+                    my_obj->buf_num *
+                    my_obj->stream_info->user_buf_info.frame_buf_cnt;
+            for (i = 0; i < my_obj->plane_buf_num; i++) {
+                my_obj->plane_buf[i].stream_id = my_obj->my_hdl;
+                my_obj->plane_buf[i].stream_type = my_obj->stream_info->stream_type;
+            }
+        }
+        my_obj->cur_bufs_staged = 0;
+        my_obj->cur_buf_idx = -1;
+    }
+
+    free(reg_flags);
+    reg_flags = NULL;
+
+    /* update in stream info about number of stream buffers */
+    my_obj->stream_info->num_bufs = my_obj->buf_num;
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_deinit_bufs
+ *
+ * DESCRIPTION: return stream buffers to upper layer through the mem ops table
+ *              passed during configuration stage.
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_deinit_bufs(mm_stream_t * my_obj)
+{
+    int32_t rc = 0;
+
+    mm_camera_map_unmap_ops_tbl_t ops_tbl;
+    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+          my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    if (NULL == my_obj->buf) {
+        LOGD("Buf is NULL, no need to deinit");
+        return rc;
+    }
+
+    /* release bufs: hand the map/unmap ops back so the upper layer can
+     * unmap before freeing its allocations. */
+    ops_tbl.map_ops = mm_stream_map_buf_ops;
+    ops_tbl.bundled_map_ops = mm_stream_bundled_map_buf_ops;
+    ops_tbl.unmap_ops = mm_stream_unmap_buf_ops;
+    ops_tbl.userdata = my_obj;
+
+    rc = my_obj->mem_vtbl.put_bufs(&ops_tbl,
+                                   my_obj->mem_vtbl.user_data);
+
+    /* Batch-mode plane buffer array is owned here; free and clear. */
+    if (my_obj->plane_buf != NULL) {
+        free(my_obj->plane_buf);
+        my_obj->plane_buf = NULL;
+    }
+
+    free(my_obj->buf);
+    my_obj->buf = NULL;
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_reg_buf
+ *
+ * DESCRIPTION: register buffers with kernel by calling v4l2 ioctl QBUF for
+ *              each buffer in the stream
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_reg_buf(mm_stream_t * my_obj)
+{
+    int32_t rc = 0;
+    uint8_t i;
+    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+          my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    /* Tell the kernel how many buffers to expect before queuing any. */
+    rc = mm_stream_request_buf(my_obj);
+    if (rc != 0) {
+        return rc;
+    }
+
+    my_obj->queued_buffer_count = 0;
+    for(i = 0; i < my_obj->buf_num; i++){
+        /* check if need to qbuf initially */
+        if (my_obj->buf_status[i].initial_reg_flag) {
+            rc = mm_stream_qbuf(my_obj, &my_obj->buf[i]);
+            if (rc != 0) {
+                LOGE("VIDIOC_QBUF rc = %d\n", rc);
+                break;
+            }
+            /* Buffer is now owned by the kernel: no user references. */
+            my_obj->buf_status[i].buf_refcnt = 0;
+            my_obj->buf_status[i].in_kernel = 1;
+        } else {
+            /* the buf is held by upper layer, will not queue into kernel.
+             * add buf reference count */
+            my_obj->buf_status[i].buf_refcnt = 1;
+            my_obj->buf_status[i].in_kernel = 0;
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_unreg_buf
+ *
+ * DESCRIPTION: unregister all stream buffers from kernel
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_unreg_buf(mm_stream_t * my_obj)
+{
+    struct v4l2_requestbuffers bufreq;
+    int32_t i, rc = 0;
+    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+          my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    /* unreg buf to kernel: VIDIOC_REQBUFS with count 0 releases all
+     * buffers. Zero-initialize the struct first so the reserved fields
+     * are not passed to the ioctl as stack garbage (mm_stream_request_buf
+     * does the same). */
+    memset(&bufreq, 0, sizeof(bufreq));
+    bufreq.count = 0;
+    bufreq.type  = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    bufreq.memory = V4L2_MEMORY_USERPTR;
+    rc = ioctl(my_obj->fd, VIDIOC_REQBUFS, &bufreq);
+    if (rc < 0) {
+        LOGE("fd=%d, VIDIOC_REQBUFS failed, rc=%d, errno %d",
+               my_obj->fd, rc, errno);
+    }
+
+    /* reset buf reference count: no buffers remain with the kernel */
+    pthread_mutex_lock(&my_obj->buf_lock);
+    for(i = 0; i < my_obj->buf_num; i++){
+        my_obj->buf_status[i].buf_refcnt = 0;
+        my_obj->buf_status[i].in_kernel = 0;
+    }
+    pthread_mutex_unlock(&my_obj->buf_lock);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_get_v4l2_fmt
+ *
+ * DESCRIPTION: translate camera image format into FOURCC code
+ *
+ * PARAMETERS :
+ *   @fmt     : camera image format
+ *
+ * RETURN     : FOURCC code for image format
+ *==========================================================================*/
+uint32_t mm_stream_get_v4l2_fmt(cam_format_t fmt)
+{
+    /* Pure mapping table: camera HAL pixel format -> V4L2 FOURCC.
+     * Returns 0 for formats with no V4L2 equivalent. */
+    uint32_t val = 0;
+    switch(fmt) {
+    /* Venus and UBWC NV12 variants share the plain NV12 FOURCC. */
+    case CAM_FORMAT_YUV_420_NV12:
+    case CAM_FORMAT_YUV_420_NV12_VENUS:
+    case CAM_FORMAT_YUV_420_NV12_UBWC:
+        val = V4L2_PIX_FMT_NV12;
+        break;
+    case CAM_FORMAT_YUV_420_NV21:
+    case CAM_FORMAT_YUV_420_NV21_VENUS:
+        val = V4L2_PIX_FMT_NV21;
+        break;
+    /* 10/12/14-bit Bayer: one case per CFA pattern (GBRG/GRBG/RGGB/BGGR). */
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG:
+        val= V4L2_PIX_FMT_SGBRG10;
+        break;
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GRBG:
+        val= V4L2_PIX_FMT_SGRBG10;
+        break;
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_RGGB:
+        val= V4L2_PIX_FMT_SRGGB10;
+        break;
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_BGGR:
+        val= V4L2_PIX_FMT_SBGGR10;
+        break;
+    case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG:
+        val= V4L2_PIX_FMT_SGBRG12;
+        break;
+    case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GRBG:
+        val= V4L2_PIX_FMT_SGRBG12;
+        break;
+    case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_RGGB:
+        val= V4L2_PIX_FMT_SRGGB12;
+        break;
+    case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_BGGR:
+        val = V4L2_PIX_FMT_SBGGR12;
+        break;
+    /* 14-bit: QCOM-packed and MIPI "ideal raw" variants map identically. */
+    case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GBRG:
+        val= V4L2_PIX_FMT_SGBRG14;
+        break;
+    case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GRBG:
+        val= V4L2_PIX_FMT_SGRBG14;
+        break;
+    case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_RGGB:
+        val= V4L2_PIX_FMT_SRGGB14;
+        break;
+    case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_BGGR:
+        val = V4L2_PIX_FMT_SBGGR14;
+        break;
+    case CAM_FORMAT_YUV_422_NV61:
+        val= V4L2_PIX_FMT_NV61;
+        break;
+    case CAM_FORMAT_YUV_RAW_8BIT_YUYV:
+        val= V4L2_PIX_FMT_YUYV;
+        break;
+    case CAM_FORMAT_YUV_RAW_8BIT_YVYU:
+        val= V4L2_PIX_FMT_YVYU;
+        break;
+    case CAM_FORMAT_YUV_RAW_8BIT_UYVY:
+        val= V4L2_PIX_FMT_UYVY;
+        break;
+    case CAM_FORMAT_YUV_RAW_8BIT_VYUY:
+        val= V4L2_PIX_FMT_VYUY;
+        break;
+    case CAM_FORMAT_YUV_420_YV12:
+        /* NOTE(review): YV12 deliberately reports the NV12 FOURCC here
+         * (not V4L2_PIX_FMT_YVU420) -- verify against the driver contract. */
+        val= V4L2_PIX_FMT_NV12;
+        break;
+    case CAM_FORMAT_YUV_422_NV16:
+        val= V4L2_PIX_FMT_NV16;
+        break;
+    case CAM_FORMAT_Y_ONLY:
+        val= V4L2_PIX_FMT_GREY;
+        break;
+    case CAM_FORMAT_Y_ONLY_10_BPP:
+        val= V4L2_PIX_FMT_Y10;
+        break;
+    case CAM_FORMAT_Y_ONLY_12_BPP:
+        val= V4L2_PIX_FMT_Y12;
+        break;
+    case CAM_FORMAT_Y_ONLY_14_BPP:
+        /* No v4l2 format is defined yet for CAM_FORMAT_Y_ONLY_14_BPP */
+        /* val= V4L2_PIX_FMT_Y14; */
+        val = 0;
+        LOGE("Unknown fmt=%d", fmt);
+        break;
+    case CAM_FORMAT_MAX:
+        /* CAM_STREAM_TYPE_DEFAULT,
+         * CAM_STREAM_TYPE_OFFLINE_PROC,
+         * and CAM_STREAM_TYPE_METADATA
+         * set fmt to CAM_FORMAT_MAX*/
+        val = 0;
+        break;
+    default:
+        val = 0;
+        LOGE("Unknown fmt=%d", fmt);
+        break;
+    }
+    LOGD("fmt=%d, val =%d", fmt, val);
+    return val;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_offset_preview
+ *
+ * DESCRIPTION: calculate preview frame offset based on format and
+ *              padding information
+ *
+ * PARAMETERS :
+ *   @stream_info : stream information (format and stream type)
+ *   @dim     : image dimension
+ *   @padding : padding information
+ *   @buf_planes : [out] buffer plane information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_preview(cam_stream_info_t *stream_info,
+                                      cam_dimension_t *dim,
+                                      cam_padding_info_t *padding,
+                                      cam_stream_buf_plane_info_t *buf_planes)
+{
+    int32_t rc = 0;
+    int stride = 0, scanline = 0;
+
+    uint32_t width_padding = 0;
+    uint32_t height_padding = 0;
+
+    /* Per-format plane geometry. Common pattern throughout: offline-proc
+     * streams use the caller-supplied padding in both dimensions; other
+     * stream types use format-specific hardware alignments. */
+    switch (stream_info->fmt) {
+    case CAM_FORMAT_YUV_420_NV12:
+    case CAM_FORMAT_YUV_420_NV21:
+    case CAM_FORMAT_Y_ONLY:
+    case CAM_FORMAT_Y_ONLY_10_BPP:
+    case CAM_FORMAT_Y_ONLY_12_BPP:
+    case CAM_FORMAT_Y_ONLY_14_BPP:
+        /* 2 planes: Y + CbCr */
+        buf_planes->plane_info.num_planes = 2;
+
+        if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+            width_padding =  padding->width_padding;
+            height_padding = CAM_PAD_TO_2;
+        } else {
+            width_padding =  padding->width_padding;
+            height_padding = padding->height_padding;
+        }
+
+        stride = PAD_TO_SIZE(dim->width, width_padding);
+        scanline = PAD_TO_SIZE(dim->height, height_padding);
+
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[0].offset_x = 0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        /* Chroma plane: same stride, half the padded height (4:2:0). */
+        stride = PAD_TO_SIZE(dim->width, width_padding);
+        scanline = PAD_TO_SIZE(dim->height / 2, height_padding);
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].len =
+            (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[1].offset_x = 0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_420_NV21_ADRENO:
+        /* 2 planes: Y + CbCr */
+        buf_planes->plane_info.num_planes = 2;
+
+        if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+            /* Adreno requires 32-aligned stride and scanline. */
+            stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_32);
+            scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_32);
+        } else {
+            stride = PAD_TO_SIZE(dim->width, padding->width_padding);
+            scanline = PAD_TO_SIZE(dim->height, padding->height_padding);
+        }
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline), CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x = 0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        /* Chroma: half-width padded to 32 then doubled for interleaved CbCr. */
+        stride = PAD_TO_SIZE(dim->width / 2, CAM_PAD_TO_32) * 2;
+        scanline = PAD_TO_SIZE(dim->height / 2, CAM_PAD_TO_32);
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline), CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[1].offset_x = 0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_420_YV12:
+        /* 3 planes: Y + Cr + Cb */
+        buf_planes->plane_info.num_planes = 3;
+
+        if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+            stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+            scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_2);
+        } else {
+            stride = PAD_TO_SIZE(dim->width, padding->width_padding);
+            scanline = PAD_TO_SIZE(dim->height, padding->height_padding);
+        }
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[0].offset_x = 0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        /* Planar chroma: each of Cr and Cb is half-size, 16-aligned stride. */
+        stride = PAD_TO_SIZE(stride / 2, CAM_PAD_TO_16);
+        scanline = scanline / 2;
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].len =
+            (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[1].offset_x = 0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width / 2;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+        buf_planes->plane_info.mp[2].offset = 0;
+        buf_planes->plane_info.mp[2].len =
+            (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[2].offset_x = 0;
+        buf_planes->plane_info.mp[2].offset_y = 0;
+        buf_planes->plane_info.mp[2].stride = stride;
+        buf_planes->plane_info.mp[2].scanline = scanline;
+        buf_planes->plane_info.mp[2].width = dim->width / 2;
+        buf_planes->plane_info.mp[2].height = dim->height / 2;
+
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len +
+                        buf_planes->plane_info.mp[2].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_422_NV16:
+    case CAM_FORMAT_YUV_422_NV61:
+        /* 2 planes: Y + CbCr */
+        buf_planes->plane_info.num_planes = 2;
+
+        if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+            stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+            scanline = dim->height;
+        } else {
+            stride = PAD_TO_SIZE(dim->width, padding->width_padding);
+            scanline = PAD_TO_SIZE(dim->height, padding->height_padding);
+        }
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[0].offset_x = 0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        /* 4:2:2 -> chroma plane is full height, same stride as luma. */
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[1].offset_x = 0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height;
+
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_420_NV12_VENUS:
+#ifdef VENUS_PRESENT
+        // using Venus
+        if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+            stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, dim->width);
+            scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12, dim->height);
+        } else {
+            stride = PAD_TO_SIZE(dim->width, padding->width_padding);
+            scanline = PAD_TO_SIZE(dim->height, padding->height_padding);
+        }
+        buf_planes->plane_info.frame_len =
+                VENUS_BUFFER_SIZE(COLOR_FMT_NV12, stride, scanline);
+        buf_planes->plane_info.num_planes = 2;
+        buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+        if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+            stride = VENUS_UV_STRIDE(COLOR_FMT_NV12, dim->width);
+            scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12, dim->height);
+        } else {
+            stride = PAD_TO_SIZE(dim->width, padding->width_padding);
+            scanline = PAD_TO_SIZE(dim->height, padding->height_padding);
+        }
+        /* Chroma length is the remainder of the Venus buffer after luma. */
+        buf_planes->plane_info.mp[1].len =
+                buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].offset_x =0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+        LOGE("Venus hardware not avail, cannot use this format");
+        rc = -1;
+#endif
+        break;
+    case CAM_FORMAT_YUV_420_NV21_VENUS:
+#ifdef VENUS_PRESENT
+        // using Venus
+        if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+            stride = VENUS_Y_STRIDE(COLOR_FMT_NV21, dim->width);
+            scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV21, dim->height);
+        } else {
+            stride = PAD_TO_SIZE(dim->width, padding->width_padding);
+            scanline = PAD_TO_SIZE(dim->height, padding->height_padding);
+        }
+        buf_planes->plane_info.frame_len =
+                VENUS_BUFFER_SIZE(COLOR_FMT_NV21, stride, scanline);
+        buf_planes->plane_info.num_planes = 2;
+        buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+        if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+            stride = VENUS_UV_STRIDE(COLOR_FMT_NV21, dim->width);
+            scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV21, dim->height);
+        } else {
+            stride = PAD_TO_SIZE(dim->width, padding->width_padding);
+            scanline = PAD_TO_SIZE(dim->height, padding->height_padding);
+        }
+        /* Chroma length is the remainder of the Venus buffer after luma. */
+        buf_planes->plane_info.mp[1].len =
+                buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].offset_x =0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+        LOGE("Venus hardware not avail, cannot use this format");
+        rc = -1;
+#endif
+        break;
+    case CAM_FORMAT_YUV_420_NV12_UBWC:
+#ifdef UBWC_PRESENT
+        {
+            int meta_stride = 0,meta_scanline = 0;
+            // using UBWC
+            if (stream_info->stream_type != CAM_STREAM_TYPE_OFFLINE_PROC) {
+                stride = VENUS_Y_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+                scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+            } else {
+                stride = PAD_TO_SIZE(dim->width, padding->width_padding);
+                scanline = PAD_TO_SIZE(dim->height, padding->height_padding);
+            }
+            meta_stride = VENUS_Y_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+            meta_scanline = VENUS_Y_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+
+            buf_planes->plane_info.frame_len =
+                    VENUS_BUFFER_SIZE(COLOR_FMT_NV12_UBWC, stride, scanline);
+            buf_planes->plane_info.num_planes = 2;
+            buf_planes->plane_info.mp[0].offset = 0;
+            buf_planes->plane_info.mp[0].offset_x =0;
+            buf_planes->plane_info.mp[0].offset_y = 0;
+            buf_planes->plane_info.mp[0].stride = stride;
+            buf_planes->plane_info.mp[0].scanline = scanline;
+            buf_planes->plane_info.mp[0].width = dim->width;
+            buf_planes->plane_info.mp[0].height = dim->height;
+            buf_planes->plane_info.mp[0].meta_stride = meta_stride;
+            buf_planes->plane_info.mp[0].meta_scanline = meta_scanline;
+            /* UBWC: each plane = 4K-aligned pixel data + 4K-aligned metadata. */
+            buf_planes->plane_info.mp[0].meta_len =
+                    MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+            buf_planes->plane_info.mp[0].len =
+                    (uint32_t)(MSM_MEDIA_ALIGN((stride * scanline), 4096) +
+                    (buf_planes->plane_info.mp[0].meta_len));
+
+            stride = VENUS_UV_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+            scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+            meta_stride = VENUS_UV_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+            meta_scanline = VENUS_UV_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+            buf_planes->plane_info.mp[1].offset = 0;
+            buf_planes->plane_info.mp[1].offset_x =0;
+            buf_planes->plane_info.mp[1].offset_y = 0;
+            buf_planes->plane_info.mp[1].stride = stride;
+            buf_planes->plane_info.mp[1].scanline = scanline;
+            buf_planes->plane_info.mp[1].width = dim->width;
+            buf_planes->plane_info.mp[1].height = dim->height/2;
+            buf_planes->plane_info.mp[1].meta_stride = meta_stride;
+            buf_planes->plane_info.mp[1].meta_scanline = meta_scanline;
+            buf_planes->plane_info.mp[1].meta_len =
+                    MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+            buf_planes->plane_info.mp[1].len =
+                    buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+        }
+#else
+        LOGE("UBWC hardware not avail, cannot use this format");
+        rc = -1;
+#endif
+        break;
+
+    default:
+        LOGE("Invalid cam_format for preview %d",
+                    stream_info->fmt);
+        rc = -1;
+        break;
+    }
+
+    return rc;
+}
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_offset_post_view
+ *
+ * DESCRIPTION: calculate postview frame offset based on format and
+ *              padding information
+ *
+ * PARAMETERS :
+ *   @fmt     : image format
+ *   @dim     : image dimension
+ *   @buf_planes : [out] buffer plane information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_post_view(cam_format_t fmt,
+                                      cam_dimension_t *dim,
+                                      cam_stream_buf_plane_info_t *buf_planes)
+{
+    int32_t rc = 0;
+    int stride = 0, scanline = 0;
+
+    switch (fmt) {
+    case CAM_FORMAT_YUV_420_NV12:
+    case CAM_FORMAT_YUV_420_NV21:
+    case CAM_FORMAT_Y_ONLY:
+    case CAM_FORMAT_Y_ONLY_10_BPP:
+    case CAM_FORMAT_Y_ONLY_12_BPP:
+    case CAM_FORMAT_Y_ONLY_14_BPP:
+        /* 2 planes: Y + CbCr */
+        buf_planes->plane_info.num_planes = 2;
+
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_64);
+        scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_64);
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[0].offset_x = 0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_64);
+        scanline = PAD_TO_SIZE(dim->height / 2, CAM_PAD_TO_64);
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].len =
+            (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[1].offset_x = 0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_420_NV21_ADRENO:
+        /* 2 planes: Y + CbCr */
+        buf_planes->plane_info.num_planes = 2;
+
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_32);
+        scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_32);
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline), CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x = 0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        stride = PAD_TO_SIZE(dim->width / 2, CAM_PAD_TO_32) * 2;
+        scanline = PAD_TO_SIZE(dim->height / 2, CAM_PAD_TO_32);
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline), CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[1].offset_x = 0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_420_YV12:
+        /* 3 planes: Y + Cr + Cb */
+        buf_planes->plane_info.num_planes = 3;
+
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+        scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_2);
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[0].offset_x = 0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        stride = PAD_TO_SIZE(stride / 2, CAM_PAD_TO_16);
+        scanline = scanline / 2;
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].len =
+            (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[1].offset_x = 0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width / 2;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+        buf_planes->plane_info.mp[2].offset = 0;
+        buf_planes->plane_info.mp[2].len =
+            (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[2].offset_x = 0;
+        buf_planes->plane_info.mp[2].offset_y = 0;
+        buf_planes->plane_info.mp[2].stride = stride;
+        buf_planes->plane_info.mp[2].scanline = scanline;
+        buf_planes->plane_info.mp[2].width = dim->width / 2;
+        buf_planes->plane_info.mp[2].height = dim->height / 2;
+
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len +
+                        buf_planes->plane_info.mp[2].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_422_NV16:
+    case CAM_FORMAT_YUV_422_NV61:
+        /* 2 planes: Y + CbCr */
+        buf_planes->plane_info.num_planes = 2;
+
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+        scanline = dim->height;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[0].offset_x = 0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[1].offset_x = 0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height;
+
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_420_NV12_VENUS:
+#ifdef VENUS_PRESENT
+        // using Venus
+        stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, dim->width);
+        scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12, dim->height);
+
+        buf_planes->plane_info.frame_len =
+            VENUS_BUFFER_SIZE(COLOR_FMT_NV12, dim->width, dim->height);
+        buf_planes->plane_info.num_planes = 2;
+        buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+        stride = VENUS_UV_STRIDE(COLOR_FMT_NV12, dim->width);
+        scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12, dim->height);
+        buf_planes->plane_info.mp[1].len =
+            buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].offset_x =0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+        LOGE("Venus hardware not avail, cannot use this format");
+        rc = -1;
+#endif
+        break;
+    case CAM_FORMAT_YUV_420_NV21_VENUS:
+#ifdef VENUS_PRESENT
+        // using Venus
+        stride = VENUS_Y_STRIDE(COLOR_FMT_NV21, dim->width);
+        scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV21, dim->height);
+        buf_planes->plane_info.frame_len =
+                VENUS_BUFFER_SIZE(COLOR_FMT_NV21, dim->width, dim->height);
+        buf_planes->plane_info.num_planes = 2;
+        buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+        stride = VENUS_UV_STRIDE(COLOR_FMT_NV21, dim->width);
+        scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV21, dim->height);
+        buf_planes->plane_info.mp[1].len =
+                buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].offset_x =0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+        LOGE("Venus hardware not avail, cannot use this format");
+        rc = -1;
+#endif
+        break;
+    case CAM_FORMAT_YUV_420_NV12_UBWC:
+#ifdef UBWC_PRESENT
+        {
+            int meta_stride = 0,meta_scanline = 0;
+            // using UBWC
+            stride = VENUS_Y_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+            scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+            meta_stride = VENUS_Y_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+            meta_scanline = VENUS_Y_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+
+            buf_planes->plane_info.frame_len =
+                    VENUS_BUFFER_SIZE(COLOR_FMT_NV12_UBWC, dim->width, dim->height);
+            buf_planes->plane_info.num_planes = 2;
+            buf_planes->plane_info.mp[0].offset = 0;
+            buf_planes->plane_info.mp[0].offset_x =0;
+            buf_planes->plane_info.mp[0].offset_y = 0;
+            buf_planes->plane_info.mp[0].stride = stride;
+            buf_planes->plane_info.mp[0].scanline = scanline;
+            buf_planes->plane_info.mp[0].width = dim->width;
+            buf_planes->plane_info.mp[0].height = dim->height;
+            buf_planes->plane_info.mp[0].meta_stride = meta_stride;
+            buf_planes->plane_info.mp[0].meta_scanline = meta_scanline;
+            buf_planes->plane_info.mp[0].meta_len =
+                    MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+            buf_planes->plane_info.mp[0].len =
+                    (uint32_t)(MSM_MEDIA_ALIGN((stride * scanline), 4096) +
+                    (buf_planes->plane_info.mp[0].meta_len));
+
+            stride = VENUS_UV_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+            scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+            meta_stride = VENUS_UV_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+            meta_scanline = VENUS_UV_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+            buf_planes->plane_info.mp[1].offset = 0;
+            buf_planes->plane_info.mp[1].offset_x =0;
+            buf_planes->plane_info.mp[1].offset_y = 0;
+            buf_planes->plane_info.mp[1].stride = stride;
+            buf_planes->plane_info.mp[1].scanline = scanline;
+            buf_planes->plane_info.mp[1].width = dim->width;
+            buf_planes->plane_info.mp[1].height = dim->height/2;
+            buf_planes->plane_info.mp[1].meta_stride = meta_stride;
+            buf_planes->plane_info.mp[1].meta_scanline = meta_scanline;
+            buf_planes->plane_info.mp[1].meta_len =
+                    MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+            buf_planes->plane_info.mp[1].len =
+                    buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+        }
+#else
+        LOGE("UBWC hardware not avail, cannot use this format");
+        rc = -1;
+#endif
+        break;
+    default:
+        LOGE("Invalid cam_format for preview %d",
+                    fmt);
+        rc = -1;
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_offset_snapshot
+ *
+ * DESCRIPTION: calculate snapshot/postproc frame offset based on format and
+ *              padding information
+ *
+ * PARAMETERS :
+ *   @fmt     : image format
+ *   @dim     : image dimension
+ *   @padding : padding information
+ *   @buf_planes : [out] buffer plane information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_snapshot(cam_format_t fmt,
+                                       cam_dimension_t *dim,
+                                       cam_padding_info_t *padding,
+                                       cam_stream_buf_plane_info_t *buf_planes)
+{
+    int32_t rc = 0;
+    uint8_t isAFamily = mm_camera_util_chip_is_a_family();
+    int offset_x = 0, offset_y = 0;
+    int stride = 0, scanline = 0;
+
+    /* Legacy A-family chips use a fixed layout: width as-is, height padded
+     * to 16, with the pad applied symmetrically (top and bottom, hence the
+     * "double padding" below).  Newer chips derive stride/scanline from the
+     * caller-supplied padding info, reserving offset_x/offset_y margins on
+     * both sides of the image. */
+    if (isAFamily) {
+        stride = dim->width;
+        scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_16);
+        offset_x = 0;
+        offset_y = scanline - dim->height;
+        scanline += offset_y; /* double padding */
+    } else {
+        offset_x = PAD_TO_SIZE(padding->offset_info.offset_x,
+                padding->plane_padding);
+        offset_y = PAD_TO_SIZE(padding->offset_info.offset_y,
+                padding->plane_padding);
+        stride = PAD_TO_SIZE((dim->width +
+                (2 * offset_x)), padding->width_padding);
+        scanline = PAD_TO_SIZE((dim->height +
+                (2 * offset_y)), padding->height_padding);
+    }
+
+    switch (fmt) {
+    case CAM_FORMAT_YUV_420_NV12:
+    case CAM_FORMAT_YUV_420_NV21:
+    case CAM_FORMAT_Y_ONLY:
+    case CAM_FORMAT_Y_ONLY_10_BPP:
+    case CAM_FORMAT_Y_ONLY_12_BPP:
+    case CAM_FORMAT_Y_ONLY_14_BPP:
+        /* 2 planes: Y + CbCr */
+        /* NOTE(review): the Y_ONLY formats still allocate a half-height
+         * chroma plane here, same as NV12/NV21 — presumably a conservative
+         * over-allocation; confirm downstream consumers expect this. */
+        buf_planes->plane_info.num_planes = 2;
+
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                padding->plane_padding);
+        buf_planes->plane_info.mp[0].offset =
+                PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+                padding->plane_padding);
+        buf_planes->plane_info.mp[0].offset_x = offset_x;
+        buf_planes->plane_info.mp[0].offset_y = offset_y;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        /* Chroma plane: same stride as luma, half the scanlines (4:2:0). */
+        scanline = scanline/2;
+        buf_planes->plane_info.mp[1].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                padding->plane_padding);
+        buf_planes->plane_info.mp[1].offset =
+                PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+                padding->plane_padding);
+        buf_planes->plane_info.mp[1].offset_x = offset_x;
+        buf_planes->plane_info.mp[1].offset_y = offset_y;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+        /* Total frame length rounded up to a 4K page boundary. */
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                buf_planes->plane_info.mp[1].len,
+                CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_420_YV12:
+        /* 3 planes: Y + Cr + Cb */
+        buf_planes->plane_info.num_planes = 3;
+
+        buf_planes->plane_info.mp[0].offset =
+                PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[0].offset_x = offset_x;
+        buf_planes->plane_info.mp[0].offset_y = offset_y;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        /* YV12 chroma planes use half the luma stride, re-padded to 16
+         * (YV12 spec requires 16-aligned chroma stride), half scanlines. */
+        stride = PAD_TO_SIZE(stride / 2, CAM_PAD_TO_16);
+        scanline = scanline / 2;
+        buf_planes->plane_info.mp[1].offset =
+                PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[1].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[1].offset_x = offset_x;
+        buf_planes->plane_info.mp[1].offset_y = offset_y;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width / 2;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+        buf_planes->plane_info.mp[2].offset =
+                PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[2].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[2].offset_x = offset_x;
+        buf_planes->plane_info.mp[2].offset_y = offset_y;
+        buf_planes->plane_info.mp[2].stride = stride;
+        buf_planes->plane_info.mp[2].scanline = scanline;
+        buf_planes->plane_info.mp[2].width = dim->width / 2;
+        buf_planes->plane_info.mp[2].height = dim->height / 2;
+
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len +
+                        buf_planes->plane_info.mp[2].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_422_NV16:
+    case CAM_FORMAT_YUV_422_NV61:
+        /* 2 planes: Y + CbCr */
+        /* 4:2:2 — chroma plane is full height (same scanline as luma),
+         * unlike the half-height 4:2:0 cases above. */
+        buf_planes->plane_info.num_planes = 2;
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[0].offset =
+                PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[0].offset_x = offset_x;
+        buf_planes->plane_info.mp[0].offset_y = offset_y;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        buf_planes->plane_info.mp[1].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[1].offset =
+                PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[1].offset_x = offset_x;
+        buf_planes->plane_info.mp[1].offset_y = offset_y;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height;
+
+        buf_planes->plane_info.frame_len = PAD_TO_SIZE(
+            buf_planes->plane_info.mp[0].len + buf_planes->plane_info.mp[1].len,
+            CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_420_NV12_UBWC:
+#ifdef UBWC_PRESENT
+        {
+            int meta_stride = 0,meta_scanline = 0;
+            // using UBWC
+            /* UBWC layout: each plane is a metadata block followed by the
+             * compressed pixel block, both 4096-byte aligned.  Strides and
+             * sizes come from the Venus msm_media_info.h macros so they
+             * match the hardware's expectations exactly. */
+            stride = VENUS_Y_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+            scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+            meta_stride = VENUS_Y_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+            meta_scanline = VENUS_Y_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+
+            buf_planes->plane_info.frame_len =
+                    VENUS_BUFFER_SIZE(COLOR_FMT_NV12_UBWC, dim->width, dim->height);
+            buf_planes->plane_info.num_planes = 2;
+            buf_planes->plane_info.mp[0].offset = 0;
+            buf_planes->plane_info.mp[0].offset_x = 0;
+            buf_planes->plane_info.mp[0].offset_y = 0;
+            buf_planes->plane_info.mp[0].stride = stride;
+            buf_planes->plane_info.mp[0].scanline = scanline;
+            buf_planes->plane_info.mp[0].width = dim->width;
+            buf_planes->plane_info.mp[0].height = dim->height;
+            buf_planes->plane_info.mp[0].meta_stride = meta_stride;
+            buf_planes->plane_info.mp[0].meta_scanline = meta_scanline;
+            /* Plane len = 4K-aligned pixel data + 4K-aligned metadata. */
+            buf_planes->plane_info.mp[0].meta_len =
+                    MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+            buf_planes->plane_info.mp[0].len =
+                    (uint32_t)(MSM_MEDIA_ALIGN((stride * scanline), 4096) +
+                    (buf_planes->plane_info.mp[0].meta_len));
+
+            stride = VENUS_UV_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+            scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+            meta_stride = VENUS_UV_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+            meta_scanline = VENUS_UV_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+            buf_planes->plane_info.mp[1].offset = 0;
+            buf_planes->plane_info.mp[1].offset_x =0;
+            buf_planes->plane_info.mp[1].offset_y = 0;
+            buf_planes->plane_info.mp[1].stride = stride;
+            buf_planes->plane_info.mp[1].scanline = scanline;
+            buf_planes->plane_info.mp[1].width = dim->width;
+            buf_planes->plane_info.mp[1].height = dim->height/2;
+            buf_planes->plane_info.mp[1].meta_stride = meta_stride;
+            buf_planes->plane_info.mp[1].meta_scanline = meta_scanline;
+            buf_planes->plane_info.mp[1].meta_len =
+                    MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+            /* Chroma plane gets whatever remains of the Venus-computed
+             * total buffer after the luma plane. */
+            buf_planes->plane_info.mp[1].len =
+                    buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+        }
+#else
+        LOGE("UBWC hardware not avail, cannot use this format");
+        rc = -1;
+#endif
+        break;
+    case CAM_FORMAT_YUV_420_NV12_VENUS:
+#ifdef VENUS_PRESENT
+        // using Venus
+        /* Uncompressed Venus NV12: strides/scanlines and total size from
+         * the msm_media_info.h macros; chroma len is the remainder of the
+         * Venus buffer after the luma plane. */
+        stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, dim->width);
+        scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12, dim->height);
+
+        buf_planes->plane_info.frame_len =
+                VENUS_BUFFER_SIZE(COLOR_FMT_NV12, dim->width, dim->height);
+        buf_planes->plane_info.num_planes = 2;
+        buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+        stride = VENUS_UV_STRIDE(COLOR_FMT_NV12, dim->width);
+        scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12, dim->height);
+        buf_planes->plane_info.mp[1].len =
+                buf_planes->plane_info.frame_len -
+                buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].offset_x =0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+        /* NOTE(review): unlike every other unsupported-format branch in
+         * this function, this one only logs and leaves rc == 0, so the
+         * call reports success with buf_planes left unpopulated.  Looks
+         * unintentional (NV21_VENUS below sets rc = -1) — confirm. */
+        LOGD("Video format VENUS is not supported = %d",
+                 fmt);
+#endif
+        break;
+    case CAM_FORMAT_YUV_420_NV21_VENUS:
+#ifdef VENUS_PRESENT
+        // using Venus
+        /* Same layout scheme as NV12_VENUS above, with NV21 macros. */
+        stride = VENUS_Y_STRIDE(COLOR_FMT_NV21, dim->width);
+        scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV21, dim->height);
+        buf_planes->plane_info.frame_len =
+                VENUS_BUFFER_SIZE(COLOR_FMT_NV21, dim->width, dim->height);
+        buf_planes->plane_info.num_planes = 2;
+        buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+        stride = VENUS_UV_STRIDE(COLOR_FMT_NV21, dim->width);
+        scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV21, dim->height);
+        buf_planes->plane_info.mp[1].len =
+                buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].offset_x =0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+        LOGE("Venus hardware not avail, cannot use this format");
+        rc = -1;
+#endif
+        break;
+    default:
+        LOGE("Invalid cam_format for snapshot %d",
+                    fmt);
+        rc = -1;
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_offset_raw
+ *
+ * DESCRIPTION: calculate raw frame offset based on format and padding information
+ *
+ * PARAMETERS :
+ *   @fmt     : image format
+ *   @dim     : image dimension
+ *   @padding : padding information
+ *   @buf_planes : [out] buffer plane information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_raw(cam_format_t fmt,
+                                  cam_dimension_t *dim,
+                                  cam_padding_info_t *padding,
+                                  cam_stream_buf_plane_info_t *buf_planes)
+{
+    int32_t rc = 0;
+
+    if ((NULL == dim) || (NULL == padding) || (NULL == buf_planes)) {
+        return -1;
+    }
+
+    int32_t stride = PAD_TO_SIZE(dim->width, (int32_t)padding->width_padding);
+    int32_t stride_in_bytes = stride;
+    int32_t scanline = PAD_TO_SIZE(dim->height, (int32_t)padding->height_padding);
+
+    switch (fmt) {
+    case CAM_FORMAT_YUV_420_NV21:
+        /* 2 planes: Y + CbCr */
+        buf_planes->plane_info.num_planes = 2;
+
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                padding->plane_padding);
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].offset_x = 0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].stride_in_bytes = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        scanline = scanline / 2;
+        buf_planes->plane_info.mp[1].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                padding->plane_padding);
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].offset_x = 0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].stride_in_bytes = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                buf_planes->plane_info.mp[1].len,
+                CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_RAW_8BIT_YUYV:
+    case CAM_FORMAT_YUV_RAW_8BIT_YVYU:
+    case CAM_FORMAT_YUV_RAW_8BIT_UYVY:
+    case CAM_FORMAT_YUV_RAW_8BIT_VYUY:
+    case CAM_FORMAT_JPEG_RAW_8BIT:
+        /* 1 plane */
+        /* Every 16 pixels occupy 16 bytes */
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+        stride_in_bytes = stride * 2;
+        buf_planes->plane_info.num_planes = 1;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width =
+                (int32_t)buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[0].height = 1;
+        break;
+    case CAM_FORMAT_META_RAW_8BIT:
+        // Every 16 pixels occupy 16 bytes
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+        stride_in_bytes = stride * 2;
+        buf_planes->plane_info.num_planes = 1;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        break;
+
+    case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG:
+    case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GRBG:
+    case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_RGGB:
+    case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_BGGR:
+    case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GREY:
+    case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG:
+    case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GRBG:
+    case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_RGGB:
+    case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_BGGR:
+    case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GREY:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GREY:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GREY:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_BGGR:
+        /* 1 plane */
+        /* Every 16 pixels occupy 16 bytes */
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+        stride_in_bytes = stride;
+        buf_planes->plane_info.num_planes = 1;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[0].height = 1;
+        break;
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG:
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GRBG:
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_RGGB:
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_BGGR:
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GREY:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GREY:
+        /* Every 12 pixels occupy 16 bytes */
+        stride = (dim->width + 11)/12 * 12;
+        stride_in_bytes = stride * 8 / 6;
+        buf_planes->plane_info.num_planes = 1;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[0].height = 1;
+        break;
+    case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG:
+    case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GRBG:
+    case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_RGGB:
+    case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_BGGR:
+    case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GREY:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GREY:
+        /* Every 10 pixels occupy 16 bytes */
+        stride = (dim->width + 9)/10 * 10;
+        stride_in_bytes = stride * 8 / 5;
+        buf_planes->plane_info.num_planes = 1;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[0].height = 1;
+        break;
+    case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG:
+    case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GRBG:
+    case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_RGGB:
+    case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_BGGR:
+    case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GREY:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GREY:
+        /* Every 64 pixels occupy 80 bytes */
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_4);
+        stride_in_bytes = PAD_TO_SIZE(stride * 5 / 4, CAM_PAD_TO_8);
+        buf_planes->plane_info.num_planes = 1;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[0].height = 1;
+        break;
+    case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG:
+    case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GRBG:
+    case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_RGGB:
+    case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_BGGR:
+    case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GREY:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GREY:
+        /* Every 32 pixels occupy 48 bytes */
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_32);
+        stride_in_bytes = stride * 3 / 2;
+        buf_planes->plane_info.num_planes = 1;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[0].height = 1;
+        break;
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_14BPP_BGGR:
+        /* Every 8 pixels occupy 16 bytes */
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_8);
+        stride_in_bytes = stride * 2;
+        buf_planes->plane_info.num_planes = 1;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[0].height = 1;
+        break;
+    case CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GBRG:
+    case CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GRBG:
+    case CAM_FORMAT_BAYER_MIPI_RAW_14BPP_RGGB:
+    case CAM_FORMAT_BAYER_MIPI_RAW_14BPP_BGGR:
+    case CAM_FORMAT_BAYER_MIPI_RAW_14BPP_GREY:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_14BPP_GREY:
+        /* Every 64 pixels occupy 112 bytes */
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_64);
+        stride_in_bytes = stride * 7 / 4;
+        buf_planes->plane_info.num_planes = 1;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+                padding->plane_padding);
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[0].height = 1;
+        break;
+    case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GBRG:
+    case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GRBG:
+    case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_RGGB:
+    case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_BGGR:
+    case CAM_FORMAT_BAYER_QCOM_RAW_14BPP_GREY:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_14BPP_GREY:
+        /* Every 16 pixels occupy 32 bytes */
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+        stride_in_bytes = stride * 2;
+        buf_planes->plane_info.num_planes = 1;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride_in_bytes * scanline),
+                padding->plane_padding);
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].stride_in_bytes = stride_in_bytes;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = (int32_t)buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[0].height = 1;
+        break;
+    default:
+        LOGE("Invalid cam_format %d for raw stream",
+                    fmt);
+        rc = -1;
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_offset_video
+ *
+ * DESCRIPTION: calculate video frame offset based on format and
+ *              padding information
+ *
+ * PARAMETERS :
+ *   @fmt     : image format
+ *   @dim     : image dimension
+ *   @buf_planes : [out] buffer plane information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_video(cam_format_t fmt,
+        cam_dimension_t *dim, cam_stream_buf_plane_info_t *buf_planes)
+{
+    int32_t rc = 0;
+    int stride = 0, scanline = 0;
+
+    #ifdef UBWC_PRESENT
+    /* UBWC frames carry a compression-metadata section per plane in
+     * addition to the pixel data; track its stride/scanline separately. */
+    int meta_stride = 0,meta_scanline = 0;
+    #endif
+
+
+    switch (fmt) {
+        case CAM_FORMAT_YUV_420_NV12:
+        case CAM_FORMAT_Y_ONLY:
+        case CAM_FORMAT_Y_ONLY_10_BPP:
+        case CAM_FORMAT_Y_ONLY_12_BPP:
+        case CAM_FORMAT_Y_ONLY_14_BPP:
+            /* NOTE(review): the Y-only formats reuse the NV12 layout below
+             * and are still given a second (chroma) plane -- confirm this
+             * over-allocation is intentional for Y-only video. */
+            buf_planes->plane_info.num_planes = 2;
+
+            /* Plane 0 (Y): full width x height, length padded to 2K */
+            stride = dim->width;
+            scanline = dim->height;
+            buf_planes->plane_info.mp[0].len =
+                    PAD_TO_SIZE((uint32_t)(stride * scanline),
+                    CAM_PAD_TO_2K);
+            buf_planes->plane_info.mp[0].offset = 0;
+            buf_planes->plane_info.mp[0].offset_x =0;
+            buf_planes->plane_info.mp[0].offset_y = 0;
+            buf_planes->plane_info.mp[0].stride = stride;
+            buf_planes->plane_info.mp[0].scanline = scanline;
+            buf_planes->plane_info.mp[0].width = dim->width;
+            buf_planes->plane_info.mp[0].height = dim->height;
+
+            /* Plane 1 (interleaved CbCr): full width, half height (4:2:0) */
+            stride = dim->width;
+            scanline = dim->height / 2;
+            buf_planes->plane_info.mp[1].len =
+                    PAD_TO_SIZE((uint32_t)(stride * scanline),
+                    CAM_PAD_TO_2K);
+            buf_planes->plane_info.mp[1].offset = 0;
+            buf_planes->plane_info.mp[1].offset_x =0;
+            buf_planes->plane_info.mp[1].offset_y = 0;
+            buf_planes->plane_info.mp[1].stride = stride;
+            buf_planes->plane_info.mp[1].scanline = scanline;
+            buf_planes->plane_info.mp[1].width = dim->width;
+            buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+            /* Whole frame is the two planes, padded out to 4K */
+            buf_planes->plane_info.frame_len =
+                    PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                    buf_planes->plane_info.mp[1].len,
+                    CAM_PAD_TO_4K);
+            break;
+        case CAM_FORMAT_YUV_420_NV12_VENUS:
+#ifdef VENUS_PRESENT
+            // using Venus
+            /* Strides/scanlines and total size come from the video-driver
+             * macros so the buffer matches the encoder's expectations. */
+            stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, dim->width);
+            scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12, dim->height);
+
+            buf_planes->plane_info.frame_len =
+                    VENUS_BUFFER_SIZE(COLOR_FMT_NV12, dim->width, dim->height);
+            buf_planes->plane_info.num_planes = 2;
+            buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+            buf_planes->plane_info.mp[0].offset = 0;
+            buf_planes->plane_info.mp[0].offset_x =0;
+            buf_planes->plane_info.mp[0].offset_y = 0;
+            buf_planes->plane_info.mp[0].stride = stride;
+            buf_planes->plane_info.mp[0].scanline = scanline;
+            buf_planes->plane_info.mp[0].width = dim->width;
+            buf_planes->plane_info.mp[0].height = dim->height;
+            stride = VENUS_UV_STRIDE(COLOR_FMT_NV12, dim->width);
+            scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12, dim->height);
+            /* Chroma plane length is whatever remains of the driver-sized
+             * frame after the luma plane. */
+            buf_planes->plane_info.mp[1].len =
+                    buf_planes->plane_info.frame_len -
+                    buf_planes->plane_info.mp[0].len;
+            buf_planes->plane_info.mp[1].offset = 0;
+            buf_planes->plane_info.mp[1].offset_x =0;
+            buf_planes->plane_info.mp[1].offset_y = 0;
+            buf_planes->plane_info.mp[1].stride = stride;
+            buf_planes->plane_info.mp[1].scanline = scanline;
+            buf_planes->plane_info.mp[1].width = dim->width;
+            buf_planes->plane_info.mp[1].height = dim->height/2;
+#else
+            LOGD("Video format VENUS is not supported = %d",
+                     fmt);
+#endif
+            break;
+        case CAM_FORMAT_YUV_420_NV21_VENUS:
+#ifdef VENUS_PRESENT
+            // using Venus
+            /* Same layout as NV12_VENUS above, only Cr/Cb order differs */
+            stride = VENUS_Y_STRIDE(COLOR_FMT_NV21, dim->width);
+            scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV21, dim->height);
+
+            buf_planes->plane_info.frame_len =
+                    VENUS_BUFFER_SIZE(COLOR_FMT_NV21, dim->width, dim->height);
+            buf_planes->plane_info.num_planes = 2;
+            buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+            buf_planes->plane_info.mp[0].offset = 0;
+            buf_planes->plane_info.mp[0].offset_x =0;
+            buf_planes->plane_info.mp[0].offset_y = 0;
+            buf_planes->plane_info.mp[0].stride = stride;
+            buf_planes->plane_info.mp[0].scanline = scanline;
+            buf_planes->plane_info.mp[0].width = dim->width;
+            buf_planes->plane_info.mp[0].height = dim->height;
+            stride = VENUS_UV_STRIDE(COLOR_FMT_NV21, dim->width);
+            scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV21, dim->height);
+            buf_planes->plane_info.mp[1].len =
+                    buf_planes->plane_info.frame_len -
+                    buf_planes->plane_info.mp[0].len;
+            buf_planes->plane_info.mp[1].offset = 0;
+            buf_planes->plane_info.mp[1].offset_x =0;
+            buf_planes->plane_info.mp[1].offset_y = 0;
+            buf_planes->plane_info.mp[1].stride = stride;
+            buf_planes->plane_info.mp[1].scanline = scanline;
+            buf_planes->plane_info.mp[1].width = dim->width;
+            buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+            LOGD("Video format VENUS is not supported = %d",
+                     fmt);
+#endif
+            break;
+        case CAM_FORMAT_YUV_420_NV12_UBWC:
+#ifdef UBWC_PRESENT
+            // using UBWC
+            stride = VENUS_Y_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+            scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+            meta_stride = VENUS_Y_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+            meta_scanline = VENUS_Y_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+
+            buf_planes->plane_info.frame_len =
+                    VENUS_BUFFER_SIZE(COLOR_FMT_NV12_UBWC, dim->width, dim->height);
+            buf_planes->plane_info.num_planes = 2;
+            buf_planes->plane_info.mp[0].offset = 0;
+            buf_planes->plane_info.mp[0].offset_x =0;
+            buf_planes->plane_info.mp[0].offset_y = 0;
+            buf_planes->plane_info.mp[0].stride = stride;
+            buf_planes->plane_info.mp[0].scanline = scanline;
+            buf_planes->plane_info.mp[0].width = dim->width;
+            buf_planes->plane_info.mp[0].height = dim->height;
+            buf_planes->plane_info.mp[0].meta_stride = meta_stride;
+            buf_planes->plane_info.mp[0].meta_scanline = meta_scanline;
+            /* Plane 0 length = 4K-aligned pixel data + 4K-aligned metadata */
+            buf_planes->plane_info.mp[0].meta_len =
+                    MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+            buf_planes->plane_info.mp[0].len =
+                    (uint32_t)(MSM_MEDIA_ALIGN((stride * scanline), 4096) +
+                    (buf_planes->plane_info.mp[0].meta_len));
+
+            stride = VENUS_UV_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+            scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+            meta_stride = VENUS_UV_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+            meta_scanline = VENUS_UV_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+
+            buf_planes->plane_info.mp[1].offset = 0;
+            buf_planes->plane_info.mp[1].offset_x =0;
+            buf_planes->plane_info.mp[1].offset_y = 0;
+            buf_planes->plane_info.mp[1].stride = stride;
+            buf_planes->plane_info.mp[1].scanline = scanline;
+            buf_planes->plane_info.mp[1].width = dim->width;
+            buf_planes->plane_info.mp[1].height = dim->height/2;
+            buf_planes->plane_info.mp[1].meta_stride = meta_stride;
+            buf_planes->plane_info.mp[1].meta_scanline = meta_scanline;
+            buf_planes->plane_info.mp[1].meta_len =
+                    MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+            /* Chroma plane takes the remainder of the driver-sized frame */
+            buf_planes->plane_info.mp[1].len =
+                    buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+
+#else
+            LOGD("Video format UBWC is not supported = %d",
+                     fmt);
+            rc = -1;
+#endif
+            break;
+        default:
+            LOGD("Invalid Video Format = %d", fmt);
+            rc = -1;
+            break;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_offset_metadata
+ *
+ * DESCRIPTION: calculate metadata frame offset based on format and
+ *              padding information
+ *
+ * PARAMETERS :
+ *   @dim     : image dimension
+ *   @padding : padding information
+ *   @buf_planes : [out] buffer plane information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_metadata(cam_dimension_t *dim,
+                                       cam_padding_info_t *padding,
+                                       cam_stream_buf_plane_info_t *buf_planes)
+{
+    int32_t rc = 0;
+    buf_planes->plane_info.num_planes = 1;
+    buf_planes->plane_info.mp[0].offset = 0;
+    buf_planes->plane_info.mp[0].len =
+            PAD_TO_SIZE((uint32_t)(dim->width * dim->height),
+                    padding->plane_padding);
+    buf_planes->plane_info.frame_len =
+        buf_planes->plane_info.mp[0].len;
+
+    buf_planes->plane_info.mp[0].offset_x =0;
+    buf_planes->plane_info.mp[0].offset_y = 0;
+    buf_planes->plane_info.mp[0].stride = dim->width;
+    buf_planes->plane_info.mp[0].scanline = dim->height;
+    buf_planes->plane_info.mp[0].width = dim->width;
+    buf_planes->plane_info.mp[0].height = dim->height;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_offset_analysis
+ *
+ * DESCRIPTION: calculate analysis frame offset based on format and
+ *              padding information
+ *
+ * PARAMETERS :
+ *   @fmt     : image format
+ *   @dim     : image dimension
+ *   @padding : padding information
+ *   @buf_planes : [out] buffer plane information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_analysis(cam_format_t fmt,
+                                       cam_dimension_t *dim,
+                                       cam_padding_info_t *padding,
+                                       cam_stream_buf_plane_info_t *buf_planes)
+{
+    int32_t rc = 0;
+    int32_t offset_x = 0, offset_y = 0;
+    int32_t stride, scanline;
+
+    /* Clip to minimum supported bytes per line */
+    if ((uint32_t)dim->width < padding->min_stride) {
+        stride = (int32_t)padding->min_stride;
+    } else {
+        stride = dim->width;
+    }
+
+    /* Likewise clip the scanline count to the hardware minimum */
+    if ((uint32_t)dim->height < padding->min_scanline) {
+      scanline = (int32_t)padding->min_scanline;
+    } else {
+      scanline = dim->height;
+    }
+
+    /* Round both up to the platform's width/height padding requirements */
+    stride = PAD_TO_SIZE(stride, padding->width_padding);
+    scanline = PAD_TO_SIZE(scanline, padding->height_padding);
+
+    switch (fmt) {
+    case CAM_FORMAT_YUV_420_NV12:
+    case CAM_FORMAT_YUV_420_NV21:
+        /* 2 planes: Y + CbCr */
+        buf_planes->plane_info.num_planes = 2;
+
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[0].offset =
+                PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[0].offset_x = offset_x;
+        buf_planes->plane_info.mp[0].offset_y = offset_y;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        /* Chroma plane: same stride, half the scanlines (4:2:0) */
+        scanline = scanline / 2;
+        buf_planes->plane_info.mp[1].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[1].offset =
+                PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[1].offset_x = offset_x;
+        buf_planes->plane_info.mp[1].offset_y = offset_y;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_420_YV12:
+        /* 3 planes: Y + Cr + Cb */
+        buf_planes->plane_info.num_planes = 3;
+
+        buf_planes->plane_info.mp[0].offset =
+                PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[0].offset_x = offset_x;
+        buf_planes->plane_info.mp[0].offset_y = offset_y;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        /* YV12 requires each chroma stride to be 16-byte aligned */
+        stride = PAD_TO_SIZE(stride / 2, CAM_PAD_TO_16);
+        scanline = scanline / 2;
+        buf_planes->plane_info.mp[1].offset =
+                PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[1].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[1].offset_x = offset_x;
+        buf_planes->plane_info.mp[1].offset_y = offset_y;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width / 2;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+
+        buf_planes->plane_info.mp[2].offset =
+                PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[2].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[2].offset_x = offset_x;
+        buf_planes->plane_info.mp[2].offset_y = offset_y;
+        buf_planes->plane_info.mp[2].stride = stride;
+        buf_planes->plane_info.mp[2].scanline = scanline;
+        buf_planes->plane_info.mp[2].width = dim->width / 2;
+        buf_planes->plane_info.mp[2].height = dim->height / 2;
+
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len +
+                        buf_planes->plane_info.mp[2].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_422_NV16:
+    case CAM_FORMAT_YUV_422_NV61:
+        /* 2 planes: Y + CbCr; 4:2:2 so chroma plane is full height */
+        buf_planes->plane_info.num_planes = 2;
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[0].offset =
+                PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[0].offset_x = offset_x;
+        buf_planes->plane_info.mp[0].offset_y = offset_y;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+
+        buf_planes->plane_info.mp[1].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[1].offset =
+                PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[1].offset_x = offset_x;
+        buf_planes->plane_info.mp[1].offset_y = offset_y;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height;
+
+        buf_planes->plane_info.frame_len = PAD_TO_SIZE(
+            buf_planes->plane_info.mp[0].len + buf_planes->plane_info.mp[1].len,
+            CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_Y_ONLY:
+    case CAM_FORMAT_Y_ONLY_10_BPP:
+    case CAM_FORMAT_Y_ONLY_12_BPP:
+    case CAM_FORMAT_Y_ONLY_14_BPP:
+        /* Single luma plane only */
+        buf_planes->plane_info.num_planes = 1;
+
+        buf_planes->plane_info.mp[0].len =
+                PAD_TO_SIZE((uint32_t)(stride * scanline),
+                padding->plane_padding);
+        buf_planes->plane_info.mp[0].offset =
+                PAD_TO_SIZE((uint32_t)(offset_x + stride * offset_y),
+                padding->plane_padding);
+        buf_planes->plane_info.mp[0].offset_x = offset_x;
+        buf_planes->plane_info.mp[0].offset_y = offset_y;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+        buf_planes->plane_info.frame_len =
+                PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_420_NV12_VENUS:
+#ifdef VENUS_PRESENT
+        // using Venus
+        stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, dim->width);
+        scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12, dim->height);
+
+        /* NOTE(review): VENUS_BUFFER_SIZE is called with (stride, scanline)
+         * here, while the video path uses (width, height) -- verify which
+         * arguments the driver macro actually expects. */
+        buf_planes->plane_info.frame_len =
+                VENUS_BUFFER_SIZE(COLOR_FMT_NV12, stride, scanline);
+        buf_planes->plane_info.num_planes = 2;
+        buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+        stride = VENUS_UV_STRIDE(COLOR_FMT_NV12, dim->width);
+        scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12, dim->height);
+        /* Chroma plane takes the remainder of the driver-sized frame */
+        buf_planes->plane_info.mp[1].len =
+                buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].offset_x =0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+        LOGE("Venus hardware not avail, cannot use this format");
+        rc = -1;
+#endif
+        break;
+    case CAM_FORMAT_YUV_420_NV21_VENUS:
+#ifdef VENUS_PRESENT
+        // using Venus
+        /* Same layout as NV12_VENUS above, only Cr/Cb order differs */
+        stride = VENUS_Y_STRIDE(COLOR_FMT_NV21, dim->width);
+        scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV21, dim->height);
+
+        buf_planes->plane_info.frame_len =
+                VENUS_BUFFER_SIZE(COLOR_FMT_NV21, stride, scanline);
+        buf_planes->plane_info.num_planes = 2;
+        buf_planes->plane_info.mp[0].len = (uint32_t)(stride * scanline);
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        buf_planes->plane_info.mp[0].width = dim->width;
+        buf_planes->plane_info.mp[0].height = dim->height;
+        stride = VENUS_UV_STRIDE(COLOR_FMT_NV21, dim->width);
+        scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV21, dim->height);
+        buf_planes->plane_info.mp[1].len =
+                buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].offset_x =0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+        buf_planes->plane_info.mp[1].width = dim->width;
+        buf_planes->plane_info.mp[1].height = dim->height / 2;
+#else
+        LOGE("Venus hardware not avail, cannot use this format");
+        rc = -1;
+#endif
+        break;
+    case CAM_FORMAT_YUV_420_NV12_UBWC:
+#ifdef UBWC_PRESENT
+        {
+            int meta_stride = 0,meta_scanline = 0;
+            // using UBWC
+            stride = VENUS_Y_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+            scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+            meta_stride = VENUS_Y_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+            meta_scanline = VENUS_Y_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+
+            buf_planes->plane_info.frame_len =
+                    VENUS_BUFFER_SIZE(COLOR_FMT_NV12_UBWC, stride, scanline);
+            buf_planes->plane_info.num_planes = 2;
+            buf_planes->plane_info.mp[0].offset = 0;
+            buf_planes->plane_info.mp[0].offset_x =0;
+            buf_planes->plane_info.mp[0].offset_y = 0;
+            buf_planes->plane_info.mp[0].stride = stride;
+            buf_planes->plane_info.mp[0].scanline = scanline;
+            buf_planes->plane_info.mp[0].width = dim->width;
+            buf_planes->plane_info.mp[0].height = dim->height;
+            buf_planes->plane_info.mp[0].meta_stride = meta_stride;
+            buf_planes->plane_info.mp[0].meta_scanline = meta_scanline;
+            /* Plane 0 length = 4K-aligned pixel data + 4K-aligned metadata */
+            buf_planes->plane_info.mp[0].meta_len =
+                    MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+            buf_planes->plane_info.mp[0].len =
+                    (uint32_t)(MSM_MEDIA_ALIGN((stride * scanline), 4096) +
+                    (buf_planes->plane_info.mp[0].meta_len));
+
+            stride = VENUS_UV_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+            scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+            meta_stride = VENUS_UV_META_STRIDE(COLOR_FMT_NV12_UBWC, dim->width);
+            meta_scanline = VENUS_UV_META_SCANLINES(COLOR_FMT_NV12_UBWC, dim->height);
+            buf_planes->plane_info.mp[1].offset = 0;
+            buf_planes->plane_info.mp[1].offset_x =0;
+            buf_planes->plane_info.mp[1].offset_y = 0;
+            buf_planes->plane_info.mp[1].stride = stride;
+            buf_planes->plane_info.mp[1].scanline = scanline;
+            buf_planes->plane_info.mp[1].width = dim->width;
+            buf_planes->plane_info.mp[1].height = dim->height/2;
+            buf_planes->plane_info.mp[1].meta_stride = meta_stride;
+            buf_planes->plane_info.mp[1].meta_scanline = meta_scanline;
+            buf_planes->plane_info.mp[1].meta_len =
+                    MSM_MEDIA_ALIGN(meta_stride * meta_scanline, 4096);
+            buf_planes->plane_info.mp[1].len =
+                    buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+        }
+#else
+        LOGE("UBWC hardware not avail, cannot use this format");
+        rc = -1;
+#endif
+        break;
+    default:
+        LOGE("Invalid cam_format for anlysis %d",
+                    fmt);
+        rc = -1;
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_offset_postproc
+ *
+ * DESCRIPTION: calculate postprocess frame offset
+ *
+ * PARAMETERS :
+ *   @stream_info: ptr to stream info
+ *   @padding : padding information
+ *   @plns : [out] buffer plane information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_postproc(cam_stream_info_t *stream_info,
+                                       cam_padding_info_t *padding,
+                                       cam_stream_buf_plane_info_t *plns)
+{
+    int32_t rc = 0;
+    cam_stream_type_t type = CAM_STREAM_TYPE_DEFAULT;
+    if (stream_info->reprocess_config.pp_type == CAM_OFFLINE_REPROCESS_TYPE) {
+        type = stream_info->reprocess_config.offline.input_type;
+        if (CAM_STREAM_TYPE_DEFAULT == type) {
+            if (plns->plane_info.frame_len == 0) {
+                // take offset from input source
+                *plns = stream_info->reprocess_config.offline.input_buf_planes;
+                return rc;
+            }
+        } else {
+            type = stream_info->reprocess_config.offline.input_type;
+        }
+    } else {
+        type = stream_info->reprocess_config.online.input_stream_type;
+    }
+
+    switch (type) {
+    case CAM_STREAM_TYPE_PREVIEW:
+        rc = mm_stream_calc_offset_preview(stream_info,
+                                           &stream_info->dim,
+                                           padding,
+                                           plns);
+        break;
+    case CAM_STREAM_TYPE_POSTVIEW:
+        rc = mm_stream_calc_offset_post_view(stream_info->fmt,
+                                           &stream_info->dim,
+                                           plns);
+        break;
+    case CAM_STREAM_TYPE_SNAPSHOT:
+    case CAM_STREAM_TYPE_CALLBACK:
+        rc = mm_stream_calc_offset_snapshot(stream_info->fmt,
+                                            &stream_info->dim,
+                                            padding,
+                                            plns);
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        rc = mm_stream_calc_offset_video(stream_info->fmt,
+                &stream_info->dim, plns);
+        break;
+    case CAM_STREAM_TYPE_RAW:
+        rc = mm_stream_calc_offset_raw(stream_info->fmt,
+                                       &stream_info->dim,
+                                       padding,
+                                       plns);
+        break;
+    case CAM_STREAM_TYPE_ANALYSIS:
+        rc = mm_stream_calc_offset_analysis(stream_info->fmt,
+                                            &stream_info->dim,
+                                            padding,
+                                            plns);
+        break;
+    case CAM_STREAM_TYPE_METADATA:
+        rc = mm_stream_calc_offset_metadata(&stream_info->dim,
+                                            padding,
+                                            plns);
+        break;
+    case CAM_STREAM_TYPE_OFFLINE_PROC:
+        rc = mm_stream_calc_offset_snapshot(stream_info->fmt,
+                &stream_info->dim, padding, plns);
+        break;
+    default:
+        LOGE("not supported for stream type %d",
+                    type);
+        rc = -1;
+        break;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_lcm
+ *
+ * DESCRIPTION: calculate the least common multiple (LCM) of two numbers
+ *
+ * PARAMETERS :
+ *   @num1  : number 1
+ *   @num2  : number 2
+ *
+ * RETURN     : uint32_t type (LCM of the inputs; 0 if both are less than 1)
+ *
+ *==========================================================================*/
uint32_t mm_stream_calc_lcm(int32_t num1, int32_t num2)
{
    uint32_t a, b, gcd;

    /* Treat non-positive inputs as "absent": if both are absent there is
     * no LCM (0); if one is absent the other is returned unchanged. */
    if ((num1 < 1) && (num2 < 1)) {
        return 0;
    } else if (num1 < 1) {
        return (uint32_t)num2;
    } else if (num2 < 1) {
        return (uint32_t)num1;
    }

    /* Euclid's algorithm for the GCD: O(log min(num1, num2)) instead of
     * the previous repeated-addition search, which iterated lcm/max times
     * and could run (and overflow) essentially unbounded for large
     * co-prime inputs. */
    a = (uint32_t)num1;
    b = (uint32_t)num2;
    while (b != 0) {
        uint32_t r = a % b;
        a = b;
        b = r;
    }
    gcd = a;

    /* lcm(x, y) = x / gcd(x, y) * y; divide first to postpone overflow */
    return ((uint32_t)num1 / gcd) * (uint32_t)num2;
}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_offset
+ *
+ * DESCRIPTION: calculate frame offset based on format and padding information
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset(mm_stream_t *my_obj)
+{
+    int32_t rc = 0;
+
+    cam_dimension_t dim = my_obj->stream_info->dim;
+    if (my_obj->stream_info->pp_config.feature_mask & CAM_QCOM_FEATURE_ROTATION &&
+        my_obj->stream_info->stream_type != CAM_STREAM_TYPE_VIDEO) {
+        if (my_obj->stream_info->pp_config.rotation == ROTATE_90 ||
+            my_obj->stream_info->pp_config.rotation == ROTATE_270) {
+            // rotated by 90 or 270, need to switch width and height
+            dim.width = my_obj->stream_info->dim.height;
+            dim.height = my_obj->stream_info->dim.width;
+        }
+    }
+
+    switch (my_obj->stream_info->stream_type) {
+    case CAM_STREAM_TYPE_PREVIEW:
+        rc = mm_stream_calc_offset_preview(my_obj->stream_info,
+                                           &dim,
+                                           &my_obj->padding_info,
+                                           &my_obj->stream_info->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_POSTVIEW:
+      rc = mm_stream_calc_offset_post_view(my_obj->stream_info->fmt,
+                                         &dim,
+                                         &my_obj->stream_info->buf_planes);
+      break;
+    case CAM_STREAM_TYPE_SNAPSHOT:
+    case CAM_STREAM_TYPE_CALLBACK:
+        rc = mm_stream_calc_offset_snapshot(my_obj->stream_info->fmt,
+                                            &dim,
+                                            &my_obj->padding_info,
+                                            &my_obj->stream_info->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_OFFLINE_PROC:
+        rc = mm_stream_calc_offset_postproc(my_obj->stream_info,
+                                            &my_obj->padding_info,
+                                            &my_obj->stream_info->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        rc = mm_stream_calc_offset_video(my_obj->stream_info->fmt,
+                &dim, &my_obj->stream_info->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_RAW:
+        rc = mm_stream_calc_offset_raw(my_obj->stream_info->fmt,
+                                       &dim,
+                                       &my_obj->padding_info,
+                                       &my_obj->stream_info->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_ANALYSIS:
+        rc = mm_stream_calc_offset_analysis(my_obj->stream_info->fmt,
+                                            &dim,
+                                            &my_obj->padding_info,
+                                            &my_obj->stream_info->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_METADATA:
+        rc = mm_stream_calc_offset_metadata(&dim,
+                                            &my_obj->padding_info,
+                                            &my_obj->stream_info->buf_planes);
+        break;
+    default:
+        LOGE("not supported for stream type %d",
+                    my_obj->stream_info->stream_type);
+        rc = -1;
+        break;
+    }
+
+    my_obj->frame_offset = my_obj->stream_info->buf_planes.plane_info;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_sync_info
+ *
+ * DESCRIPTION: synchronize stream information with server
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : assume stream info buffer is mapped to server and filled in with
+ *              stream information by upper layer. This call will let server to
+ *              synchornize the stream information with HAL. If server find any
+ *              fields that need to be changed accroding to hardware configuration,
+ *              server will modify corresponding fields so that HAL could know
+ *              about it.
+ *==========================================================================*/
+int32_t mm_stream_sync_info(mm_stream_t *my_obj)
+{
+    int32_t rc = 0;
+    int32_t value = 0;
+    my_obj->stream_info->stream_svr_id = my_obj->server_stream_id;
+    rc = mm_stream_calc_offset(my_obj);
+
+    if (rc == 0) {
+        mm_camera_obj_t *cam_obj = my_obj->ch_obj->cam_obj;
+        int stream_id  =  my_obj->server_stream_id;
+        rc = mm_camera_util_s_ctrl(cam_obj, stream_id, my_obj->fd,
+                CAM_PRIV_STREAM_INFO_SYNC, &value);
+    }
+    return rc;
+}
+
/*===========================================================================
 * FUNCTION   : mm_stream_set_fmt
 *
 * DESCRIPTION: set stream format to kernel via v4l2 ioctl
 *
 * PARAMETERS :
 *   @my_obj  : stream object
 *
 * RETURN     : int32_t type of status
 *              0  -- success
 *              -1 -- failure
 *==========================================================================*/
int32_t mm_stream_set_fmt(mm_stream_t *my_obj)
{
    int32_t rc = 0;
    struct v4l2_format fmt;
    struct msm_v4l2_format_data msm_fmt;
    int i;
    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
          my_obj->my_hdl, my_obj->fd, my_obj->state);

    /* Reject streams whose dimensions were never set. */
    if (my_obj->stream_info->dim.width == 0 ||
        my_obj->stream_info->dim.height == 0) {
        LOGE("invalid input[w=%d,h=%d,fmt=%d]\n",
                   my_obj->stream_info->dim.width,
                   my_obj->stream_info->dim.height,
                   my_obj->stream_info->fmt);
        return -1;
    }

    memset(&fmt, 0, sizeof(fmt));
    memset(&msm_fmt, 0, sizeof(msm_fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    msm_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;

    msm_fmt.width = (unsigned int)my_obj->stream_info->dim.width;
    msm_fmt.height = (unsigned int)my_obj->stream_info->dim.height;
    msm_fmt.pixelformat = mm_stream_get_v4l2_fmt(my_obj->stream_info->fmt);

    /* Per-plane sizes come from the previously computed frame offset;
     * batch mode instead advertises a single user-buffer plane. */
    if (my_obj->stream_info->streaming_mode != CAM_STREAMING_MODE_BATCH) {
        msm_fmt.num_planes = (unsigned char)my_obj->frame_offset.num_planes;
        for (i = 0; i < msm_fmt.num_planes; i++) {
            msm_fmt.plane_sizes[i] = my_obj->frame_offset.mp[i].len;
        }
    } else {
        msm_fmt.num_planes = 1;
        msm_fmt.plane_sizes[0] = my_obj->stream_info->user_buf_info.size;
    }

    /* The MSM-specific format descriptor is tunneled to the driver through
     * the raw_data area of the standard v4l2_format structure. */
    memcpy(fmt.fmt.raw_data, &msm_fmt, sizeof(msm_fmt));
    rc = ioctl(my_obj->fd, VIDIOC_S_FMT, &fmt);
    if (rc < 0) {
        LOGE("ioctl VIDIOC_S_FMT failed: rc=%d errno %d\n", rc, errno);
    } else {
#ifndef DAEMON_PRESENT
        /* Daemon-less configuration: forward the S_FMT to the camera
         * module through the shim layer as well. */
        mm_camera_obj_t *cam_obj = my_obj->ch_obj->cam_obj;
        cam_shim_packet_t *shim_cmd;
        cam_shim_cmd_data shim_cmd_data;

        memset(&shim_cmd_data, 0, sizeof(shim_cmd_data));
        shim_cmd_data.command = MSM_CAMERA_PRIV_S_FMT;
        shim_cmd_data.stream_id = my_obj->server_stream_id;
        shim_cmd_data.value = NULL;
        shim_cmd = mm_camera_create_shim_cmd_packet(CAM_SHIM_SET_PARM,
                cam_obj->sessionid, &shim_cmd_data);
        rc = mm_camera_module_send_cmd(shim_cmd);
        mm_camera_destroy_shim_cmd_packet(shim_cmd);
#endif /* DAEMON_PRESENT */
    }
    return rc;
}
+
/*===========================================================================
 * FUNCTION   : mm_stream_buf_done
 *
 * DESCRIPTION: enqueue buffer back to kernel
 *
 * PARAMETERS :
 *   @my_obj       : stream object
 *   @frame        : frame to be enqueued back to kernel
 *
 * RETURN     : int32_t type of status
 *              0  -- success
 *              -1 -- failure
 *==========================================================================*/
int32_t mm_stream_buf_done(mm_stream_t * my_obj,
                           mm_camera_buf_def_t *frame)
{
    int32_t rc = 0;
    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
          my_obj->my_hdl, my_obj->fd, my_obj->state);

    /* Guard against a double buf-done: a zero refcount means the buffer
     * was already returned. */
    pthread_mutex_lock(&my_obj->buf_lock);
    if(my_obj->buf_status[frame->buf_idx].buf_refcnt == 0) {
        LOGW("Warning: trying to free buffer for the second time?(idx=%d)\n",
                    frame->buf_idx);
        pthread_mutex_unlock(&my_obj->buf_lock);
        rc = -1;
        return rc;
    }
    /* NOTE(review): the lock is dropped here and re-taken below, leaving a
     * window between the refcount check and the decrement -- confirm that
     * concurrent buf-done calls for the same index cannot occur. */
    pthread_mutex_unlock(&my_obj->buf_lock);
    if (my_obj->stream_info->streaming_mode == CAM_STREAMING_MODE_BATCH) {
        rc = mm_stream_write_user_buf(my_obj, frame);
    } else {
        pthread_mutex_lock(&my_obj->buf_lock);
        my_obj->buf_status[frame->buf_idx].buf_refcnt--;
        if (0 == my_obj->buf_status[frame->buf_idx].buf_refcnt) {
            /* Last reference released: unlock before qbuf so the lock is
             * not held across the kernel call. */
            pthread_mutex_unlock(&my_obj->buf_lock);
            LOGD("<DEBUG> : Buf done for buffer:%d, stream:%d", frame->buf_idx, frame->stream_type);
            rc = mm_stream_qbuf(my_obj, frame);
            if(rc < 0) {
                LOGE("mm_camera_stream_qbuf(idx=%d) err=%d\n",
                            frame->buf_idx, rc);
            } else {
                my_obj->buf_status[frame->buf_idx].in_kernel = 1;
            }
        }else{
            /* Other holders remain; just record the release. */
            LOGD("<DEBUG> : Still ref count pending count :%d",
                 my_obj->buf_status[frame->buf_idx].buf_refcnt);
            LOGD("<DEBUG> : for buffer:%p:%d",
                 my_obj, frame->buf_idx);
            pthread_mutex_unlock(&my_obj->buf_lock);
        }
    }
    return rc;
}
+
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_get_queued_buf_count
+ *
+ * DESCRIPTION: return queued buffer count
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *
+ * RETURN     : queued buffer count
+ *==========================================================================*/
+int32_t mm_stream_get_queued_buf_count(mm_stream_t *my_obj)
+{
+    int32_t rc = 0;
+    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+             my_obj->my_hdl, my_obj->fd, my_obj->state);
+    pthread_mutex_lock(&my_obj->buf_lock);
+    rc = my_obj->queued_buffer_count;
+    pthread_mutex_unlock(&my_obj->buf_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_reg_buf_cb
+ *
+ * DESCRIPTION: Allow other stream to register dataCB at this stream.
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @val          : callback function to be registered
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_reg_buf_cb(mm_stream_t *my_obj,
+        mm_stream_data_cb_t val)
+{
+    int32_t rc = -1;
+    uint8_t i;
+    LOGD("E, my_handle = 0x%x, fd = %d, state = %d",
+          my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    pthread_mutex_lock(&my_obj->cb_lock);
+    for (i=0 ;i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+        if(NULL == my_obj->buf_cb[i].cb) {
+            my_obj->buf_cb[i] = val;
+            rc = 0;
+            break;
+        }
+    }
+    pthread_mutex_unlock(&my_obj->cb_lock);
+
+    return rc;
+}
diff --git a/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c b/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c
new file mode 100644
index 0000000..0c740b4
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c
@@ -0,0 +1,698 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <pthread.h>
+#include <errno.h>
+#include <string.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/prctl.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <cam_semaphore.h>
+
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
/* Commands delivered to the poll thread over its control pipe. */
typedef enum {
    /* poll entries updated */
    MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED,
    /* poll entries updated asynchronous */
    MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC,
    /* commit updates */
    MM_CAMERA_PIPE_CMD_COMMIT,
    /* exit */
    MM_CAMERA_PIPE_CMD_EXIT,
    /* max count */
    MM_CAMERA_PIPE_CMD_MAX
} mm_camera_pipe_cmd_type_t;

/* Lifecycle states of the poll thread. */
typedef enum {
    MM_CAMERA_POLL_TASK_STATE_STOPPED,
    MM_CAMERA_POLL_TASK_STATE_POLL,     /* polling pid in polling state. */
    MM_CAMERA_POLL_TASK_STATE_MAX
} mm_camera_poll_task_state_type_t;

/* Message exchanged over the control pipe: a command id plus an
 * (optional) event payload. */
typedef struct {
    uint32_t cmd;
    mm_camera_event_t event;
} mm_camera_sig_evt_t;
+
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_sig_async
+ *
+ * DESCRIPTION: Asynchoronous call to send a command through pipe.
+ *
+ * PARAMETERS :
+ *   @poll_cb      : ptr to poll thread object
+ *   @cmd          : command to be sent
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_poll_sig_async(mm_camera_poll_thread_t *poll_cb,
+                                  uint32_t cmd)
+{
+    /* send through pipe */
+    /* get the mutex */
+    mm_camera_sig_evt_t cmd_evt;
+
+    LOGD("E cmd = %d",cmd);
+    memset(&cmd_evt, 0, sizeof(cmd_evt));
+    cmd_evt.cmd = cmd;
+    pthread_mutex_lock(&poll_cb->mutex);
+    /* reset the statue to false */
+    poll_cb->status = FALSE;
+
+    /* send cmd to worker */
+    ssize_t len = write(poll_cb->pfds[1], &cmd_evt, sizeof(cmd_evt));
+    if (len < 1) {
+        LOGW("len = %lld, errno = %d",
+                (long long int)len, errno);
+        /* Avoid waiting for the signal */
+        pthread_mutex_unlock(&poll_cb->mutex);
+        return 0;
+    }
+    LOGD("begin IN mutex write done, len = %lld",
+            (long long int)len);
+    pthread_mutex_unlock(&poll_cb->mutex);
+    LOGD("X");
+    return 0;
+}
+
+
+
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_sig
+ *
+ * DESCRIPTION: synchorinzed call to send a command through pipe.
+ *
+ * PARAMETERS :
+ *   @poll_cb      : ptr to poll thread object
+ *   @cmd          : command to be sent
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_poll_sig(mm_camera_poll_thread_t *poll_cb,
+                                  uint32_t cmd)
+{
+    /* send through pipe */
+    /* get the mutex */
+    mm_camera_sig_evt_t cmd_evt;
+
+    LOGD("E cmd = %d",cmd);
+    memset(&cmd_evt, 0, sizeof(cmd_evt));
+    cmd_evt.cmd = cmd;
+    pthread_mutex_lock(&poll_cb->mutex);
+    /* reset the statue to false */
+    poll_cb->status = FALSE;
+    /* send cmd to worker */
+
+    ssize_t len = write(poll_cb->pfds[1], &cmd_evt, sizeof(cmd_evt));
+    if(len < 1) {
+        LOGW("len = %lld, errno = %d",
+                (long long int)len, errno);
+        /* Avoid waiting for the signal */
+        pthread_mutex_unlock(&poll_cb->mutex);
+        return 0;
+    }
+    LOGD("begin IN mutex write done, len = %lld",
+            (long long int)len);
+    /* wait till worker task gives positive signal */
+    if (FALSE == poll_cb->status) {
+        LOGD("wait");
+        pthread_cond_wait(&poll_cb->cond_v, &poll_cb->mutex);
+    }
+    /* done */
+    pthread_mutex_unlock(&poll_cb->mutex);
+    LOGD("X");
+    return 0;
+}
+
/*===========================================================================
 * FUNCTION   : mm_camera_poll_sig_done
 *
 * DESCRIPTION: signal the status of done; wakes a caller blocked in
 *              mm_camera_poll_sig()
 *
 * PARAMETERS :
 *   @poll_cb : ptr to poll thread object
 *
 * RETURN     : none
 *==========================================================================*/
static void mm_camera_poll_sig_done(mm_camera_poll_thread_t *poll_cb)
{
    pthread_mutex_lock(&poll_cb->mutex);
    poll_cb->status = TRUE;
    pthread_cond_signal(&poll_cb->cond_v);
    LOGD("done, in mutex");
    pthread_mutex_unlock(&poll_cb->mutex);
}
+
/*===========================================================================
 * FUNCTION   : mm_camera_poll_set_state
 *
 * DESCRIPTION: set a polling state
 *
 * PARAMETERS :
 *   @poll_cb : ptr to poll thread object
 *   @state   : polling state (stopped/polling)
 *
 * RETURN     : none
 *==========================================================================*/
static void mm_camera_poll_set_state(mm_camera_poll_thread_t *poll_cb,
                                     mm_camera_poll_task_state_type_t state)
{
    /* NOTE(review): this write is not protected by poll_cb->mutex; callers
     * appear to rely on the pipe handshake for ordering -- confirm. */
    poll_cb->state = state;
}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_proc_pipe
+ *
+ * DESCRIPTION: polling thread routine to process pipe
+ *
+ * PARAMETERS :
+ *   @poll_cb : ptr to poll thread object
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_camera_poll_proc_pipe(mm_camera_poll_thread_t *poll_cb)
+{
+    ssize_t read_len;
+    int i;
+    mm_camera_sig_evt_t cmd_evt;
+    read_len = read(poll_cb->pfds[0], &cmd_evt, sizeof(cmd_evt));
+    LOGD("read_fd = %d, read_len = %d, expect_len = %d cmd = %d",
+          poll_cb->pfds[0], (int)read_len, (int)sizeof(cmd_evt), cmd_evt.cmd);
+    switch (cmd_evt.cmd) {
+    case MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED:
+    case MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC:
+        /* we always have index 0 for pipe read */
+        poll_cb->num_fds = 0;
+        poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->pfds[0];
+        poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
+        poll_cb->num_fds++;
+
+        if (MM_CAMERA_POLL_TYPE_EVT == poll_cb->poll_type &&
+                poll_cb->num_fds < MAX_STREAM_NUM_IN_BUNDLE) {
+            if (poll_cb->poll_entries[0].fd >= 0) {
+                /* fd is valid, we update poll_fds */
+                poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->poll_entries[0].fd;
+                poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
+                poll_cb->num_fds++;
+            }
+        } else if (MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type &&
+                poll_cb->num_fds <= MAX_STREAM_NUM_IN_BUNDLE) {
+            for(i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
+                if(poll_cb->poll_entries[i].fd >= 0) {
+                    /* fd is valid, we update poll_fds to this fd */
+                    poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->poll_entries[i].fd;
+                    poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
+                    poll_cb->num_fds++;
+                } else {
+                    /* fd is invalid, we set the entry to -1 to prevent polling.
+                     * According to spec, polling will not poll on entry with fd=-1.
+                     * If this is not the case, we need to skip these invalid fds
+                     * when updating this array.
+                     * We still keep fd=-1 in this array because this makes easier to
+                     * map cb associated with this fd once incoming data avail by directly
+                     * using the index-1(0 is reserved for pipe read, so need to reduce index by 1) */
+                    poll_cb->poll_fds[poll_cb->num_fds].fd = -1;
+                    poll_cb->poll_fds[poll_cb->num_fds].events = 0;
+                    poll_cb->num_fds++;
+                }
+            }
+        }
+        if (cmd_evt.cmd != MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC)
+            mm_camera_poll_sig_done(poll_cb);
+        break;
+
+    case MM_CAMERA_PIPE_CMD_COMMIT:
+        mm_camera_poll_sig_done(poll_cb);
+        break;
+    case MM_CAMERA_PIPE_CMD_EXIT:
+    default:
+        mm_camera_poll_set_state(poll_cb, MM_CAMERA_POLL_TASK_STATE_STOPPED);
+        mm_camera_poll_sig_done(poll_cb);
+        break;
+    }
+}
+
/*===========================================================================
 * FUNCTION   : mm_camera_poll_fn
 *
 * DESCRIPTION: polling thread routine; loops on poll() until the state
 *              leaves MM_CAMERA_POLL_TASK_STATE_POLL
 *
 * PARAMETERS :
 *   @poll_cb : ptr to poll thread object
 *
 * RETURN     : none
 *==========================================================================*/
static void *mm_camera_poll_fn(mm_camera_poll_thread_t *poll_cb)
{
    int rc = 0, i;

    if (NULL == poll_cb) {
        LOGE("poll_cb is NULL!\n");
        return NULL;
    }
    LOGD("poll type = %d, num_fd = %d poll_cb = %p\n",
          poll_cb->poll_type, poll_cb->num_fds,poll_cb);
    do {
         /* Re-arm the requested events each iteration; poll() only
          * rewrites revents. */
         for(i = 0; i < poll_cb->num_fds; i++) {
            poll_cb->poll_fds[i].events = POLLIN|POLLRDNORM|POLLPRI;
         }

         rc = poll(poll_cb->poll_fds, poll_cb->num_fds, poll_cb->timeoutms);
         if(rc > 0) {
            if ((poll_cb->poll_fds[0].revents & POLLIN) &&
                (poll_cb->poll_fds[0].revents & POLLRDNORM)) {
                /* if we have data on pipe, we only process pipe in this iteration */
                LOGD("cmd received on pipe\n");
                mm_camera_poll_proc_pipe(poll_cb);
            } else {
                /* Index 0 is the control pipe, so poll_fds[i] maps to
                 * poll_entries[i-1]. */
                for(i=1; i<poll_cb->num_fds; i++) {
                    /* Checking for ctrl events */
                    if ((poll_cb->poll_type == MM_CAMERA_POLL_TYPE_EVT) &&
                        (poll_cb->poll_fds[i].revents & POLLPRI)) {
                        LOGD("mm_camera_evt_notify\n");
                        if (NULL != poll_cb->poll_entries[i-1].notify_cb) {
                            poll_cb->poll_entries[i-1].notify_cb(poll_cb->poll_entries[i-1].user_data);
                        }
                    }

                    if ((MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type) &&
                        (poll_cb->poll_fds[i].revents & POLLIN) &&
                        (poll_cb->poll_fds[i].revents & POLLRDNORM)) {
                        LOGD("mm_stream_data_notify\n");
                        if (NULL != poll_cb->poll_entries[i-1].notify_cb) {
                            poll_cb->poll_entries[i-1].notify_cb(poll_cb->poll_entries[i-1].user_data);
                        }
                    }
                }
            }
        } else {
            /* in error case sleep 10 us and then continue. hard coded here */
            usleep(10);
            continue;
        }
    } while ((poll_cb != NULL) && (poll_cb->state == MM_CAMERA_POLL_TASK_STATE_POLL));
    return NULL;
}
+
/*===========================================================================
 * FUNCTION   : mm_camera_poll_thread
 *
 * DESCRIPTION: polling thread entry function
 *
 * PARAMETERS :
 *   @data    : ptr to poll thread object
 *
 * RETURN     : none
 *==========================================================================*/
static void *mm_camera_poll_thread(void *data)
{
    mm_camera_poll_thread_t *poll_cb = (mm_camera_poll_thread_t *)data;

    mm_camera_cmd_thread_name(poll_cb->threadName);
    /* add pipe read fd into poll first */
    poll_cb->poll_fds[poll_cb->num_fds++].fd = poll_cb->pfds[0];

    /* Unblock the launcher waiting in mm_camera_poll_thread_launch(). */
    mm_camera_poll_sig_done(poll_cb);
    mm_camera_poll_set_state(poll_cb, MM_CAMERA_POLL_TASK_STATE_POLL);
    return mm_camera_poll_fn(poll_cb);
}
+
/*===========================================================================
 * FUNCTION   : mm_camera_poll_thread_notify_entries_updated
 *
 * DESCRIPTION: notify the polling thread that entries for polling fd have
 *              been updated; blocks until the poll thread acknowledges
 *
 * PARAMETERS :
 *   @poll_cb : ptr to poll thread object
 *
 * RETURN     : int32_t type of status
 *              0  -- success
 *              -1 -- failure
 *==========================================================================*/
int32_t mm_camera_poll_thread_notify_entries_updated(mm_camera_poll_thread_t * poll_cb)
{
    /* send poll entries updated signal to poll thread */
    return mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED);
}
+
/*===========================================================================
 * FUNCTION   : mm_camera_poll_thread_commit_updates
 *
 * DESCRIPTION: sync with all previously pending async updates; blocks until
 *              the poll thread drains the pipe up to this command
 *
 * PARAMETERS :
 *   @poll_cb : ptr to poll thread object
 *
 * RETURN     : int32_t type of status
 *              0  -- success
 *              -1 -- failure
 *==========================================================================*/
int32_t mm_camera_poll_thread_commit_updates(mm_camera_poll_thread_t * poll_cb)
{
    return mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_COMMIT);
}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_thread_add_poll_fd
+ *
+ * DESCRIPTION: add a new fd into polling thread
+ *
+ * PARAMETERS :
+ *   @poll_cb   : ptr to poll thread object
+ *   @handler   : stream handle if channel data polling thread,
+ *                0 if event polling thread
+ *   @fd        : file descriptor need to be added into polling thread
+ *   @notify_cb : callback function to handle if any notify from fd
+ *   @userdata  : user data ptr
+ *   @call_type : Whether its Synchronous or Asynchronous call
+ *
+ * RETURN     : none
+ *==========================================================================*/
+int32_t mm_camera_poll_thread_add_poll_fd(mm_camera_poll_thread_t * poll_cb,
+                                          uint32_t handler,
+                                          int32_t fd,
+                                          mm_camera_poll_notify_t notify_cb,
+                                          void* userdata,
+                                          mm_camera_call_type_t call_type)
+{
+    int32_t rc = -1;
+    uint8_t idx = 0;
+
+    if (MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type) {
+        /* get stream idx from handler if CH type */
+        idx = mm_camera_util_get_index_by_handler(handler);
+    } else {
+        /* for EVT type, only idx=0 is valid */
+        idx = 0;
+    }
+
+    if (MAX_STREAM_NUM_IN_BUNDLE > idx) {
+        poll_cb->poll_entries[idx].fd = fd;
+        poll_cb->poll_entries[idx].handler = handler;
+        poll_cb->poll_entries[idx].notify_cb = notify_cb;
+        poll_cb->poll_entries[idx].user_data = userdata;
+        /* send poll entries updated signal to poll thread */
+        if (call_type == mm_camera_sync_call ) {
+            rc = mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED);
+        } else {
+            rc = mm_camera_poll_sig_async(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC );
+        }
+    } else {
+        LOGE("invalid handler %d (%d)", handler, idx);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_thread_del_poll_fd
+ *
+ * DESCRIPTION: delete a fd from polling thread
+ *
+ * PARAMETERS :
+ *   @poll_cb   : ptr to poll thread object
+ *   @handler   : stream handle if channel data polling thread,
+ *                0 if event polling thread
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_poll_thread_del_poll_fd(mm_camera_poll_thread_t * poll_cb,
+                                          uint32_t handler,
+                                          mm_camera_call_type_t call_type)
+{
+    int32_t rc = -1;
+    uint8_t idx = 0;
+
+    if (MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type) {
+        /* get stream idx from handler if CH type */
+        idx = mm_camera_util_get_index_by_handler(handler);
+    } else {
+        /* for EVT type, only idx=0 is valid */
+        idx = 0;
+    }
+
+    if ((MAX_STREAM_NUM_IN_BUNDLE > idx) &&
+        (handler == poll_cb->poll_entries[idx].handler)) {
+        /* reset poll entry */
+        poll_cb->poll_entries[idx].fd = -1; /* set fd to invalid */
+        poll_cb->poll_entries[idx].handler = 0;
+        poll_cb->poll_entries[idx].notify_cb = NULL;
+
+        /* send poll entries updated signal to poll thread */
+        if (call_type == mm_camera_sync_call ) {
+            rc = mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED);
+        } else {
+            rc = mm_camera_poll_sig_async(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC );
+        }
+    } else {
+        if ((MAX_STREAM_NUM_IN_BUNDLE <= idx) ||
+                (poll_cb->poll_entries[idx].handler != 0)) {
+            LOGE("invalid handler %d (%d)", poll_cb->poll_entries[idx].handler,
+                    idx);
+            rc = -1;
+        } else {
+            LOGW("invalid handler %d (%d)", handler, idx);
+            rc = 0;
+        }
+    }
+
+    return rc;
+}
+
+int32_t mm_camera_poll_thread_launch(mm_camera_poll_thread_t * poll_cb,
+                                     mm_camera_poll_thread_type_t poll_type)
+{
+    int32_t rc = 0;
+    size_t i = 0, cnt = 0;
+    poll_cb->poll_type = poll_type;
+
+    //Initialize poll_fds
+    cnt = sizeof(poll_cb->poll_fds) / sizeof(poll_cb->poll_fds[0]);
+    for (i = 0; i < cnt; i++) {
+        poll_cb->poll_fds[i].fd = -1;
+    }
+    //Initialize poll_entries
+    cnt = sizeof(poll_cb->poll_entries) / sizeof(poll_cb->poll_entries[0]);
+    for (i = 0; i < cnt; i++) {
+        poll_cb->poll_entries[i].fd = -1;
+    }
+    //Initialize pipe fds
+    poll_cb->pfds[0] = -1;
+    poll_cb->pfds[1] = -1;
+    rc = pipe(poll_cb->pfds);
+    if(rc < 0) {
+        LOGE("pipe open rc=%d\n", rc);
+        return -1;
+    }
+
+    poll_cb->timeoutms = -1;  /* Infinite seconds */
+
+    LOGD("poll_type = %d, read fd = %d, write fd = %d timeout = %d",
+         poll_cb->poll_type,
+        poll_cb->pfds[0], poll_cb->pfds[1],poll_cb->timeoutms);
+
+    pthread_mutex_init(&poll_cb->mutex, NULL);
+    pthread_cond_init(&poll_cb->cond_v, NULL);
+
+    /* launch the thread */
+    pthread_mutex_lock(&poll_cb->mutex);
+    poll_cb->status = 0;
+    pthread_create(&poll_cb->pid, NULL, mm_camera_poll_thread, (void *)poll_cb);
+    if(!poll_cb->status) {
+        pthread_cond_wait(&poll_cb->cond_v, &poll_cb->mutex);
+    }
+
+    pthread_mutex_unlock(&poll_cb->mutex);
+    LOGD("End");
+    return rc;
+}
+
+/* Stop and tear down a running poll thread.
+ * Sequence matters: send EXIT over the command pipe, join the thread,
+ * then close the pipe and destroy the sync primitives.
+ * Returns 0; calling on an already-stopped thread just logs and returns. */
+int32_t mm_camera_poll_thread_release(mm_camera_poll_thread_t *poll_cb)
+{
+    int32_t rc = 0;
+    if(MM_CAMERA_POLL_TASK_STATE_STOPPED == poll_cb->state) {
+        LOGE("err, poll thread is not running.\n");
+        return rc;
+    }
+
+    /* send exit signal to poll thread */
+    mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_EXIT);
+    /* wait until poll thread exits */
+    if (pthread_join(poll_cb->pid, NULL) != 0) {
+        LOGD("pthread dead already\n");
+    }
+
+    /* close pipe */
+    if(poll_cb->pfds[0] >= 0) {
+        close(poll_cb->pfds[0]);
+    }
+    if(poll_cb->pfds[1] >= 0) {
+        close(poll_cb->pfds[1]);
+    }
+
+    pthread_mutex_destroy(&poll_cb->mutex);
+    pthread_cond_destroy(&poll_cb->cond_v);
+    memset(poll_cb, 0, sizeof(mm_camera_poll_thread_t));
+    /* memset cleared pfds to 0 (a valid fd); restore the -1 "unused"
+     * sentinel so a later release does not close someone else's fd */
+    poll_cb->pfds[0] = -1;
+    poll_cb->pfds[1] = -1;
+    return rc;
+}
+
+/* Worker entry point for the command thread.
+ * Blocks on cmd_sem until a command is queued, then drains the queue,
+ * dispatching callback-type commands to cmd_thread->cb.  An EXIT command
+ * (or any unknown type) stops the loop after the current drain pass. */
+static void *mm_camera_cmd_thread(void *data)
+{
+    int running = 1;
+    int ret;
+    mm_camera_cmd_thread_t *cmd_thread =
+                (mm_camera_cmd_thread_t *)data;
+    mm_camera_cmdcb_t* node = NULL;
+
+    mm_camera_cmd_thread_name(cmd_thread->threadName);
+    do {
+        /* inner loop: retry sem_wait on EINTR-style wakeups; bail out of
+         * the thread entirely on any other error */
+        do {
+            ret = cam_sem_wait(&cmd_thread->cmd_sem);
+            if (ret != 0 && errno != EINVAL) {
+                LOGE("cam_sem_wait error (%s)",
+                            strerror(errno));
+                return NULL;
+            }
+        } while (ret != 0);
+
+        /* we got notified about new cmd avail in cmd queue */
+        node = (mm_camera_cmdcb_t*)cam_queue_deq(&cmd_thread->cmd_queue);
+        while (node != NULL) {
+            switch (node->cmd_type) {
+            case MM_CAMERA_CMD_TYPE_EVT_CB:
+            case MM_CAMERA_CMD_TYPE_DATA_CB:
+            case MM_CAMERA_CMD_TYPE_REQ_DATA_CB:
+            case MM_CAMERA_CMD_TYPE_SUPER_BUF_DATA_CB:
+            case MM_CAMERA_CMD_TYPE_CONFIG_NOTIFY:
+            case MM_CAMERA_CMD_TYPE_START_ZSL:
+            case MM_CAMERA_CMD_TYPE_STOP_ZSL:
+            case MM_CAMERA_CMD_TYPE_GENERAL:
+            case MM_CAMERA_CMD_TYPE_FLUSH_QUEUE:
+                if (NULL != cmd_thread->cb) {
+                    cmd_thread->cb(node, cmd_thread->user_data);
+                }
+                break;
+            case MM_CAMERA_CMD_TYPE_EXIT:
+            default:
+                /* EXIT (and unknown types) end the outer loop, but the
+                 * queue is still drained first so nodes are not leaked */
+                running = 0;
+                break;
+            }
+            free(node);
+            node = (mm_camera_cmdcb_t*)cam_queue_deq(&cmd_thread->cmd_queue);
+        } /* (node != NULL) */
+    } while (running);
+    return NULL;
+}
+
+/* Initialize @cmd_thread's queue/semaphores and spawn the worker thread.
+ * @cb / @user_data are stored and invoked by the worker for each queued
+ * callback command.  Returns 0 on success, -1 if the thread could not be
+ * created (in which case all initialized state is torn down again). */
+int32_t mm_camera_cmd_thread_launch(mm_camera_cmd_thread_t * cmd_thread,
+                                    mm_camera_cmd_cb_t cb,
+                                    void* user_data)
+{
+    int32_t rc = 0;
+
+    cam_sem_init(&cmd_thread->cmd_sem, 0);
+    cam_sem_init(&cmd_thread->sync_sem, 0);
+    cam_queue_init(&cmd_thread->cmd_queue);
+    cmd_thread->cb = cb;
+    cmd_thread->user_data = user_data;
+    cmd_thread->is_active = TRUE;
+
+    /* launch the thread */
+    rc = pthread_create(&cmd_thread->cmd_pid,
+                   NULL,
+                   mm_camera_cmd_thread,
+                   (void *)cmd_thread);
+    if (0 != rc) {
+        /* BUGFIX: the original ignored pthread_create's return value and
+         * always reported success, leaving callers with a dead control
+         * block and leaking the queue/semaphores. */
+        LOGE("pthread_create failed rc=%d", rc);
+        cmd_thread->is_active = FALSE;
+        cam_queue_deinit(&cmd_thread->cmd_queue);
+        cam_sem_destroy(&cmd_thread->cmd_sem);
+        cam_sem_destroy(&cmd_thread->sync_sem);
+        return -1;
+    }
+    return rc;
+}
+
+/* Tag the calling thread with a human-readable name via prctl.
+ * NULL or empty input is a no-op; the function always reports success. */
+int32_t mm_camera_cmd_thread_name(const char* name)
+{
+    if ((name != NULL) && (name[0] != '\0')) {
+        prctl(PR_SET_NAME, (unsigned long)name, 0, 0, 0);
+    }
+    return 0;
+}
+
+
+/* Ask the command worker to exit and wait for it to finish.
+ * Posts an EXIT node on the command queue (the worker frees it) and joins
+ * the thread.  Returns 0 on success, -1 if the EXIT node could not be
+ * allocated. */
+int32_t mm_camera_cmd_thread_stop(mm_camera_cmd_thread_t * cmd_thread)
+{
+    mm_camera_cmdcb_t *exit_node;
+
+    exit_node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+    if (exit_node == NULL) {
+        LOGE("No memory for mm_camera_cmdcb_t");
+        return -1;
+    }
+    memset(exit_node, 0, sizeof(mm_camera_cmdcb_t));
+    exit_node->cmd_type = MM_CAMERA_CMD_TYPE_EXIT;
+
+    /* hand the EXIT command to the worker and wake it up */
+    cam_queue_enq(&cmd_thread->cmd_queue, exit_node);
+    cam_sem_post(&cmd_thread->cmd_sem);
+
+    /* reap the worker; a failed join just means it already exited */
+    if (pthread_join(cmd_thread->cmd_pid, NULL) != 0) {
+        LOGD("pthread dead already\n");
+    }
+    return 0;
+}
+
+/* Release queue and semaphore resources of a stopped command thread and
+ * scrub the control block.  Always returns 0.  Must only be called after
+ * the worker has been stopped. */
+int32_t mm_camera_cmd_thread_destroy(mm_camera_cmd_thread_t * cmd_thread)
+{
+    cam_queue_deinit(&cmd_thread->cmd_queue);
+    cam_sem_destroy(&cmd_thread->cmd_sem);
+    cam_sem_destroy(&cmd_thread->sync_sem);
+    memset(cmd_thread, 0, sizeof(*cmd_thread));
+    return 0;
+}
+
+/* Convenience wrapper: stop the command worker, then (only if the stop
+ * succeeded) destroy its resources.  Returns 0 on success or the error
+ * of whichever step failed. */
+int32_t mm_camera_cmd_thread_release(mm_camera_cmd_thread_t * cmd_thread)
+{
+    int32_t rc = mm_camera_cmd_thread_stop(cmd_thread);
+    if (rc == 0) {
+        rc = mm_camera_cmd_thread_destroy(cmd_thread);
+    }
+    return rc;
+}
diff --git a/msmcobalt/QCamera2/stack/mm-camera-test/Android.mk b/msmcobalt/QCamera2/stack/mm-camera-test/Android.mk
new file mode 100644
index 0000000..0ea22e0
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-test/Android.mk
@@ -0,0 +1,193 @@
+OLD_LOCAL_PATH := $(LOCAL_PATH)
+LOCAL_PATH:=$(call my-dir)
+
+# Build command line test app: mm-qcamera-app
+include $(LOCAL_PATH)/../../../common.mk
+include $(CLEAR_VARS)
+
+LOCAL_CFLAGS:= \
+        -DAMSS_VERSION=$(AMSS_VERSION) \
+        $(mmcamera_debug_defines) \
+        $(mmcamera_debug_cflags) \
+        $(USE_SERVER_TREE)
+
+ifeq ($(strip $(TARGET_USES_ION)),true)
+LOCAL_CFLAGS += -DUSE_ION
+endif
+
+LOCAL_CFLAGS += -D_ANDROID_ -DQCAMERA_REDEFINE_LOG
+
+# System header file path prefix
+LOCAL_CFLAGS += -DSYSTEM_HEADER_PREFIX=sys
+
+LOCAL_SRC_FILES:= \
+        src/mm_qcamera_main_menu.c \
+        src/mm_qcamera_app.c \
+        src/mm_qcamera_unit_test.c \
+        src/mm_qcamera_video.c \
+        src/mm_qcamera_preview.c \
+        src/mm_qcamera_snapshot.c \
+        src/mm_qcamera_rdi.c \
+        src/mm_qcamera_reprocess.c\
+        src/mm_qcamera_queue.c \
+        src/mm_qcamera_socket.c \
+        src/mm_qcamera_commands.c
+#        src/mm_qcamera_dual_test.c \
+
+LOCAL_C_INCLUDES:=$(LOCAL_PATH)/inc
+LOCAL_C_INCLUDES+= \
+        frameworks/native/include/media/openmax \
+        $(LOCAL_PATH)/../common \
+        $(LOCAL_PATH)/../mm-camera-interface/inc \
+        $(LOCAL_PATH)/../../../mm-image-codec/qexif \
+        $(LOCAL_PATH)/../../../mm-image-codec/qomx_core
+
+LOCAL_C_INCLUDES+= $(kernel_includes)
+LOCAL_ADDITIONAL_DEPENDENCIES := $(common_deps)
+
+LOCAL_CFLAGS += -DCAMERA_ION_HEAP_ID=ION_IOMMU_HEAP_ID
+ifeq ($(TARGET_BOARD_PLATFORM),msm8974)
+        LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+        LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+else ifeq ($(filter $(TARGET_BOARD_PLATFORM), apq8084 msm8084),$(TARGET_BOARD_PLATFORM))
+        LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+        LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+else ifeq ($(TARGET_BOARD_PLATFORM),msm8994)
+        LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+        LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+# BUGFIX: "ifeq ($(TARGET_BOARD_PLATFORM),msm8916 msm8952 ...)" compares one
+# value against a space-separated list and can never match; use filter, as
+# the apq8084/msm8660 branches already do.
+else ifneq (,$(filter msm8916 msm8952 msm8937 msm8953,$(TARGET_BOARD_PLATFORM)))
+        LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+        LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+else ifeq ($(TARGET_BOARD_PLATFORM),msm8226)
+        LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+        LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+else ifeq ($(TARGET_BOARD_PLATFORM),msm8610)
+        LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+        LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+else ifeq ($(TARGET_BOARD_PLATFORM),msm8960)
+        LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+        LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=5
+else ifneq (,$(filter msm8660,$(TARGET_BOARD_PLATFORM)))
+        LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID # EBI
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+        LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=5
+else
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=GRALLOC_USAGE_PRIVATE_UNCACHED #uncached
+        LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_CAMERA_HEAP_ID
+        LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=5
+endif
+LOCAL_CFLAGS += -Wall -Wextra -Werror
+
+LOCAL_SHARED_LIBRARIES:= \
+         libcutils libdl libmmcamera_interface
+
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_32_BIT_ONLY := $(BOARD_QTI_CAMERA_32BIT_ONLY)
+
+LOCAL_MODULE:= mm-qcamera-app
+
+include $(BUILD_EXECUTABLE)
+
+# Build tuning library
+include $(CLEAR_VARS)
+
+LOCAL_CFLAGS:= \
+        -DAMSS_VERSION=$(AMSS_VERSION) \
+        $(mmcamera_debug_defines) \
+        $(mmcamera_debug_cflags) \
+        $(USE_SERVER_TREE)
+
+ifeq ($(strip $(TARGET_USES_ION)),true)
+LOCAL_CFLAGS += -DUSE_ION
+endif
+
+LOCAL_CFLAGS += -D_ANDROID_ -DQCAMERA_REDEFINE_LOG
+
+# System header file path prefix
+LOCAL_CFLAGS += -DSYSTEM_HEADER_PREFIX=sys
+
+LOCAL_SRC_FILES:= \
+        src/mm_qcamera_main_menu.c \
+        src/mm_qcamera_app.c \
+        src/mm_qcamera_unit_test.c \
+        src/mm_qcamera_video.c \
+        src/mm_qcamera_preview.c \
+        src/mm_qcamera_snapshot.c \
+        src/mm_qcamera_rdi.c \
+        src/mm_qcamera_reprocess.c\
+        src/mm_qcamera_queue.c \
+        src/mm_qcamera_socket.c \
+        src/mm_qcamera_commands.c
+#        src/mm_qcamera_dual_test.c \
+
+LOCAL_C_INCLUDES:=$(LOCAL_PATH)/inc
+LOCAL_C_INCLUDES+= \
+        frameworks/native/include/media/openmax \
+        $(LOCAL_PATH)/../common \
+        $(LOCAL_PATH)/../mm-camera-interface/inc \
+        $(LOCAL_PATH)/../../../mm-image-codec/qexif \
+        $(LOCAL_PATH)/../../../mm-image-codec/qomx_core
+
+LOCAL_C_INCLUDES+= $(kernel_includes)
+LOCAL_ADDITIONAL_DEPENDENCIES := $(common_deps)
+
+LOCAL_CFLAGS += -DCAMERA_ION_HEAP_ID=ION_IOMMU_HEAP_ID
+ifeq ($(TARGET_BOARD_PLATFORM),msm8974)
+        LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+        LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+else ifeq ($(filter $(TARGET_BOARD_PLATFORM), apq8084 msm8084),$(TARGET_BOARD_PLATFORM))
+        LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+        LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+else ifeq ($(TARGET_BOARD_PLATFORM),msm8994)
+        LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+        LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+# BUGFIX: ifeq against a space-separated platform list never matches a
+# single TARGET_BOARD_PLATFORM value; use filter instead.
+else ifneq (,$(filter msm8916 msm8952 msm8937 msm8953,$(TARGET_BOARD_PLATFORM)))
+        LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+        LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+else ifeq ($(TARGET_BOARD_PLATFORM),msm8226)
+        LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+        LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+else ifeq ($(TARGET_BOARD_PLATFORM),msm8610)
+        LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+        LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+else ifeq ($(TARGET_BOARD_PLATFORM),msm8960)
+        LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+        LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=5
+else ifneq (,$(filter msm8660,$(TARGET_BOARD_PLATFORM)))
+        LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID # EBI
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+        LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=5
+else
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=GRALLOC_USAGE_PRIVATE_UNCACHED #uncached
+        LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_CAMERA_HEAP_ID
+        LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=5
+endif
+LOCAL_CFLAGS += -Wall -Wextra -Werror
+
+LOCAL_SHARED_LIBRARIES:= \
+         libcutils libdl libmmcamera_interface
+
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_32_BIT_ONLY := $(BOARD_QTI_CAMERA_32BIT_ONLY)
+
+LOCAL_MODULE:= libmm-qcamera
+
+include $(BUILD_SHARED_LIBRARY)
+
+LOCAL_PATH := $(OLD_LOCAL_PATH)
diff --git a/msmcobalt/QCamera2/stack/mm-camera-test/inc/mm_qcamera_app.h b/msmcobalt/QCamera2/stack/mm-camera-test/inc/mm_qcamera_app.h
new file mode 100644
index 0000000..1b94ea8
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-test/inc/mm_qcamera_app.h
@@ -0,0 +1,533 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_QCAMERA_APP_H__
+#define __MM_QCAMERA_APP_H__
+
+// System dependencies
+#include <pthread.h>
+#include <linux/msm_ion.h>
+#include <linux/msm_mdp.h>
+
+// Camera dependencies
+#include "mm_camera_interface.h"
+#include "mm_jpeg_interface.h"
+#include "mm_qcamera_socket.h"
+
+#define MM_QCAMERA_APP_INTERATION 1
+
+#define MM_APP_MAX_DUMP_FRAME_NUM 1000
+
+#define PREVIEW_BUF_NUM 7
+#define SNAPSHOT_BUF_NUM 10
+#define VIDEO_BUF_NUM 7
+#define ISP_PIX_BUF_NUM 9
+#define STATS_BUF_NUM 4
+#define RDI_BUF_NUM 8
+#define CAPTURE_BUF_NUM 5
+
+#define DEFAULT_PREVIEW_FORMAT    CAM_FORMAT_YUV_420_NV21
+#define DEFAULT_PREVIEW_WIDTH     1280
+#define DEFAULT_PREVIEW_HEIGHT    960
+#define DEFAULT_PREVIEW_PADDING   CAM_PAD_TO_WORD
+#define DEFAULT_VIDEO_FORMAT      CAM_FORMAT_YUV_420_NV12
+#define DEFAULT_VIDEO_WIDTH       800
+#define DEFAULT_VIDEO_HEIGHT      480
+#define DEFAULT_VIDEO_PADDING     CAM_PAD_TO_2K
+#define DEFAULT_SNAPSHOT_FORMAT   CAM_FORMAT_YUV_420_NV21
+#define DEFAULT_RAW_FORMAT        CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG
+
+#define DEFAULT_SNAPSHOT_WIDTH    4160
+#define DEFAULT_SNAPSHOT_HEIGHT   3120
+#define DEFAULT_SNAPSHOT_PADDING  CAM_PAD_TO_WORD
+
+#define DEFAULT_OV_FORMAT         MDP_Y_CRCB_H2V2
+/* NOTE(review): 3/2 is deliberately left unparenthesized so that
+ * "size * DEFAULT_..._BPP" expands to "size * 3 / 2" (YUV420 = 1.5 bytes
+ * per pixel); parenthesizing as (3/2) would truncate to 1.  Only use these
+ * macros as a trailing multiplier — never divide by them or pass them as
+ * standalone values. */
+#define DEFAULT_OV_FORMAT_BPP     3/2
+#define DEFAULT_CAMERA_FORMAT_BPP 3/2
+#define FB_PATH                   "/dev/graphics/fb0"
+#define BACKLIGHT_CONTROL         "/sys/class/leds/lcd-backlight/brightness"
+#define BACKLIGHT_LEVEL           "205"
+
+#define ENABLE_REPROCESSING       1
+
+#define INVALID_KEY_PRESS 0
+#define BASE_OFFSET  ('Z' - 'A' + 1)
+#define BASE_OFFSET_NUM  ('Z' - 'A' + 2)
+
+#ifndef TRUE
+#define TRUE 1
+#endif
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+typedef enum {
+    TUNE_CMD_INIT,
+    TUNE_CMD_GET_LIST,
+    TUNE_CMD_GET_PARAMS,
+    TUNE_CMD_SET_PARAMS,
+    TUNE_CMD_MISC,
+    TUNE_CMD_DEINIT,
+} mm_camera_tune_cmd_t;
+
+typedef enum {
+    TUNE_PREVCMD_INIT,
+    TUNE_PREVCMD_SETDIM,
+    TUNE_PREVCMD_GETINFO,
+    TUNE_PREVCMD_GETCHUNKSIZE,
+    TUNE_PREVCMD_GETFRAME,
+    TUNE_PREVCMD_UNSUPPORTED,
+    TUNE_PREVCMD_DEINIT,
+} mm_camera_tune_prevcmd_t;
+
+typedef void (*cam_stream_user_cb) (mm_camera_buf_def_t *frame);
+typedef void (*prev_callback) (mm_camera_buf_def_t *preview_frame);
+
+
+typedef struct {
+  char *send_buf;
+  uint32_t send_len;
+  void *next;
+} eztune_prevcmd_rsp;
+
+typedef struct {
+    int (*command_process) (void *recv, mm_camera_tune_cmd_t cmd,
+      void *param, char *send_buf, uint32_t send_len);
+    int (*prevcommand_process) (void *recv, mm_camera_tune_prevcmd_t cmd,
+      void *param, char **send_buf, uint32_t *send_len);
+    void (*prevframe_callback) (mm_camera_buf_def_t *preview_frame);
+} mm_camera_tune_func_t;
+
+typedef struct {
+    mm_camera_tune_func_t *func_tbl;
+    void *lib_handle;
+}mm_camera_tuning_lib_params_t;
+
+typedef enum {
+    MM_CAMERA_OK,
+    MM_CAMERA_E_GENERAL,
+    MM_CAMERA_E_NO_MEMORY,
+    MM_CAMERA_E_NOT_SUPPORTED,
+    MM_CAMERA_E_INVALID_INPUT,
+    MM_CAMERA_E_INVALID_OPERATION, /* 5 */
+    MM_CAMERA_E_ENCODE,
+    MM_CAMERA_E_BUFFER_REG,
+    MM_CAMERA_E_PMEM_ALLOC,
+    MM_CAMERA_E_CAPTURE_FAILED,
+    MM_CAMERA_E_CAPTURE_TIMEOUT, /* 10 */
+} mm_camera_status_type_t;
+
+typedef enum {
+    MM_CHANNEL_TYPE_ZSL,      /* preview, and snapshot main */
+    MM_CHANNEL_TYPE_CAPTURE,  /* snapshot main, and postview */
+    MM_CHANNEL_TYPE_PREVIEW,  /* preview only */
+    MM_CHANNEL_TYPE_SNAPSHOT, /* snapshot main only */
+    MM_CHANNEL_TYPE_VIDEO,    /* video only */
+    MM_CHANNEL_TYPE_RDI,      /* rdi only */
+    MM_CHANNEL_TYPE_REPROCESS,/* offline reprocess */
+    MM_CHANNEL_TYPE_MAX
+} mm_camera_channel_type_t;
+
+typedef struct {
+    int                     fd;
+    int                     main_ion_fd;
+    ion_user_handle_t       handle;
+    size_t                  size;
+    parm_buffer_t          *data;
+} mm_camera_app_meminfo_t;
+
+typedef struct {
+    mm_camera_buf_def_t buf;
+    mm_camera_app_meminfo_t mem_info;
+} mm_camera_app_buf_t;
+
+typedef struct {
+    uint32_t s_id;
+    mm_camera_stream_config_t s_config;
+    cam_frame_len_offset_t offset;
+    uint8_t num_of_bufs;
+    uint32_t multipleOf;
+    mm_camera_app_buf_t s_bufs[MM_CAMERA_MAX_NUM_FRAMES];
+    mm_camera_app_buf_t s_info_buf;
+} mm_camera_stream_t;
+
+typedef struct {
+    uint32_t ch_id;
+    uint8_t num_streams;
+    mm_camera_stream_t streams[MAX_STREAM_NUM_IN_BUNDLE];
+} mm_camera_channel_t;
+
+typedef void (*release_data_fn)(void* data, void *user_data);
+
+typedef struct {
+    struct cam_list list;
+    void* data;
+} camera_q_node;
+
+typedef struct {
+    camera_q_node m_head;
+    int m_size;
+    pthread_mutex_t m_lock;
+    release_data_fn m_dataFn;
+    void * m_userData;
+} mm_camera_queue_t;
+
+typedef struct {
+    uint16_t user_input_display_width;
+    uint16_t user_input_display_height;
+} USER_INPUT_DISPLAY_T;
+
+typedef struct {
+    mm_camera_vtbl_t *cam;
+    uint8_t num_channels;
+    mm_camera_channel_t channels[MM_CHANNEL_TYPE_MAX];
+    mm_jpeg_ops_t jpeg_ops;
+    uint32_t jpeg_hdl;
+    mm_camera_app_buf_t cap_buf;
+    mm_camera_app_buf_t parm_buf;
+
+    uint32_t current_jpeg_sess_id;
+    mm_camera_super_buf_t* current_job_frames;
+    uint32_t current_job_id;
+    mm_camera_app_buf_t jpeg_buf;
+
+    int fb_fd;
+    struct fb_var_screeninfo vinfo;
+    struct mdp_overlay data_overlay;
+    uint32_t slice_size;
+    uint32_t buffer_width, buffer_height;
+    uint32_t buffer_size;
+    cam_format_t buffer_format;
+    uint32_t frame_size;
+    uint32_t frame_count;
+    int encodeJpeg;
+    int zsl_enabled;
+    int8_t focus_supported;
+    cam_stream_user_cb user_preview_cb;
+    cam_stream_user_cb user_metadata_cb;
+    parm_buffer_t *params_buffer;
+    USER_INPUT_DISPLAY_T preview_resolution;
+
+    //Reprocess params&stream
+    int8_t enable_reproc;
+    int32_t reproc_sharpness;
+    cam_denoise_param_t reproc_wnr;
+    int8_t enable_CAC;
+    mm_camera_queue_t pp_frames;
+    mm_camera_stream_t *reproc_stream;
+    metadata_buffer_t *metadata;
+    int8_t is_chromatix_reload;
+} mm_camera_test_obj_t;
+
+typedef struct {
+  void *ptr;
+  void* ptr_jpeg;
+
+  uint8_t (*get_num_of_cameras) ();
+  int32_t (*mm_camera_open) (uint8_t camera_idx, mm_camera_vtbl_t **camera_vtbl);
+  uint32_t (*jpeg_open)(mm_jpeg_ops_t *ops, mm_jpeg_mpo_ops_t *mpo_ops,
+                   mm_dimension picture_size,
+                   cam_jpeg_metadata_t *jpeg_metadata);
+
+} hal_interface_lib_t;
+
+typedef struct {
+    uint8_t num_cameras;
+    hal_interface_lib_t hal_lib;
+} mm_camera_app_t;
+
+typedef struct {
+    uint32_t width;
+    uint32_t height;
+} mm_camera_lib_snapshot_params;
+
+typedef enum {
+    MM_CAMERA_LIB_NO_ACTION = 0,
+    MM_CAMERA_LIB_RAW_CAPTURE,
+    MM_CAMERA_LIB_JPEG_CAPTURE,
+    MM_CAMERA_LIB_SET_FOCUS_MODE,
+    MM_CAMERA_LIB_DO_AF,
+    MM_CAMERA_LIB_CANCEL_AF,
+    MM_CAMERA_LIB_LOCK_AE,
+    MM_CAMERA_LIB_UNLOCK_AE,
+    MM_CAMERA_LIB_LOCK_AWB,
+    MM_CAMERA_LIB_UNLOCK_AWB,
+    MM_CAMERA_LIB_GET_CHROMATIX,
+    MM_CAMERA_LIB_SET_RELOAD_CHROMATIX,
+    MM_CAMERA_LIB_GET_AFTUNE,
+    MM_CAMERA_LIB_SET_RELOAD_AFTUNE,
+    MM_CAMERA_LIB_SET_AUTOFOCUS_TUNING,
+    MM_CAMERA_LIB_ZSL_ENABLE,
+    MM_CAMERA_LIB_EV,
+    MM_CAMERA_LIB_ANTIBANDING,
+    MM_CAMERA_LIB_SET_VFE_COMMAND,
+    MM_CAMERA_LIB_SET_POSTPROC_COMMAND,
+    MM_CAMERA_LIB_SET_3A_COMMAND,
+    MM_CAMERA_LIB_AEC_ENABLE,
+    MM_CAMERA_LIB_AEC_DISABLE,
+    MM_CAMERA_LIB_AF_ENABLE,
+    MM_CAMERA_LIB_AF_DISABLE,
+    MM_CAMERA_LIB_AWB_ENABLE,
+    MM_CAMERA_LIB_AWB_DISABLE,
+    MM_CAMERA_LIB_AEC_FORCE_LC,
+    MM_CAMERA_LIB_AEC_FORCE_GAIN,
+    MM_CAMERA_LIB_AEC_FORCE_EXP,
+    MM_CAMERA_LIB_AEC_FORCE_SNAP_LC,
+    MM_CAMERA_LIB_AEC_FORCE_SNAP_GAIN,
+    MM_CAMERA_LIB_AEC_FORCE_SNAP_EXP,
+    MM_CAMERA_LIB_WB,
+    MM_CAMERA_LIB_EXPOSURE_METERING,
+    MM_CAMERA_LIB_BRIGHTNESS,
+    MM_CAMERA_LIB_CONTRAST,
+    MM_CAMERA_LIB_SATURATION,
+    MM_CAMERA_LIB_SHARPNESS,
+    MM_CAMERA_LIB_ISO,
+    MM_CAMERA_LIB_ZOOM,
+    MM_CAMERA_LIB_BESTSHOT,
+    MM_CAMERA_LIB_FLASH,
+    MM_CAMERA_LIB_FPS_RANGE,
+    MM_CAMERA_LIB_WNR_ENABLE,
+    MM_CAMERA_LIB_SET_TINTLESS,
+} mm_camera_lib_commands;
+
+typedef struct {
+    int32_t stream_width, stream_height;
+    cam_focus_mode_type af_mode;
+} mm_camera_lib_params;
+
+typedef struct {
+  tuneserver_protocol_t *proto;
+  int clientsocket_id;
+  prserver_protocol_t *pr_proto;
+  int pr_clientsocket_id;
+  mm_camera_tuning_lib_params_t tuning_params;
+} tuningserver_t;
+
+typedef struct {
+    mm_camera_app_t app_ctx;
+    mm_camera_test_obj_t test_obj;
+    mm_camera_lib_params current_params;
+    int stream_running;
+    tuningserver_t tsctrl;
+} mm_camera_lib_ctx;
+
+typedef mm_camera_lib_ctx mm_camera_lib_handle;
+
+typedef int (*mm_app_test_t) (mm_camera_app_t *cam_apps);
+typedef struct {
+    mm_app_test_t f;
+    int r;
+} mm_app_tc_t;
+
+extern int mm_app_unit_test_entry(mm_camera_app_t *cam_app);
+extern int mm_app_dual_test_entry(mm_camera_app_t *cam_app);
+extern int setmetainfoCommand(mm_camera_test_obj_t *test_obj,
+                              cam_stream_size_info_t *value);
+
+extern void mm_app_dump_frame(mm_camera_buf_def_t *frame,
+                              char *name,
+                              char *ext,
+                              uint32_t frame_idx);
+extern void mm_app_dump_jpeg_frame(const void * data,
+                                   size_t size,
+                                   char* name,
+                                   char* ext,
+                                   uint32_t index);
+extern int mm_camera_app_timedwait(uint8_t seconds);
+extern int mm_camera_app_wait();
+extern void mm_camera_app_done();
+extern int mm_app_alloc_bufs(mm_camera_app_buf_t* app_bufs,
+                             cam_frame_len_offset_t *frame_offset_info,
+                             uint8_t num_bufs,
+                             uint8_t is_streambuf,
+                             size_t multipleOf);
+extern int mm_app_release_bufs(uint8_t num_bufs,
+                               mm_camera_app_buf_t* app_bufs);
+extern int mm_app_stream_initbuf(cam_frame_len_offset_t *frame_offset_info,
+                                 uint8_t *num_bufs,
+                                 uint8_t **initial_reg_flag,
+                                 mm_camera_buf_def_t **bufs,
+                                 mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                                 void *user_data);
+extern int32_t mm_app_stream_deinitbuf(mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                                       void *user_data);
+extern int mm_app_cache_ops(mm_camera_app_meminfo_t *mem_info, int cmd);
+extern int32_t mm_app_stream_clean_invalidate_buf(uint32_t index, void *user_data);
+extern int32_t mm_app_stream_invalidate_buf(uint32_t index, void *user_data);
+extern int mm_app_open(mm_camera_app_t *cam_app,
+                       int cam_id,
+                       mm_camera_test_obj_t *test_obj);
+extern int mm_app_close(mm_camera_test_obj_t *test_obj);
+extern mm_camera_channel_t * mm_app_add_channel(
+                                         mm_camera_test_obj_t *test_obj,
+                                         mm_camera_channel_type_t ch_type,
+                                         mm_camera_channel_attr_t *attr,
+                                         mm_camera_buf_notify_t channel_cb,
+                                         void *userdata);
+extern int mm_app_del_channel(mm_camera_test_obj_t *test_obj,
+                              mm_camera_channel_t *channel);
+extern mm_camera_stream_t * mm_app_add_stream(mm_camera_test_obj_t *test_obj,
+                                              mm_camera_channel_t *channel);
+extern int mm_app_del_stream(mm_camera_test_obj_t *test_obj,
+                             mm_camera_channel_t *channel,
+                             mm_camera_stream_t *stream);
+extern int mm_app_config_stream(mm_camera_test_obj_t *test_obj,
+                                mm_camera_channel_t *channel,
+                                mm_camera_stream_t *stream,
+                                mm_camera_stream_config_t *config);
+extern int mm_app_start_channel(mm_camera_test_obj_t *test_obj,
+                                mm_camera_channel_t *channel);
+extern int mm_app_stop_channel(mm_camera_test_obj_t *test_obj,
+                               mm_camera_channel_t *channel);
+extern mm_camera_channel_t *mm_app_get_channel_by_type(
+                                    mm_camera_test_obj_t *test_obj,
+                                    mm_camera_channel_type_t ch_type);
+
+extern int mm_app_start_preview(mm_camera_test_obj_t *test_obj);
+extern int mm_app_stop_preview(mm_camera_test_obj_t *test_obj);
+extern int mm_app_start_preview_zsl(mm_camera_test_obj_t *test_obj);
+extern int mm_app_stop_preview_zsl(mm_camera_test_obj_t *test_obj);
+extern mm_camera_channel_t * mm_app_add_preview_channel(
+                                mm_camera_test_obj_t *test_obj);
+extern mm_camera_stream_t * mm_app_add_raw_stream(mm_camera_test_obj_t *test_obj,
+                                                mm_camera_channel_t *channel,
+                                                mm_camera_buf_notify_t stream_cb,
+                                                void *userdata,
+                                                uint8_t num_bufs,
+                                                uint8_t num_burst);
+extern int mm_app_stop_and_del_channel(mm_camera_test_obj_t *test_obj,
+                                       mm_camera_channel_t *channel);
+extern mm_camera_channel_t * mm_app_add_snapshot_channel(
+                                               mm_camera_test_obj_t *test_obj);
+extern mm_camera_stream_t * mm_app_add_snapshot_stream(
+                                                mm_camera_test_obj_t *test_obj,
+                                                mm_camera_channel_t *channel,
+                                                mm_camera_buf_notify_t stream_cb,
+                                                void *userdata,
+                                                uint8_t num_bufs,
+                                                uint8_t num_burst);
+extern mm_camera_stream_t * mm_app_add_metadata_stream(mm_camera_test_obj_t *test_obj,
+                                               mm_camera_channel_t *channel,
+                                               mm_camera_buf_notify_t stream_cb,
+                                               void *userdata,
+                                               uint8_t num_bufs);
+extern int mm_app_start_record_preview(mm_camera_test_obj_t *test_obj);
+extern int mm_app_stop_record_preview(mm_camera_test_obj_t *test_obj);
+extern int mm_app_start_record(mm_camera_test_obj_t *test_obj);
+extern int mm_app_stop_record(mm_camera_test_obj_t *test_obj);
+extern int mm_app_start_live_snapshot(mm_camera_test_obj_t *test_obj);
+extern int mm_app_stop_live_snapshot(mm_camera_test_obj_t *test_obj);
+extern int mm_app_start_capture(mm_camera_test_obj_t *test_obj,
+                                uint8_t num_snapshots);
+extern int mm_app_stop_capture(mm_camera_test_obj_t *test_obj);
+extern int mm_app_start_capture_raw(mm_camera_test_obj_t *test_obj,
+                                    uint8_t num_snapshots);
+extern int mm_app_stop_capture_raw(mm_camera_test_obj_t *test_obj);
+extern int mm_app_start_rdi(mm_camera_test_obj_t *test_obj, uint8_t num_burst);
+extern int mm_app_stop_rdi(mm_camera_test_obj_t *test_obj);
+extern int mm_app_initialize_fb(mm_camera_test_obj_t *test_obj);
+extern int mm_app_close_fb(mm_camera_test_obj_t *test_obj);
+extern int mm_app_fb_write(mm_camera_test_obj_t *test_obj, char *buffer);
+extern int mm_app_overlay_display(mm_camera_test_obj_t *test_obj, int bufferFd);
+extern int mm_app_allocate_ion_memory(mm_camera_app_buf_t *buf, unsigned int ion_type);
+extern int mm_app_deallocate_ion_memory(mm_camera_app_buf_t *buf);
+extern int mm_app_set_params(mm_camera_test_obj_t *test_obj,
+                      cam_intf_parm_type_t param_type,
+                      int32_t value);
+extern int mm_app_set_preview_fps_range(mm_camera_test_obj_t *test_obj,
+                        cam_fps_range_t *fpsRange);
+extern int mm_app_set_face_detection(mm_camera_test_obj_t *test_obj,
+                        cam_fd_set_parm_t *fd_set_parm);
+extern int mm_app_set_metadata_usercb(mm_camera_test_obj_t *test_obj,
+                      cam_stream_user_cb usercb);
+/* BUGFIX: removed a second, duplicate declaration of
+ * mm_app_set_face_detection (identical signature, declared twice). */
+extern int mm_app_set_flash_mode(mm_camera_test_obj_t *test_obj,
+        cam_flash_mode_t flashMode);
+
+/* JIG camera lib interface */
+
+int mm_camera_lib_open(mm_camera_lib_handle *handle, int cam_id);
+int mm_camera_lib_get_caps(mm_camera_lib_handle *handle,
+                           cam_capability_t *caps);
+int mm_camera_lib_start_stream(mm_camera_lib_handle *handle);
+int mm_camera_lib_send_command(mm_camera_lib_handle *handle,
+                               mm_camera_lib_commands cmd,
+                               void *data, void *out_data);
+int mm_camera_lib_stop_stream(mm_camera_lib_handle *handle);
+int mm_camera_lib_number_of_cameras(mm_camera_lib_handle *handle);
+int mm_camera_lib_close(mm_camera_lib_handle *handle);
+int32_t mm_camera_load_tuninglibrary(
+  mm_camera_tuning_lib_params_t *tuning_param);
+int mm_camera_lib_set_preview_usercb(
+  mm_camera_lib_handle *handle, cam_stream_user_cb cb);
+//
+
+int mm_app_start_regression_test(int run_tc);
+int mm_app_load_hal(mm_camera_app_t *my_cam_app);
+
+extern int createEncodingSession(mm_camera_test_obj_t *test_obj,
+                          mm_camera_stream_t *m_stream,
+                          mm_camera_buf_def_t *m_frame);
+extern int encodeData(mm_camera_test_obj_t *test_obj, mm_camera_super_buf_t* recvd_frame,
+               mm_camera_stream_t *m_stream);
+extern int mm_app_take_picture(mm_camera_test_obj_t *test_obj, uint8_t);
+
+extern mm_camera_channel_t * mm_app_add_reprocess_channel(mm_camera_test_obj_t *test_obj,
+                                                   mm_camera_stream_t *source_stream);
+extern int mm_app_start_reprocess(mm_camera_test_obj_t *test_obj);
+extern int mm_app_stop_reprocess(mm_camera_test_obj_t *test_obj);
+extern int mm_app_do_reprocess(mm_camera_test_obj_t *test_obj,
+        mm_camera_buf_def_t *frame,
+        uint32_t meta_idx,
+        mm_camera_super_buf_t *super_buf,
+        mm_camera_stream_t *src_meta);
+extern void mm_app_release_ppinput(void *data, void *user_data);
+
+extern int mm_camera_queue_init(mm_camera_queue_t *queue,
+                         release_data_fn data_rel_fn,
+                         void *user_data);
+extern int mm_qcamera_queue_release(mm_camera_queue_t *queue);
+extern int mm_qcamera_queue_isempty(mm_camera_queue_t *queue);
+extern int mm_qcamera_queue_enqueue(mm_camera_queue_t *queue, void *data);
+extern void* mm_qcamera_queue_dequeue(mm_camera_queue_t *queue,
+                                      int bFromHead);
+extern void mm_qcamera_queue_flush(mm_camera_queue_t *queue);
+
+#endif /* __MM_QCAMERA_APP_H__ */
+
+
+
+
+
+
+
+
+
diff --git a/msmcobalt/QCamera2/stack/mm-camera-test/inc/mm_qcamera_commands.h b/msmcobalt/QCamera2/stack/mm-camera-test/inc/mm_qcamera_commands.h
new file mode 100644
index 0000000..1a7ac33
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-test/inc/mm_qcamera_commands.h
@@ -0,0 +1,68 @@
+/* Copyright (c) 2013, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_QCAMERA_COMMANDS_H__
+#define __MM_QCAMERA_COMMANDS_H__
+
+// Camera dependencies
+#include "mm_qcamera_socket.h"
+#include "mm_qcamera_app.h"
+
+int tuneserver_close_cam(mm_camera_lib_handle *lib_handle);
+int tuneserver_stop_cam(mm_camera_lib_handle *lib_handle);
+int tuneserver_open_cam(mm_camera_lib_handle *lib_handle);
+
+int tuneserver_initialize_tuningp(void * ctrl, int client_socket_id,
+  char *send_buf, uint32_t send_len);
+int tuneserver_deinitialize_tuningp(void * ctrl, int client_socket_id,
+  char *send_buf, uint32_t send_len);
+int tuneserver_process_get_list_cmd(void * ctrl, void *recv_cmd,
+  char *send_buf, uint32_t send_len);
+int tuneserver_process_misc_cmd(void * ctrl, void *recv_cmd,
+  char *send_buf, uint32_t send_len);
+int tuneserver_process_get_params_cmd(void * ctrl, void *recv_cmd,
+  char *send_buf, uint32_t send_len);
+int tuneserver_process_set_params_cmd(void * ctrl, void *recv_cmd,
+  char *send_buf, uint32_t send_len);
+
+int tuneserver_initialize_prevtuningp(void * ctrl,
+  int pr_client_socket_id, cam_dimension_t dimension,
+  char **send_buf, uint32_t *send_len);
+int tuneserver_deinitialize_prevtuningp(void * ctrl,
+  char **send_buf, uint32_t *send_len);
+int tuneserver_preview_getinfo(void * ctrl,
+  char **send_buf, uint32_t *send_len);
+int tuneserver_preview_getchunksize(void * ctrl,
+  char **send_buf, uint32_t *send_len);
+int tuneserver_preview_getframe(void * ctrl,
+  char **send_buf, uint32_t *send_len);
+int tuneserver_preview_unsupported(void * ctrl,
+  char **send_buf, uint32_t *send_len);
+
+#endif /*__MM_QCAMERA_COMMANDS_H__*/
diff --git a/msmcobalt/QCamera2/stack/mm-camera-test/inc/mm_qcamera_dbg.h b/msmcobalt/QCamera2/stack/mm-camera-test/inc/mm_qcamera_dbg.h
new file mode 100644
index 0000000..acd197e
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-test/inc/mm_qcamera_dbg.h
@@ -0,0 +1,38 @@
+/* Copyright (c) 2012, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_QCAMERA_DBG_H__
+#define __MM_QCAMERA_DBG_H__
+
+#ifdef QCAMERA_REDEFINE_LOG
+#define CAM_MODULE CAM_HAL_MODULE
+#include "mm_camera_dbg.h"
+#endif
+
+#endif /* __MM_QCAMERA_DBG_H__ */
diff --git a/msmcobalt/QCamera2/stack/mm-camera-test/inc/mm_qcamera_main_menu.h b/msmcobalt/QCamera2/stack/mm-camera-test/inc/mm_qcamera_main_menu.h
new file mode 100644
index 0000000..1b9eb4e
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-test/inc/mm_qcamera_main_menu.h
@@ -0,0 +1,439 @@
+/* Copyright (c) 2013, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_QCAMERA_MAIN_MENU_H__
+#define __MM_QCAMERA_MAIN_MENU_H__
+
+// Camera dependencies
+#include "mm_camera_interface.h"
+#include "mm_jpeg_interface.h"
+
+#define VIDEO_BUFFER_SIZE       (PREVIEW_WIDTH * PREVIEW_HEIGHT * 3/2)
+#define THUMBNAIL_BUFFER_SIZE   (THUMBNAIL_WIDTH * THUMBNAIL_HEIGHT * 3/2)
+#define SNAPSHOT_BUFFER_SIZE    (PICTURE_WIDTH * PICTURE_HEIGHT * 3/2)
+
+/*===========================================================================
+ * Macro
+ *===========================================================================*/
+#define PREVIEW_FRAMES_NUM    5
+#define VIDEO_FRAMES_NUM      5
+#define THUMBNAIL_FRAMES_NUM  1
+#define SNAPSHOT_FRAMES_NUM   1
+#define MAX_NUM_FORMAT        32
+
+typedef enum
+{
+  START_PREVIEW,
+  STOP_PREVIEW,
+  SET_WHITE_BALANCE,
+  SET_TINTLESS_ENABLE,
+  SET_TINTLESS_DISABLE,
+  SET_EXP_METERING,
+  GET_CTRL_VALUE,
+  TOGGLE_AFR,
+  SET_ISO,
+  BRIGHTNESS_GOTO_SUBMENU,
+  CONTRAST_GOTO_SUBMENU,
+  EV_GOTO_SUBMENU,
+  SATURATION_GOTO_SUBMENU,
+  SET_ZOOM,
+  SET_SHARPNESS,
+  TAKE_JPEG_SNAPSHOT,
+  START_RECORDING,
+  STOP_RECORDING,
+  BEST_SHOT,
+  LIVE_SHOT,
+  FLASH_MODES,
+  TOGGLE_ZSL,
+  TAKE_RAW_SNAPSHOT,
+  SWITCH_SNAP_RESOLUTION,
+  TOGGLE_WNR,
+  EXIT
+} Camera_main_menu_t;
+
+typedef enum
+{
+  ACTION_NO_ACTION,
+  ACTION_START_PREVIEW,
+  ACTION_STOP_PREVIEW,
+  ACTION_SET_WHITE_BALANCE,
+  ACTION_SET_TINTLESS_ENABLE,
+  ACTION_SET_TINTLESS_DISABLE,
+  ACTION_SET_EXP_METERING,
+  ACTION_GET_CTRL_VALUE,
+  ACTION_TOGGLE_AFR,
+  ACTION_SET_ISO,
+  ACTION_BRIGHTNESS_INCREASE,
+  ACTION_BRIGHTNESS_DECREASE,
+  ACTION_CONTRAST_INCREASE,
+  ACTION_CONTRAST_DECREASE,
+  ACTION_EV_INCREASE,
+  ACTION_EV_DECREASE,
+  ACTION_SATURATION_INCREASE,
+  ACTION_SATURATION_DECREASE,
+  ACTION_SET_ZOOM,
+  ACTION_SHARPNESS_INCREASE,
+  ACTION_SHARPNESS_DECREASE,
+  ACTION_TAKE_JPEG_SNAPSHOT,
+  ACTION_START_RECORDING,
+  ACTION_STOP_RECORDING,
+  ACTION_SET_BESTSHOT_MODE,
+  ACTION_TAKE_LIVE_SNAPSHOT,
+  ACTION_SET_FLASH_MODE,
+  ACTION_SWITCH_CAMERA,
+  ACTION_TOGGLE_ZSL,
+  ACTION_TAKE_RAW_SNAPSHOT,
+  ACTION_SWITCH_RESOLUTION,
+  ACTION_TOGGLE_WNR,
+  ACTION_EXIT
+} camera_action_t;
+
+#define INVALID_KEY_PRESS 0
+#define BASE_OFFSET  ('Z' - 'A' + 1)
+#define BASE_OFFSET_NUM  ('Z' - 'A' + 2)
+#define PAD_TO_WORD(a)  (((a)+3)&~3)
+
+
+#define SQCIF_WIDTH     128
+#define SQCIF_HEIGHT     96
+#define QCIF_WIDTH      176
+#define QCIF_HEIGHT     144
+#define QVGA_WIDTH      320
+#define QVGA_HEIGHT     240
+#define HD_THUMBNAIL_WIDTH      256
+#define HD_THUMBNAIL_HEIGHT     144
+#define CIF_WIDTH       352
+#define CIF_HEIGHT      288
+#define VGA_WIDTH       640
+#define VGA_HEIGHT      480
+#define WVGA_WIDTH      800
+#define WVGA_HEIGHT     480
+#define WVGA_PLUS_WIDTH      960
+#define WVGA_PLUS_HEIGHT     720
+
+#define MP1_WIDTH      1280
+#define MP1_HEIGHT      960
+#define MP2_WIDTH      1600
+#define MP2_HEIGHT     1200
+#define MP3_WIDTH      2048
+#define MP3_HEIGHT     1536
+#define MP5_WIDTH      2592
+#define MP5_HEIGHT     1944
+#define MP8_WIDTH      3264
+#define MP8_HEIGHT     2448
+#define MP12_WIDTH     4000
+#define MP12_HEIGHT    3000
+
+#define SVGA_WIDTH      800
+#define SVGA_HEIGHT     600
+#define XGA_WIDTH      1024
+#define XGA_HEIGHT      768
+#define HD720_WIDTH    1280
+#define HD720_HEIGHT    720
+#define HD720_PLUS_WIDTH    1440
+#define HD720_PLUS_HEIGHT   1080
+#define WXGA_WIDTH     1280
+#define WXGA_HEIGHT     768
+#define HD1080_WIDTH   1920
+#define HD1080_HEIGHT  1080
+
+
+#define ONEMP_WIDTH    1280
+#define SXGA_WIDTH     1280
+#define UXGA_WIDTH     1600
+#define QXGA_WIDTH     2048
+#define FIVEMP_WIDTH   2560
+
+
+#define ONEMP_HEIGHT    960
+#define SXGA_HEIGHT     1024
+#define UXGA_HEIGHT     1200
+#define QXGA_HEIGHT     1536
+#define FIVEMP_HEIGHT   1920
+
+
+typedef enum
+{
+  RESOLUTION_MIN,
+  QCIF                  = RESOLUTION_MIN,
+  QVGA,
+  VGA,
+  WVGA,
+  WVGA_PLUS ,
+  HD720,
+  HD720_PLUS,
+  HD1080,
+  RESOLUTION_PREVIEW_VIDEO_MAX = HD1080,
+  WXGA,
+  MP1,
+  MP2,
+  MP3,
+  MP5,
+  MP8,
+  MP12,
+  RESOLUTION_MAX         = MP12,
+} Camera_Resolution;
+
+typedef struct{
+    uint16_t width;
+    uint16_t  height;
+    char * name;
+    char * str_name;
+    int supported;
+} DIMENSION_TBL_T;
+
+typedef enum {
+    WHITE_BALANCE_STATE,
+    WHITE_BALANCE_TEMPERATURE,
+    BRIGHTNESS_CTRL,
+    EV,
+    CONTRAST_CTRL,
+    SATURATION_CTRL,
+    SHARPNESS_CTRL
+} Get_Ctrl_modes;
+
+typedef enum {
+    AUTO_EXP_FRAME_AVG,
+    AUTO_EXP_CENTER_WEIGHTED,
+    AUTO_EXP_SPOT_METERING,
+    AUTO_EXP_SMART_METERING,
+    AUTO_EXP_USER_METERING,
+    AUTO_EXP_SPOT_METERING_ADV,
+    AUTO_EXP_CENTER_WEIGHTED_ADV,
+    AUTO_EXP_MAX
+} Exp_Metering_modes;
+
+typedef enum {
+  ISO_AUTO,
+  ISO_DEBLUR,
+  ISO_100,
+  ISO_200,
+  ISO_400,
+  ISO_800,
+  ISO_1600,
+  ISO_MAX
+} ISO_modes;
+
+typedef enum {
+  BESTSHOT_AUTO,
+  BESTSHOT_ACTION,
+  BESTSHOT_PORTRAIT,
+  BESTSHOT_LANDSCAPE,
+  BESTSHOT_NIGHT,
+  BESTSHOT_NIGHT_PORTRAIT,
+  BESTSHOT_THEATRE,
+  BESTSHOT_BEACH,
+  BESTSHOT_SNOW,
+  BESTSHOT_SUNSET,
+  BESTSHOT_ANTISHAKE,
+  BESTSHOT_FIREWORKS,
+  BESTSHOT_SPORTS,
+  BESTSHOT_PARTY,
+  BESTSHOT_CANDLELIGHT,
+  BESTSHOT_ASD,
+  BESTSHOT_BACKLIGHT,
+  BESTSHOT_FLOWERS,
+  BESTSHOT_AR,
+  BESTSHOT_HDR,
+  BESTSHOT_MAX
+}Bestshot_modes;
+
+typedef enum {
+    FLASH_MODE_OFF,
+    FLASH_MODE_AUTO,
+    FLASH_MODE_ON,
+    FLASH_MODE_TORCH,
+    FLASH_MODE_MAX,
+}Flash_modes;
+
+typedef enum {
+  WB_AUTO,
+  WB_INCANDESCENT,
+  WB_FLUORESCENT,
+  WB_WARM_FLUORESCENT,
+  WB_DAYLIGHT,
+  WB_CLOUDY_DAYLIGHT,
+  WB_TWILIGHT,
+  WB_SHADE,
+  WB_MAX
+} White_Balance_modes;
+
+typedef enum
+{
+  MENU_ID_MAIN,
+  MENU_ID_WHITEBALANCECHANGE,
+  MENU_ID_EXPMETERINGCHANGE,
+  MENU_ID_GET_CTRL_VALUE,
+  MENU_ID_TOGGLEAFR,
+  MENU_ID_ISOCHANGE,
+  MENU_ID_BRIGHTNESSCHANGE,
+  MENU_ID_CONTRASTCHANGE,
+  MENU_ID_EVCHANGE,
+  MENU_ID_SATURATIONCHANGE,
+  MENU_ID_ZOOMCHANGE,
+  MENU_ID_SHARPNESSCHANGE,
+  MENU_ID_BESTSHOT,
+  MENU_ID_FLASHMODE,
+  MENU_ID_SENSORS,
+  MENU_ID_SWITCH_RES,
+  MENU_ID_INVALID,
+} menu_id_change_t;
+
+typedef enum
+{
+  DECREASE_ZOOM,
+  INCREASE_ZOOM,
+  INCREASE_STEP_ZOOM,
+  DECREASE_STEP_ZOOM,
+} Camera_Zoom;
+
+typedef enum
+{
+  INC_CONTRAST,
+  DEC_CONTRAST,
+} Camera_Contrast_changes;
+
+typedef enum
+{
+  INC_BRIGHTNESS,
+  DEC_BRIGHTNESS,
+} Camera_Brightness_changes;
+
+typedef enum
+{
+  INCREASE_EV,
+  DECREASE_EV,
+} Camera_EV_changes;
+
+typedef enum {
+  INC_SATURATION,
+  DEC_SATURATION,
+} Camera_Saturation_changes;
+
+typedef enum
+{
+  INC_ISO,
+  DEC_ISO,
+} Camera_ISO_changes;
+
+typedef enum
+{
+  INC_SHARPNESS,
+  DEC_SHARPNESS,
+} Camera_Sharpness_changes;
+
+typedef enum {
+  ZOOM_IN,
+  ZOOM_OUT,
+} Zoom_direction;
+
+typedef struct{
+    Camera_main_menu_t main_menu;
+    char * menu_name;
+} CAMERA_MAIN_MENU_TBL_T;
+
+typedef struct{
+    char * menu_name;
+    int present;
+} CAMERA_SENSOR_MENU_TLB_T;
+
+typedef struct{
+    Camera_Resolution cs_id;
+    uint16_t width;
+    uint16_t  height;
+    char * name;
+    char * str_name;
+} PREVIEW_DIMENSION_TBL_T;
+
+typedef struct {
+  White_Balance_modes wb_id;
+  char * wb_name;
+} WHITE_BALANCE_TBL_T;
+
+typedef struct {
+  Get_Ctrl_modes get_ctrl_id;
+  char * get_ctrl_name;
+} GET_CTRL_TBL_T;
+
+typedef struct{
+  Exp_Metering_modes exp_metering_id;
+  char * exp_metering_name;
+} EXP_METERING_TBL_T;
+
+typedef struct {
+  Bestshot_modes bs_id;
+  char *name;
+} BESTSHOT_MODE_TBT_T;
+
+typedef struct {
+  Flash_modes bs_id;
+  char *name;
+} FLASH_MODE_TBL_T;
+
+typedef struct {
+  ISO_modes iso_modes;
+  char *iso_modes_name;
+} ISO_TBL_T;
+
+typedef struct {
+  Zoom_direction zoom_direction;
+  char * zoom_direction_name;
+} ZOOM_TBL_T;
+
+typedef struct {
+  Camera_Sharpness_changes sharpness_change;
+  char *sharpness_change_name;
+} SHARPNESS_TBL_T;
+
+typedef struct {
+  Camera_Brightness_changes bc_id;
+  char * brightness_name;
+} CAMERA_BRIGHTNESS_TBL_T;
+
+typedef struct {
+  Camera_Contrast_changes cc_id;
+  char * contrast_name;
+} CAMERA_CONTRST_TBL_T;
+
+typedef struct {
+  Camera_EV_changes ec_id;
+  char * EV_name;
+} CAMERA_EV_TBL_T;
+
+typedef struct {
+  Camera_Saturation_changes sc_id;
+  char * saturation_name;
+} CAMERA_SATURATION_TBL_T;
+
+typedef struct {
+  Camera_Sharpness_changes bc_id;
+  char * sharpness_name;
+} CAMERA_SHARPNESS_TBL_T;
+
+#endif /* __MM_QCAMERA_MAIN_MENU_H__ */
diff --git a/msmcobalt/QCamera2/stack/mm-camera-test/inc/mm_qcamera_socket.h b/msmcobalt/QCamera2/stack/mm-camera-test/inc/mm_qcamera_socket.h
new file mode 100644
index 0000000..186c109
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-test/inc/mm_qcamera_socket.h
@@ -0,0 +1,113 @@
+/* Copyright (c) 2013-2014, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_QCAMERA_SOCKET_H__
+#define __MM_QCAMERA_SOCKET_H__
+
+// System dependencies
+#include <arpa/inet.h>
+
+#undef __FD_SET
+#define __FD_SET(fd, fdsetp) \
+  (((fd_set *)(fdsetp))->fds_bits[(fd) >> 5] |= (1LU<<((fd) & 31)))
+
+#undef __FD_CLR
+#define __FD_CLR(fd, fdsetp) \
+  (((fd_set *)(fdsetp))->fds_bits[(fd) >> 5] &= ~(1LU<<((fd) & 31)))
+
+#undef  __FD_ISSET
+#define __FD_ISSET(fd, fdsetp) \
+  ((((fd_set *)(fdsetp))->fds_bits[(fd) >> 5] & (1LU<<((fd) & 31))) != 0)
+
+#undef  __FD_ZERO
+#define __FD_ZERO(fdsetp) \
+  (memset (fdsetp, 0, sizeof (*(fd_set *)(fdsetp))))
+
+#define TUNESERVER_MAX_RECV 2048
+#define TUNESERVER_MAX(a, b)  (((a) > (b)) ? (a) : (b))
+
+#define TUNESERVER_GET_LIST 1014
+#define TUNESERVER_GET_PARMS 1015
+#define TUNESERVER_SET_PARMS 1016
+#define TUNESERVER_MISC_CMDS 1021
+
+#define TUNE_PREV_GET_INFO        0x0001
+#define TUNE_PREV_CH_CNK_SIZE     0x0002
+#define TUNE_PREV_GET_PREV_FRAME  0x0003
+#define TUNE_PREV_GET_JPG_SNAP    0x0004
+#define TUNE_PREV_GET_RAW_SNAP    0x0005
+#define TUNE_PREV_GET_RAW_PREV    0x0006
+
+typedef struct {
+  char data[128];
+} tuneserver_misc_cmd;
+
+typedef enum {
+  TUNESERVER_RECV_COMMAND = 1,
+  TUNESERVER_RECV_PAYLOAD_SIZE,
+  TUNESERVER_RECV_PAYLOAD,
+  TUNESERVER_RECV_RESPONSE,
+  TUNESERVERER_RECV_INVALID,
+} tuneserver_recv_cmd_t;
+
+typedef struct {
+  uint16_t          current_cmd;
+  tuneserver_recv_cmd_t next_recv_code;
+  uint32_t          next_recv_len;
+  void              *recv_buf;
+  uint32_t          recv_len;
+  uint32_t          send_len;
+  void              *send_buf;
+} tuneserver_protocol_t;
+
+typedef enum {
+  TUNE_PREV_RECV_COMMAND = 1,
+  TUNE_PREV_RECV_NEWCNKSIZE,
+  TUNE_PREV_RECV_INVALID
+} tune_prev_cmd_t;
+
+typedef struct _eztune_preview_protocol_t {
+  uint16_t         current_cmd;
+  tune_prev_cmd_t  next_recv_code;
+  uint32_t         next_recv_len;
+  int32_t          send_len;
+  char*            send_buf;
+  uint32_t         send_buf_size;
+  uint32_t         new_cnk_size;
+  uint32_t         new_cmd_available;
+} prserver_protocol_t;
+
+typedef union {
+  struct sockaddr addr;
+  struct sockaddr_in addr_in;
+} mm_qcamera_sock_addr_t;
+
+int eztune_server_start(void *lib_handle);
+
+#endif /*__MM_QCAMERA_SOCKET_H__*/
diff --git a/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_app.c b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_app.c
new file mode 100644
index 0000000..e7a2afb
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_app.c
@@ -0,0 +1,2407 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// To remove
+#include <cutils/properties.h>
+
+// System dependencies
+#include <dlfcn.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <linux/msm_ion.h>
+#define MMAN_H <SYSTEM_HEADER_PREFIX/mman.h>
+#include MMAN_H
+
+// Camera dependencies
+#include "mm_qcamera_dbg.h"
+#include "mm_qcamera_app.h"
+
+static pthread_mutex_t app_mutex;
+static int thread_status = 0;
+static pthread_cond_t app_cond_v;
+
+#define MM_QCAMERA_APP_NANOSEC_SCALE 1000000000
+
+int mm_camera_app_timedwait(uint8_t seconds)
+{
+    int rc = 0;
+    pthread_mutex_lock(&app_mutex);
+    if(FALSE == thread_status) {
+        struct timespec tw;
+        memset(&tw, 0, sizeof tw);
+        /* pthread_cond_timedwait() takes an ABSOLUTE deadline; tv_nsec must be < 1e9 */
+        tw.tv_sec = time(NULL) + seconds;
+
+        rc = pthread_cond_timedwait(&app_cond_v, &app_mutex,&tw);
+        thread_status = FALSE;
+    }
+    pthread_mutex_unlock(&app_mutex);
+    return rc;
+}
+
+int mm_camera_app_wait()
+{
+    int rc = 0;
+    pthread_mutex_lock(&app_mutex);
+    if(FALSE == thread_status){
+        pthread_cond_wait(&app_cond_v, &app_mutex);
+    }
+    thread_status = FALSE;
+    pthread_mutex_unlock(&app_mutex);
+    return rc;
+}
+
+void mm_camera_app_done()
+{
+  pthread_mutex_lock(&app_mutex);
+  thread_status = TRUE;
+  pthread_cond_signal(&app_cond_v);
+  pthread_mutex_unlock(&app_mutex);
+}
+
+int mm_app_load_hal(mm_camera_app_t *my_cam_app)
+{
+    memset(&my_cam_app->hal_lib, 0, sizeof(hal_interface_lib_t));
+    my_cam_app->hal_lib.ptr = dlopen("libmmcamera_interface.so", RTLD_NOW);
+    my_cam_app->hal_lib.ptr_jpeg = dlopen("libmmjpeg_interface.so", RTLD_NOW);
+    if (!my_cam_app->hal_lib.ptr || !my_cam_app->hal_lib.ptr_jpeg) {
+        LOGE("Error opening HAL library %s\n",  dlerror());
+        return -MM_CAMERA_E_GENERAL;
+    }
+    *(void **)&(my_cam_app->hal_lib.get_num_of_cameras) =
+        dlsym(my_cam_app->hal_lib.ptr, "get_num_of_cameras");
+    *(void **)&(my_cam_app->hal_lib.mm_camera_open) =
+        dlsym(my_cam_app->hal_lib.ptr, "camera_open");
+    *(void **)&(my_cam_app->hal_lib.jpeg_open) =
+        dlsym(my_cam_app->hal_lib.ptr_jpeg, "jpeg_open");
+
+    if (my_cam_app->hal_lib.get_num_of_cameras == NULL ||
+        my_cam_app->hal_lib.mm_camera_open == NULL ||
+        my_cam_app->hal_lib.jpeg_open == NULL) {
+        LOGE("Error loading HAL sym %s\n",  dlerror());
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    my_cam_app->num_cameras = my_cam_app->hal_lib.get_num_of_cameras();
+    LOGD("num_cameras = %d\n",  my_cam_app->num_cameras);
+
+    return MM_CAMERA_OK;
+}
+
+int mm_app_allocate_ion_memory(mm_camera_app_buf_t *buf,
+        __unused unsigned int ion_type)
+{
+    int rc = MM_CAMERA_OK;
+    struct ion_handle_data handle_data;
+    struct ion_allocation_data alloc;
+    struct ion_fd_data ion_info_fd;
+    int main_ion_fd = -1;
+    void *data = NULL;
+
+    main_ion_fd = open("/dev/ion", O_RDONLY);
+    if (main_ion_fd <= 0) {
+        LOGE("Ion dev open failed %s\n", strerror(errno));
+        goto ION_OPEN_FAILED;
+    }
+
+    memset(&alloc, 0, sizeof(alloc));
+    alloc.len = buf->mem_info.size;
+    /* to make it page size aligned */
+    alloc.len = (alloc.len + 4095U) & (~4095U);
+    alloc.align = 4096;
+    alloc.flags = ION_FLAG_CACHED;
+    alloc.heap_id_mask = ION_HEAP(ION_SYSTEM_HEAP_ID);
+    rc = ioctl(main_ion_fd, ION_IOC_ALLOC, &alloc);
+    if (rc < 0) {
+        LOGE("ION allocation failed %s with rc = %d \n",strerror(errno), rc);
+        goto ION_ALLOC_FAILED;
+    }
+
+    memset(&ion_info_fd, 0, sizeof(ion_info_fd));
+    ion_info_fd.handle = alloc.handle;
+    rc = ioctl(main_ion_fd, ION_IOC_SHARE, &ion_info_fd);
+    if (rc < 0) {
+        LOGE("ION map failed %s\n", strerror(errno));
+        goto ION_MAP_FAILED;
+    }
+
+    data = mmap(NULL,
+                alloc.len,
+                PROT_READ  | PROT_WRITE,
+                MAP_SHARED,
+                ion_info_fd.fd,
+                0);
+
+    if (data == MAP_FAILED) {
+        LOGE("ION_MMAP_FAILED: %s (%d)\n", strerror(errno), errno);
+        goto ION_MAP_FAILED;
+    }
+    buf->mem_info.main_ion_fd = main_ion_fd;
+    buf->mem_info.fd = ion_info_fd.fd;
+    buf->mem_info.handle = ion_info_fd.handle;
+    buf->mem_info.size = alloc.len;
+    buf->mem_info.data = data;
+    return MM_CAMERA_OK;
+
+ION_MAP_FAILED:
+    memset(&handle_data, 0, sizeof(handle_data));
+    handle_data.handle = ion_info_fd.handle;
+    ioctl(main_ion_fd, ION_IOC_FREE, &handle_data);
+ION_ALLOC_FAILED:
+    close(main_ion_fd);
+ION_OPEN_FAILED:
+    return -MM_CAMERA_E_GENERAL;
+}
+
+int mm_app_deallocate_ion_memory(mm_camera_app_buf_t *buf)
+{
+  struct ion_handle_data handle_data;
+  int rc = 0;
+
+  rc = munmap(buf->mem_info.data, buf->mem_info.size);
+
+  if (buf->mem_info.fd >= 0) {
+      close(buf->mem_info.fd);
+      buf->mem_info.fd = -1;
+  }
+
+  if (buf->mem_info.main_ion_fd >= 0) {
+      memset(&handle_data, 0, sizeof(handle_data));
+      handle_data.handle = buf->mem_info.handle;
+      ioctl(buf->mem_info.main_ion_fd, ION_IOC_FREE, &handle_data);
+      close(buf->mem_info.main_ion_fd);
+      buf->mem_info.main_ion_fd = -1;
+  }
+  return rc;
+}
+
+/* cmd = ION_IOC_CLEAN_CACHES, ION_IOC_INV_CACHES, ION_IOC_CLEAN_INV_CACHES */
+int mm_app_cache_ops(mm_camera_app_meminfo_t *mem_info,
+                     int cmd)
+{
+    struct ion_flush_data cache_inv_data;
+    struct ion_custom_data custom_data;
+    int ret = MM_CAMERA_OK;
+
+#ifdef USE_ION
+    if (NULL == mem_info) {
+        LOGE("mem_info is NULL, return here");
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    memset(&cache_inv_data, 0, sizeof(cache_inv_data));
+    memset(&custom_data, 0, sizeof(custom_data));
+    cache_inv_data.vaddr = mem_info->data;
+    cache_inv_data.fd = mem_info->fd;
+    cache_inv_data.handle = mem_info->handle;
+    cache_inv_data.length = (unsigned int)mem_info->size;
+    custom_data.cmd = (unsigned int)cmd;
+    custom_data.arg = (unsigned long)&cache_inv_data;
+
+    LOGD("addr = %p, fd = %d, handle = %lx length = %d, ION Fd = %d",
+         cache_inv_data.vaddr, cache_inv_data.fd,
+         (unsigned long)cache_inv_data.handle, cache_inv_data.length,
+         mem_info->main_ion_fd);
+    if(mem_info->main_ion_fd >= 0) {
+        if(ioctl(mem_info->main_ion_fd, ION_IOC_CUSTOM, &custom_data) < 0) {
+            LOGE("Cache Invalidate failed\n");
+            ret = -MM_CAMERA_E_GENERAL;
+        }
+    }
+#endif
+
+    return ret;
+}
+
+void mm_app_dump_frame(mm_camera_buf_def_t *frame,
+                       char *name,
+                       char *ext,
+                       uint32_t frame_idx)
+{
+    char file_name[FILENAME_MAX];
+    int file_fd;
+    int i;
+    int offset = 0;
+    if ( frame != NULL) {
+        snprintf(file_name, sizeof(file_name),
+                QCAMERA_DUMP_FRM_LOCATION"%s_%04d.%s", name, frame_idx, ext);
+        file_fd = open(file_name, O_RDWR | O_CREAT, 0777);
+        if (file_fd < 0) {
+            LOGE("cannot open file %s \n",  file_name);
+        } else {
+            for (i = 0; i < frame->planes_buf.num_planes; i++) {
+                LOGD("saving file from address: %p, data offset: %d, "
+                     "length: %d \n",  frame->buffer,
+                    frame->planes_buf.planes[i].data_offset, frame->planes_buf.planes[i].length);
+                write(file_fd,
+                      (uint8_t *)frame->buffer + offset,
+                      frame->planes_buf.planes[i].length);
+                offset += (int)frame->planes_buf.planes[i].length;
+            }
+
+            close(file_fd);
+            LOGD("dump %s", file_name);
+        }
+    }
+}
+
+void mm_app_dump_jpeg_frame(const void * data, size_t size, char* name,
+        char* ext, uint32_t index)
+{
+    char buf[FILENAME_MAX];
+    int file_fd;
+    if ( data != NULL) {
+        snprintf(buf, sizeof(buf),
+                QCAMERA_DUMP_FRM_LOCATION"test/%s_%u.%s", name, index, ext);
+        LOGD("%s size =%zu, jobId=%u",  buf, size, index);
+        file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+        /* open() may fail (returns -1); guard before write()/close() */
+        if (file_fd >= 0) { write(file_fd, data, size); close(file_fd); }
+    }
+}
+
+/* Allocate 'num_bufs' ION buffers described by 'frame_offset_info' and
+ * fill in the corresponding mm_camera buf_def plane layout.
+ * 'multipleOf' > 0 rounds each buffer length up to that multiple
+ * (e.g. a slice size); is_streambuf selects the stream ION heap in
+ * addition to the fallback heap.
+ * Returns MM_CAMERA_OK on success; on an allocation failure, releases
+ * the buffers allocated so far and returns the error. */
+int mm_app_alloc_bufs(mm_camera_app_buf_t* app_bufs,
+                      cam_frame_len_offset_t *frame_offset_info,
+                      uint8_t num_bufs,
+                      uint8_t is_streambuf,
+                      size_t multipleOf)
+{
+    uint32_t i, j;
+    int rc;
+    unsigned int ion_type = 0x1 << CAMERA_ION_FALLBACK_HEAP_ID;
+
+    if (is_streambuf) {
+        ion_type |= 0x1 << CAMERA_ION_HEAP_ID;
+    }
+
+    for (i = 0; i < num_bufs ; i++) {
+        /* Round the frame length up to a multiple of 'multipleOf'
+         * when requested. */
+        if ( 0 < multipleOf ) {
+            size_t m = frame_offset_info->frame_len / multipleOf;
+            if ( ( frame_offset_info->frame_len % multipleOf ) != 0 ) {
+                m++;
+            }
+            app_bufs[i].mem_info.size = m * multipleOf;
+        } else {
+            app_bufs[i].mem_info.size = frame_offset_info->frame_len;
+        }
+        rc = mm_app_allocate_ion_memory(&app_bufs[i], ion_type);
+        if (rc != MM_CAMERA_OK) {
+            /* Do not touch an invalid mem_info below; unwind the
+             * buffers already allocated and bail out. */
+            LOGE("ION allocation failed for buf[%d], rc=%d", i, rc);
+            mm_app_release_bufs((uint8_t)i, app_bufs);
+            return rc;
+        }
+
+        app_bufs[i].buf.buf_idx = i;
+        app_bufs[i].buf.planes_buf.num_planes = (int8_t)frame_offset_info->num_planes;
+        app_bufs[i].buf.fd = app_bufs[i].mem_info.fd;
+        app_bufs[i].buf.frame_len = app_bufs[i].mem_info.size;
+        app_bufs[i].buf.buffer = app_bufs[i].mem_info.data;
+        app_bufs[i].buf.mem_info = (void *)&app_bufs[i].mem_info;
+
+        /* Plane 0 needs to be set separately (reserved[0] starts at 0).
+         * Remaining planes are chained off the previous plane's
+         * reserved offset + length in the loop below. */
+        app_bufs[i].buf.planes_buf.planes[0].length = frame_offset_info->mp[0].len;
+        app_bufs[i].buf.planes_buf.planes[0].m.userptr =
+            (long unsigned int)app_bufs[i].buf.fd;
+        app_bufs[i].buf.planes_buf.planes[0].data_offset = frame_offset_info->mp[0].offset;
+        app_bufs[i].buf.planes_buf.planes[0].reserved[0] = 0;
+        for (j = 1; j < (uint8_t)frame_offset_info->num_planes; j++) {
+            app_bufs[i].buf.planes_buf.planes[j].length = frame_offset_info->mp[j].len;
+            app_bufs[i].buf.planes_buf.planes[j].m.userptr =
+                (long unsigned int)app_bufs[i].buf.fd;
+            app_bufs[i].buf.planes_buf.planes[j].data_offset = frame_offset_info->mp[j].offset;
+            app_bufs[i].buf.planes_buf.planes[j].reserved[0] =
+                app_bufs[i].buf.planes_buf.planes[j-1].reserved[0] +
+                app_bufs[i].buf.planes_buf.planes[j-1].length;
+        }
+    }
+    LOGD("X");
+    return MM_CAMERA_OK;
+}
+
+/* Deallocate the ION memory behind 'num_bufs' app buffers and zero
+ * the array. Attempts every buffer even if one dealloc fails, and
+ * returns the FIRST failure code (the original returned only the
+ * last, silently masking earlier errors). */
+int mm_app_release_bufs(uint8_t num_bufs,
+                        mm_camera_app_buf_t* app_bufs)
+{
+    int i, rc = MM_CAMERA_OK;
+
+    LOGD("E");
+
+    for (i = 0; i < num_bufs; i++) {
+        int ret = mm_app_deallocate_ion_memory(&app_bufs[i]);
+        if ((ret != MM_CAMERA_OK) && (rc == MM_CAMERA_OK)) {
+            rc = ret;
+        }
+    }
+    memset(app_bufs, 0, num_bufs * sizeof(mm_camera_app_buf_t));
+    LOGD("X");
+    return rc;
+}
+
+/* Stream-buffer allocation callback handed to the mm-camera interface.
+ * Allocates the stream's buffers, maps each one to the backend via
+ * ops_tbl->map_ops, and hands back the buf_def array plus initial
+ * registration flags. 'user_data' is the owning mm_camera_stream_t. */
+int mm_app_stream_initbuf(cam_frame_len_offset_t *frame_offset_info,
+                          uint8_t *num_bufs,
+                          uint8_t **initial_reg_flag,
+                          mm_camera_buf_def_t **bufs,
+                          mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                          void *user_data)
+{
+    mm_camera_stream_t *stream = (mm_camera_stream_t *)user_data;
+    mm_camera_buf_def_t *pBufs = NULL;
+    uint8_t *reg_flags = NULL;
+    int i, rc;
+
+    stream->offset = *frame_offset_info;
+
+    LOGD("alloc buf for stream_id %d, len=%d, num planes: %d, offset: %d",
+         stream->s_id,
+         frame_offset_info->frame_len,
+         frame_offset_info->num_planes,
+         frame_offset_info->mp[1].offset);
+
+    /* Clamp to the interface's per-stream buffer limit. */
+    if (stream->num_of_bufs > CAM_MAX_NUM_BUFS_PER_STREAM)
+        stream->num_of_bufs = CAM_MAX_NUM_BUFS_PER_STREAM;
+
+    pBufs = (mm_camera_buf_def_t *)malloc(sizeof(mm_camera_buf_def_t) * stream->num_of_bufs);
+    reg_flags = (uint8_t *)malloc(sizeof(uint8_t) * stream->num_of_bufs);
+    if (pBufs == NULL || reg_flags == NULL) {
+        LOGE("No mem for bufs");
+        if (pBufs != NULL) {
+            free(pBufs);
+        }
+        if (reg_flags != NULL) {
+            free(reg_flags);
+        }
+        return -1;
+    }
+
+    rc = mm_app_alloc_bufs(&stream->s_bufs[0],
+                           frame_offset_info,
+                           stream->num_of_bufs,
+                           1,
+                           stream->multipleOf);
+
+    if (rc != MM_CAMERA_OK) {
+        LOGE("mm_stream_alloc_bufs err = %d",  rc);
+        free(pBufs);
+        free(reg_flags);
+        return rc;
+    }
+
+    for (i = 0; i < stream->num_of_bufs; i++) {
+        /* mapping stream bufs first */
+        pBufs[i] = stream->s_bufs[i].buf;
+        reg_flags[i] = 1;
+        rc = ops_tbl->map_ops(pBufs[i].buf_idx,
+                              -1,
+                              pBufs[i].fd,
+                              (uint32_t)pBufs[i].frame_len,
+                              NULL,
+                              CAM_MAPPING_BUF_TYPE_STREAM_BUF, ops_tbl->userdata);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mapping buf[%d] err = %d",  i, rc);
+            break;
+        }
+    }
+
+    if (rc != MM_CAMERA_OK) {
+        int j;
+        /* BUGFIX: was 'j > i', which never iterates, so buffers mapped
+         * before the failure were leaked. Unmap bufs [0, i). */
+        for (j = 0; j < i; j++) {
+            ops_tbl->unmap_ops(pBufs[j].buf_idx, -1,
+                    CAM_MAPPING_BUF_TYPE_STREAM_BUF, ops_tbl->userdata);
+        }
+        mm_app_release_bufs(stream->num_of_bufs, &stream->s_bufs[0]);
+        free(pBufs);
+        free(reg_flags);
+        return rc;
+    }
+
+    *num_bufs = stream->num_of_bufs;
+    *bufs = pBufs;
+    *initial_reg_flag = reg_flags;
+
+    LOGD("X");
+    return rc;
+}
+
+/* Stream-buffer teardown callback: unmaps every stream buffer from the
+ * backend, then frees the underlying ION memory. Counterpart of
+ * mm_app_stream_initbuf(). Always returns 0. */
+int32_t mm_app_stream_deinitbuf(mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                                void *user_data)
+{
+    mm_camera_stream_t *stream = (mm_camera_stream_t *)user_data;
+    int i;
+
+    for (i = 0; i < stream->num_of_bufs ; i++) {
+        /* unmap stream bufs from the backend before freeing them */
+        ops_tbl->unmap_ops(stream->s_bufs[i].buf.buf_idx, -1,
+                CAM_MAPPING_BUF_TYPE_STREAM_BUF, ops_tbl->userdata);
+    }
+
+    mm_app_release_bufs(stream->num_of_bufs, &stream->s_bufs[0]);
+
+    LOGD("X");
+    return 0;
+}
+
+/* Cache maintenance callback: clean + invalidate the CPU cache lines
+ * backing stream buffer 'index'. */
+int32_t mm_app_stream_clean_invalidate_buf(uint32_t index, void *user_data)
+{
+    mm_camera_stream_t *stream = (mm_camera_stream_t *)user_data;
+    mm_camera_app_buf_t *app_buf = &stream->s_bufs[index];
+
+    return mm_app_cache_ops(&app_buf->mem_info, ION_IOC_CLEAN_INV_CACHES);
+}
+
+/* Cache maintenance callback: invalidate the CPU cache lines backing
+ * stream buffer 'index' so fresh device writes are visible. */
+int32_t mm_app_stream_invalidate_buf(uint32_t index, void *user_data)
+{
+    mm_camera_stream_t *stream = (mm_camera_stream_t *)user_data;
+    mm_camera_app_buf_t *app_buf = &stream->s_bufs[index];
+
+    return mm_app_cache_ops(&app_buf->mem_info, ION_IOC_INV_CACHES);
+}
+
+/* Camera event callback registered via register_event_notify().
+ * Validates that the event belongs to this test object's camera and
+ * logs the recognized event types; all others are ignored. */
+static void notify_evt_cb(uint32_t camera_handle,
+                          mm_camera_event_t *evt,
+                          void *user_data)
+{
+    mm_camera_test_obj_t *test_obj =
+        (mm_camera_test_obj_t *)user_data;
+    if (test_obj == NULL || test_obj->cam->camera_handle != camera_handle) {
+        LOGE("Not a valid test obj");
+        return;
+    }
+
+    LOGD("E evt = %d",  evt->server_event_type);
+    switch (evt->server_event_type) {
+       case CAM_EVENT_TYPE_AUTO_FOCUS_DONE:
+           LOGD("rcvd auto focus done evt");
+           break;
+       case CAM_EVENT_TYPE_ZOOM_DONE:
+           LOGD("rcvd zoom done evt");
+           break;
+       default:
+           break;
+    }
+
+    LOGD("X");
+}
+
+/* Open camera 'cam_id' and prepare 'test_obj' for use: allocates and
+ * maps the capability and parameter buffers, registers the event
+ * callback, queries capabilities, and opens the JPEG encoder client.
+ * Returns MM_CAMERA_OK on success; on failure unwinds all completed
+ * steps via the goto cleanup chain and returns an error code. */
+int mm_app_open(mm_camera_app_t *cam_app,
+                int cam_id,
+                mm_camera_test_obj_t *test_obj)
+{
+    int32_t rc = 0;
+    cam_frame_len_offset_t offset_info;
+
+    LOGD("BEGIN\n");
+
+    rc = cam_app->hal_lib.mm_camera_open((uint8_t)cam_id, &(test_obj->cam));
+    if(rc || !test_obj->cam) {
+        LOGE("dev open error. rc = %d, vtbl = %p\n",  rc, test_obj->cam);
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    LOGD("Open Camera id = %d handle = %d", cam_id, test_obj->cam->camera_handle);
+
+    /* alloc ion mem for capability buf */
+    memset(&offset_info, 0, sizeof(offset_info));
+    offset_info.frame_len = sizeof(cam_capability_t);
+
+    rc = mm_app_alloc_bufs(&test_obj->cap_buf,
+                           &offset_info,
+                           1,
+                           0,
+                           0);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("alloc buf for capability error\n");
+        goto error_after_cam_open;
+    }
+
+    /* mapping capability buf */
+    rc = test_obj->cam->ops->map_buf(test_obj->cam->camera_handle,
+                                     CAM_MAPPING_BUF_TYPE_CAPABILITY,
+                                     test_obj->cap_buf.mem_info.fd,
+                                     test_obj->cap_buf.mem_info.size,
+                                     NULL);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("map for capability error\n");
+        goto error_after_cap_buf_alloc;
+    }
+
+    /* alloc ion mem for getparm buf */
+    memset(&offset_info, 0, sizeof(offset_info));
+    offset_info.frame_len = sizeof(parm_buffer_t);
+    rc = mm_app_alloc_bufs(&test_obj->parm_buf,
+                           &offset_info,
+                           1,
+                           0,
+                           0);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("alloc buf for getparm_buf error\n");
+        goto error_after_cap_buf_map;
+    }
+
+    /* mapping getparm buf */
+    rc = test_obj->cam->ops->map_buf(test_obj->cam->camera_handle,
+                                     CAM_MAPPING_BUF_TYPE_PARM_BUF,
+                                     test_obj->parm_buf.mem_info.fd,
+                                     test_obj->parm_buf.mem_info.size,
+                                     NULL);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("map getparm_buf error\n");
+        goto error_after_getparm_buf_alloc;
+    }
+    test_obj->params_buffer = (parm_buffer_t*) test_obj->parm_buf.mem_info.data;
+    /* BUGFIX: format string had both %s and %p but only one argument,
+     * which is undefined behavior. */
+    LOGH("\nparams_buffer=%p\n", test_obj->params_buffer);
+
+    rc = test_obj->cam->ops->register_event_notify(test_obj->cam->camera_handle,
+                                                   notify_evt_cb,
+                                                   test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("failed register_event_notify");
+        rc = -MM_CAMERA_E_GENERAL;
+        goto error_after_getparm_buf_map;
+    }
+
+    rc = test_obj->cam->ops->query_capability(test_obj->cam->camera_handle);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("failed query_capability");
+        rc = -MM_CAMERA_E_GENERAL;
+        goto error_after_getparm_buf_map;
+    }
+    memset(&test_obj->jpeg_ops, 0, sizeof(mm_jpeg_ops_t));
+    mm_dimension pic_size;
+    memset(&pic_size, 0, sizeof(mm_dimension));
+    pic_size.w = 4000;
+    pic_size.h = 3000;
+    test_obj->jpeg_hdl = cam_app->hal_lib.jpeg_open(&test_obj->jpeg_ops, NULL, pic_size, NULL);
+    if (test_obj->jpeg_hdl == 0) {
+        LOGE("jpeg lib open err");
+        rc = -MM_CAMERA_E_GENERAL;
+        goto error_after_getparm_buf_map;
+    }
+
+    return rc;
+
+/* Cleanup chain: each label undoes the step completed just before the
+ * failure point, falling through to undo the earlier steps in turn. */
+error_after_getparm_buf_map:
+    test_obj->cam->ops->unmap_buf(test_obj->cam->camera_handle,
+                                  CAM_MAPPING_BUF_TYPE_PARM_BUF);
+error_after_getparm_buf_alloc:
+    mm_app_release_bufs(1, &test_obj->parm_buf);
+error_after_cap_buf_map:
+    test_obj->cam->ops->unmap_buf(test_obj->cam->camera_handle,
+                                  CAM_MAPPING_BUF_TYPE_CAPABILITY);
+error_after_cap_buf_alloc:
+    mm_app_release_bufs(1, &test_obj->cap_buf);
+error_after_cam_open:
+    test_obj->cam->ops->close_camera(test_obj->cam->camera_handle);
+    test_obj->cam = NULL;
+    return rc;
+}
+
+/* Reset a parameter batch buffer and seed it with the HAL version
+ * entry. Returns MM_CAMERA_OK, or -1 if the entry cannot be added. */
+int init_batch_update(parm_buffer_t *p_table)
+{
+    int rc = MM_CAMERA_OK;
+    /* BUGFIX: format string had %s with no matching argument (UB);
+     * supply the function name it was meant to print. */
+    LOGH("\nEnter %s\n", __func__);
+    int32_t hal_version = CAM_HAL_V1;
+
+    memset(p_table, 0, sizeof(parm_buffer_t));
+    if(ADD_SET_PARAM_ENTRY_TO_BATCH(p_table, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
+        rc = -1;
+    }
+
+    return rc;
+}
+
+/* Commit the batched SET parameters in test_obj->params_buffer to the
+ * camera backend. Skips the backend call entirely when no entry in
+ * the batch is marked valid. Returns the set_parms result. */
+int commit_set_batch(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+    int i = 0;
+
+    /* Scan for at least one valid entry before issuing the ioctl. */
+    for(i = 0; i < CAM_INTF_PARM_MAX; i++){
+        if(test_obj->params_buffer->is_valid[i])
+            break;
+    }
+    if (i < CAM_INTF_PARM_MAX) {
+        LOGH("\n set_param p_buffer =%p\n",test_obj->params_buffer);
+        rc = test_obj->cam->ops->set_parms(test_obj->cam->camera_handle, test_obj->params_buffer);
+    }
+    if (rc != MM_CAMERA_OK) {
+        LOGE("cam->ops->set_parms failed !!");
+    }
+    return rc;
+}
+
+/* Tear down everything mm_app_open() set up: unmap the capability and
+ * parameter buffers, close the camera and JPEG client, and release
+ * the ION buffers. Each step is best-effort — failures are logged
+ * but teardown continues.
+ * NOTE(review): always returns MM_CAMERA_OK even if individual steps
+ * failed; looks intentional for best-effort teardown — confirm. */
+int mm_app_close(mm_camera_test_obj_t *test_obj)
+{
+    int32_t rc = MM_CAMERA_OK;
+
+    if (test_obj == NULL || test_obj->cam ==NULL) {
+        LOGE("cam not opened");
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    /* unmap capability buf */
+    rc = test_obj->cam->ops->unmap_buf(test_obj->cam->camera_handle,
+                                       CAM_MAPPING_BUF_TYPE_CAPABILITY);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("unmap capability buf failed, rc=%d",  rc);
+    }
+
+    /* unmap parm buf */
+    rc = test_obj->cam->ops->unmap_buf(test_obj->cam->camera_handle,
+                                       CAM_MAPPING_BUF_TYPE_PARM_BUF);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("unmap setparm buf failed, rc=%d",  rc);
+    }
+
+    rc = test_obj->cam->ops->close_camera(test_obj->cam->camera_handle);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("close camera failed, rc=%d",  rc);
+    }
+    test_obj->cam = NULL;
+
+    /* close jpeg client */
+    if (test_obj->jpeg_hdl && test_obj->jpeg_ops.close) {
+        rc = test_obj->jpeg_ops.close(test_obj->jpeg_hdl);
+        test_obj->jpeg_hdl = 0;
+        if (rc != MM_CAMERA_OK) {
+            LOGE("close jpeg failed, rc=%d",  rc);
+        }
+    }
+
+    /* dealloc capability buf */
+    rc = mm_app_release_bufs(1, &test_obj->cap_buf);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("release capability buf failed, rc=%d",  rc);
+    }
+
+    /* dealloc parm buf */
+    rc = mm_app_release_bufs(1, &test_obj->parm_buf);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("release setparm buf failed, rc=%d",  rc);
+    }
+
+    return MM_CAMERA_OK;
+}
+
+/* Create a backend channel and record it in test_obj->channels under
+ * 'ch_type'. Returns the channel slot, or NULL if the backend refused
+ * the add (ch_id == 0). */
+mm_camera_channel_t * mm_app_add_channel(mm_camera_test_obj_t *test_obj,
+                                         mm_camera_channel_type_t ch_type,
+                                         mm_camera_channel_attr_t *attr,
+                                         mm_camera_buf_notify_t channel_cb,
+                                         void *userdata)
+{
+    uint32_t ch_id = 0;
+    mm_camera_channel_t *channel = NULL;
+
+    ch_id = test_obj->cam->ops->add_channel(test_obj->cam->camera_handle,
+                                            attr,
+                                            channel_cb,
+                                            userdata);
+    if (ch_id == 0) {
+        LOGE("add channel failed");
+        return NULL;
+    }
+    channel = &test_obj->channels[ch_type];
+    channel->ch_id = ch_id;
+    return channel;
+}
+
+/* Delete the backend channel and scrub the local bookkeeping entry.
+ * Always returns MM_CAMERA_OK. */
+int mm_app_del_channel(mm_camera_test_obj_t *test_obj,
+                       mm_camera_channel_t *channel)
+{
+    uint32_t cam_handle = test_obj->cam->camera_handle;
+
+    test_obj->cam->ops->delete_channel(cam_handle, channel->ch_id);
+    memset(channel, 0, sizeof(*channel));
+    return MM_CAMERA_OK;
+}
+
+/* Add a stream to 'channel': claims the next local stream slot, adds
+ * the stream on the backend, then allocates and maps its stream-info
+ * buffer. Returns the stream, or NULL on failure.
+ * BUGFIX: the claimed channel->num_streams slot is now released again
+ * on every failure path (it was previously leaked). */
+mm_camera_stream_t * mm_app_add_stream(mm_camera_test_obj_t *test_obj,
+                                       mm_camera_channel_t *channel)
+{
+    mm_camera_stream_t *stream = NULL;
+    int rc = MM_CAMERA_OK;
+    cam_frame_len_offset_t offset_info;
+
+    stream = &(channel->streams[channel->num_streams++]);
+    stream->s_id = test_obj->cam->ops->add_stream(test_obj->cam->camera_handle,
+                                                  channel->ch_id);
+    if (stream->s_id == 0) {
+        LOGE("add stream failed");
+        channel->num_streams--;
+        return NULL;
+    }
+
+    stream->multipleOf = test_obj->slice_size;
+
+    /* alloc ion mem for stream_info buf */
+    memset(&offset_info, 0, sizeof(offset_info));
+    offset_info.frame_len = sizeof(cam_stream_info_t);
+
+    rc = mm_app_alloc_bufs(&stream->s_info_buf,
+                           &offset_info,
+                           1,
+                           0,
+                           0);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("alloc buf for stream_info error\n");
+        test_obj->cam->ops->delete_stream(test_obj->cam->camera_handle,
+                                          channel->ch_id,
+                                          stream->s_id);
+        stream->s_id = 0;
+        channel->num_streams--;
+        return NULL;
+    }
+
+    /* mapping streaminfo buf */
+    rc = test_obj->cam->ops->map_stream_buf(test_obj->cam->camera_handle,
+                                            channel->ch_id,
+                                            stream->s_id,
+                                            CAM_MAPPING_BUF_TYPE_STREAM_INFO,
+                                            0,
+                                            -1,
+                                            stream->s_info_buf.mem_info.fd,
+                                            (uint32_t)stream->s_info_buf.mem_info.size, NULL);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("map setparm_buf error\n");
+        mm_app_deallocate_ion_memory(&stream->s_info_buf);
+        test_obj->cam->ops->delete_stream(test_obj->cam->camera_handle,
+                                          channel->ch_id,
+                                          stream->s_id);
+        stream->s_id = 0;
+        channel->num_streams--;
+        return NULL;
+    }
+
+    return stream;
+}
+
+/* Remove a stream: unmap and free its stream-info buffer, delete the
+ * stream on the backend, then zero the local stream struct. Always
+ * returns MM_CAMERA_OK. Counterpart of mm_app_add_stream(). */
+int mm_app_del_stream(mm_camera_test_obj_t *test_obj,
+                      mm_camera_channel_t *channel,
+                      mm_camera_stream_t *stream)
+{
+    test_obj->cam->ops->unmap_stream_buf(test_obj->cam->camera_handle,
+                                         channel->ch_id,
+                                         stream->s_id,
+                                         CAM_MAPPING_BUF_TYPE_STREAM_INFO,
+                                         0,
+                                         -1);
+    mm_app_deallocate_ion_memory(&stream->s_info_buf);
+    test_obj->cam->ops->delete_stream(test_obj->cam->camera_handle,
+                                      channel->ch_id,
+                                      stream->s_id);
+    memset(stream, 0, sizeof(mm_camera_stream_t));
+    return MM_CAMERA_OK;
+}
+
+/* Return the channel slot for 'ch_type'; ch_type indexes directly
+ * into the fixed channels array (no validity check). */
+mm_camera_channel_t *mm_app_get_channel_by_type(mm_camera_test_obj_t *test_obj,
+                                                mm_camera_channel_type_t ch_type)
+{
+    return &test_obj->channels[ch_type];
+}
+
+/* Thin wrapper: forward a stream configuration to the backend's
+ * config_stream op and return its result. */
+int mm_app_config_stream(mm_camera_test_obj_t *test_obj,
+                         mm_camera_channel_t *channel,
+                         mm_camera_stream_t *stream,
+                         mm_camera_stream_config_t *config)
+{
+    return test_obj->cam->ops->config_stream(test_obj->cam->camera_handle,
+                                             channel->ch_id,
+                                             stream->s_id,
+                                             config);
+}
+
+/* Thin wrapper: start the channel on the backend and return its
+ * result. */
+int mm_app_start_channel(mm_camera_test_obj_t *test_obj,
+                         mm_camera_channel_t *channel)
+{
+    return test_obj->cam->ops->start_channel(test_obj->cam->camera_handle,
+                                             channel->ch_id);
+}
+
+/* Thin wrapper: stop the channel on the backend and return its
+ * result. */
+int mm_app_stop_channel(mm_camera_test_obj_t *test_obj,
+                        mm_camera_channel_t *channel)
+{
+    return test_obj->cam->ops->stop_channel(test_obj->cam->camera_handle,
+                                            channel->ch_id);
+}
+
+/* Reset test_obj's mapped parameter batch buffer and seed it with the
+ * HAL version entry. Returns MM_CAMERA_OK, or -1 if the entry cannot
+ * be added (previously the macro's failure result was ignored,
+ * inconsistent with sibling init_batch_update()). */
+int initBatchUpdate(mm_camera_test_obj_t *test_obj)
+{
+    int32_t hal_version = CAM_HAL_V1;
+
+    parm_buffer_t *parm_buf = ( parm_buffer_t * ) test_obj->parm_buf.mem_info.data;
+    memset(parm_buf, 0, sizeof(parm_buffer_t));
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(parm_buf,
+            CAM_INTF_PARM_HAL_VERSION, hal_version)) {
+        LOGE("HAL version parameter not added to batch\n");
+        return -1;
+    }
+
+    return MM_CAMERA_OK;
+}
+
+/* Commit the batched SET parameters in the mapped parm buffer to the
+ * backend. Skips the call when no batch entry is valid. Returns the
+ * set_parms result (MM_CAMERA_OK when nothing to commit). */
+int commitSetBatch(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+    int i = 0;
+
+    parm_buffer_t *p_table = ( parm_buffer_t * ) test_obj->parm_buf.mem_info.data;
+    /* Only issue set_parms if at least one entry is marked valid. */
+    for(i = 0; i < CAM_INTF_PARM_MAX; i++){
+        if(p_table->is_valid[i])
+            break;
+    }
+    if (i < CAM_INTF_PARM_MAX) {
+        rc = test_obj->cam->ops->set_parms(test_obj->cam->camera_handle, p_table);
+    }
+    return rc;
+}
+
+
+/* Commit the batched GET parameters in the mapped parm buffer: asks
+ * the backend to fill in every entry marked valid. Skips the call
+ * when no entry is valid. Returns the get_parms result. */
+int commitGetBatch(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+    int i = 0;
+    parm_buffer_t *p_table = ( parm_buffer_t * ) test_obj->parm_buf.mem_info.data;
+    for(i = 0; i < CAM_INTF_PARM_MAX; i++){
+        if(p_table->is_valid[i])
+            break;
+    }
+    if (i < CAM_INTF_PARM_MAX) {
+        rc = test_obj->cam->ops->get_parms(test_obj->cam->camera_handle, p_table);
+    }
+    return rc;
+}
+
+/* Set AEC (auto-exposure) lock on/off via a batched parameter commit.
+ * Returns MM_CAMERA_OK on success, -1 or a commit error otherwise.
+ * On success control falls through the ERROR label. */
+int setAecLock(mm_camera_test_obj_t *test_obj, int value)
+{
+    int rc = MM_CAMERA_OK;
+
+    rc = initBatchUpdate(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch camera parameter update failed\n");
+        goto ERROR;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+            CAM_INTF_PARM_AEC_LOCK, (uint32_t)value)) {
+        LOGE("AEC Lock parameter not added to batch\n");
+        rc = -1;
+        goto ERROR;
+    }
+
+    rc = commitSetBatch(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch parameters commit failed\n");
+        goto ERROR;
+    }
+
+ERROR:
+    return rc;
+}
+
+/* Set AWB (auto-white-balance) lock on/off via a batched parameter
+ * commit. Returns MM_CAMERA_OK on success, -1 or a commit error
+ * otherwise. On success control falls through the ERROR label. */
+int setAwbLock(mm_camera_test_obj_t *test_obj, int value)
+{
+    int rc = MM_CAMERA_OK;
+
+    rc = initBatchUpdate(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch camera parameter update failed\n");
+        goto ERROR;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+            CAM_INTF_PARM_AWB_LOCK, (uint32_t)value)) {
+        LOGE("AWB Lock parameter not added to batch\n");
+        rc = -1;
+        goto ERROR;
+    }
+
+    rc = commitSetBatch(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch parameters commit failed\n");
+        goto ERROR;
+    }
+
+ERROR:
+    return rc;
+}
+
+
+/* Send an eztune 3A command to the backend via a batched parameter
+ * commit. Returns MM_CAMERA_OK on success, -1 or a commit error
+ * otherwise. */
+int set3Acommand(mm_camera_test_obj_t *test_obj, cam_eztune_cmd_data_t *value)
+{
+    int rc = MM_CAMERA_OK;
+
+    rc = initBatchUpdate(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch camera parameter update failed\n");
+        goto ERROR;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+            CAM_INTF_PARM_EZTUNE_CMD, *value)) {
+        LOGE("CAM_INTF_PARM_EZTUNE_CMD parameter not added to batch\n");
+        rc = -1;
+        goto ERROR;
+    }
+
+    rc = commitSetBatch(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch parameters commit failed\n");
+        goto ERROR;
+    }
+
+ERROR:
+    return rc;
+}
+
+/* Send actuator autofocus-tuning data via a batched parameter commit.
+ * Returns MM_CAMERA_OK on success, -1 or a commit error otherwise. */
+int setAutoFocusTuning(mm_camera_test_obj_t *test_obj, tune_actuator_t *value)
+{
+    int rc = MM_CAMERA_OK;
+
+    rc = initBatchUpdate(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch camera parameter update failed\n");
+        goto ERROR;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+            CAM_INTF_PARM_SET_AUTOFOCUSTUNING, *value)) {
+        LOGE("AutoFocus Tuning not added to batch\n");
+        rc = -1;
+        goto ERROR;
+    }
+
+    rc = commitSetBatch(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch parameters commit failed\n");
+        goto ERROR;
+    }
+
+ERROR:
+    return rc;
+}
+
+/* Send a VFE tuning command via a batched parameter commit.
+ * Returns MM_CAMERA_OK on success, -1 or a commit error otherwise. */
+int setVfeCommand(mm_camera_test_obj_t *test_obj, tune_cmd_t *value)
+{
+    int rc = MM_CAMERA_OK;
+
+    rc = initBatchUpdate(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch camera parameter update failed\n");
+        goto ERROR;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+            CAM_INTF_PARM_SET_VFE_COMMAND, *value)) {
+        LOGE("VFE Command not added to batch\n");
+        rc = -1;
+        goto ERROR;
+    }
+
+    rc = commitSetBatch(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch parameters commit failed\n");
+        goto ERROR;
+    }
+
+ERROR:
+    return rc;
+}
+
+/* Push per-stream size/meta info (CAM_INTF_META_STREAM_INFO) to the
+ * backend via a batched parameter commit. Returns MM_CAMERA_OK on
+ * success, -1 or a commit error otherwise. */
+int setmetainfoCommand(mm_camera_test_obj_t *test_obj, cam_stream_size_info_t *value)
+{
+    int rc = MM_CAMERA_OK;
+
+    rc = initBatchUpdate(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch camera parameter update failed\n");
+        goto ERROR;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+            CAM_INTF_META_STREAM_INFO, *value)) {
+        /* BUGFIX: message said "PP Command" — a copy-paste from
+         * setPPCommand() — which misattributed the failure. */
+        LOGE("Meta stream info parameter not added to batch\n");
+        rc = -1;
+        goto ERROR;
+    }
+
+    rc = commitSetBatch(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch parameters commit failed\n");
+        goto ERROR;
+    }
+
+ERROR:
+    return rc;
+}
+
+
+/* Send a post-processing tuning command via a batched parameter
+ * commit. Returns MM_CAMERA_OK on success, -1 or a commit error
+ * otherwise. */
+int setPPCommand(mm_camera_test_obj_t *test_obj, tune_cmd_t *value)
+{
+    int rc = MM_CAMERA_OK;
+
+    rc = initBatchUpdate(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch camera parameter update failed\n");
+        goto ERROR;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+            CAM_INTF_PARM_SET_PP_COMMAND, *value)) {
+        LOGE("PP Command not added to batch\n");
+        rc = -1;
+        goto ERROR;
+    }
+
+    rc = commitSetBatch(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch parameters commit failed\n");
+        goto ERROR;
+    }
+
+ERROR:
+    return rc;
+}
+
+/* Set the focus mode via a batched parameter commit. Returns
+ * MM_CAMERA_OK on success, -1 or a commit error otherwise. */
+int setFocusMode(mm_camera_test_obj_t *test_obj, cam_focus_mode_type mode)
+{
+    int rc = MM_CAMERA_OK;
+
+    rc = initBatchUpdate(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch camera parameter update failed\n");
+        goto ERROR;
+    }
+
+    /* Widen the enum to the uint32_t the batch entry stores. */
+    uint32_t value = mode;
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+            CAM_INTF_PARM_FOCUS_MODE, value)) {
+        LOGE("Focus mode parameter not added to batch\n");
+        rc = -1;
+        goto ERROR;
+    }
+
+    rc = commitSetBatch(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch parameters commit failed\n");
+        goto ERROR;
+    }
+
+ERROR:
+    return rc;
+}
+
+/* Set exposure compensation via a batched parameter commit, after
+ * validating 'ev' against the range advertised in the capability
+ * buffer. Returns MM_CAMERA_OK on success, -EINVAL for an
+ * out-of-range value, -1 or a commit error otherwise. */
+int setEVCompensation(mm_camera_test_obj_t *test_obj, int ev)
+{
+    int rc = MM_CAMERA_OK;
+
+    cam_capability_t *camera_cap = NULL;
+
+    camera_cap = (cam_capability_t *) test_obj->cap_buf.mem_info.data;
+    if ( (ev >= camera_cap->exposure_compensation_min) &&
+         (ev <= camera_cap->exposure_compensation_max) ) {
+
+        rc = initBatchUpdate(test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("Batch camera parameter update failed\n");
+            goto ERROR;
+        }
+
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+                CAM_INTF_PARM_EXPOSURE_COMPENSATION, ev)) {
+            LOGE("EV compensation parameter not added to batch\n");
+            rc = -1;
+            goto ERROR;
+        }
+
+        rc = commitSetBatch(test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("Batch parameters commit failed\n");
+            goto ERROR;
+        }
+
+        LOGE("EV compensation set to: %d",  ev);
+    } else {
+        LOGE("Invalid EV compensation");
+        return -EINVAL;
+    }
+
+ERROR:
+    return rc;
+}
+
+/* Set the antibanding (flicker-avoidance) mode via a batched
+ * parameter commit. Returns MM_CAMERA_OK on success, -1 or a commit
+ * error otherwise. */
+int setAntibanding(mm_camera_test_obj_t *test_obj, cam_antibanding_mode_type antibanding)
+{
+    int rc = MM_CAMERA_OK;
+
+    rc = initBatchUpdate(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch camera parameter update failed\n");
+        goto ERROR;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+            CAM_INTF_PARM_ANTIBANDING, antibanding)) {
+        LOGE("Antibanding parameter not added to batch\n");
+        rc = -1;
+        goto ERROR;
+    }
+
+    rc = commitSetBatch(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch parameters commit failed\n");
+        goto ERROR;
+    }
+
+    LOGE("Antibanding set to: %d",  (int)antibanding);
+
+ERROR:
+    return rc;
+}
+
+/* Set the white-balance mode via a batched parameter commit. Returns
+ * MM_CAMERA_OK on success, -1 or a commit error otherwise. */
+int setWhiteBalance(mm_camera_test_obj_t *test_obj, cam_wb_mode_type mode)
+{
+    int rc = MM_CAMERA_OK;
+
+    rc = initBatchUpdate(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch camera parameter update failed\n");
+        goto ERROR;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+            CAM_INTF_PARM_WHITE_BALANCE, mode)) {
+        LOGE("White balance parameter not added to batch\n");
+        rc = -1;
+        goto ERROR;
+    }
+
+    rc = commitSetBatch(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch parameters commit failed\n");
+        goto ERROR;
+    }
+
+    LOGE("White balance set to: %d",  (int)mode);
+
+ERROR:
+    return rc;
+}
+
+/* Set the auto-exposure metering mode via a batched parameter commit.
+ * Returns MM_CAMERA_OK on success, -1 or a commit error otherwise. */
+int setExposureMetering(mm_camera_test_obj_t *test_obj, cam_auto_exposure_mode_type mode)
+{
+    int rc = MM_CAMERA_OK;
+
+    rc = initBatchUpdate(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch camera parameter update failed\n");
+        goto ERROR;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+            CAM_INTF_PARM_EXPOSURE, mode)) {
+        LOGE("Exposure metering parameter not added to batch\n");
+        rc = -1;
+        goto ERROR;
+    }
+
+    rc = commitSetBatch(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch parameters commit failed\n");
+        goto ERROR;
+    }
+
+    LOGE("Exposure metering set to: %d",  (int)mode);
+
+ERROR:
+    return rc;
+}
+
+/* Set the brightness level via a batched parameter commit. Returns
+ * MM_CAMERA_OK on success, -1 or a commit error otherwise. */
+int setBrightness(mm_camera_test_obj_t *test_obj, int brightness)
+{
+    int rc = MM_CAMERA_OK;
+
+    rc = initBatchUpdate(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch camera parameter update failed\n");
+        goto ERROR;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+            CAM_INTF_PARM_BRIGHTNESS, brightness)) {
+        LOGE("Brightness parameter not added to batch\n");
+        rc = -1;
+        goto ERROR;
+    }
+
+    rc = commitSetBatch(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch parameters commit failed\n");
+        goto ERROR;
+    }
+
+    LOGE("Brightness set to: %d",  brightness);
+
+ERROR:
+    return rc;
+}
+
+/* Set the contrast level via a batched parameter commit. Returns
+ * MM_CAMERA_OK on success, -1 or a commit error otherwise. */
+int setContrast(mm_camera_test_obj_t *test_obj, int contrast)
+{
+    int rc = MM_CAMERA_OK;
+
+    rc = initBatchUpdate(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch camera parameter update failed\n");
+        goto ERROR;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+            CAM_INTF_PARM_CONTRAST, contrast)) {
+        LOGE("Contrast parameter not added to batch\n");
+        rc = -1;
+        goto ERROR;
+    }
+
+    rc = commitSetBatch(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch parameters commit failed\n");
+        goto ERROR;
+    }
+
+    LOGE("Contrast set to: %d",  contrast);
+
+ERROR:
+    return rc;
+}
+
+/* Enable/disable the tintless correction feature via a batched
+ * parameter commit. Returns MM_CAMERA_OK on success, -1 or a commit
+ * error otherwise. */
+int setTintless(mm_camera_test_obj_t *test_obj, int tintless)
+{
+    int rc = MM_CAMERA_OK;
+
+    rc = initBatchUpdate(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch camera parameter update failed\n");
+        goto ERROR;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+            CAM_INTF_PARM_TINTLESS, tintless)) {
+        LOGE("Tintless parameter not added to batch\n");
+        rc = -1;
+        goto ERROR;
+    }
+
+    rc = commitSetBatch(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch parameters commit failed\n");
+        goto ERROR;
+    }
+
+    LOGE("set Tintless to: %d",  tintless);
+
+ERROR:
+    return rc;
+}
+
+/* Set the saturation level via a batched parameter commit. Returns
+ * MM_CAMERA_OK on success, -1 or a commit error otherwise. */
+int setSaturation(mm_camera_test_obj_t *test_obj, int saturation)
+{
+    int rc = MM_CAMERA_OK;
+
+    rc = initBatchUpdate(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch camera parameter update failed\n");
+        goto ERROR;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+            CAM_INTF_PARM_SATURATION, saturation)) {
+        LOGE("Saturation parameter not added to batch\n");
+        rc = -1;
+        goto ERROR;
+    }
+
+    rc = commitSetBatch(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch parameters commit failed\n");
+        goto ERROR;
+    }
+
+    LOGE("Saturation set to: %d",  saturation);
+
+ERROR:
+    return rc;
+}
+
+/* Set the sharpness level via a batched parameter commit, and cache
+ * it in test_obj->reproc_sharpness for later reprocessing use.
+ * Returns MM_CAMERA_OK on success, -1 or a commit error otherwise. */
+int setSharpness(mm_camera_test_obj_t *test_obj, int sharpness)
+{
+    int rc = MM_CAMERA_OK;
+
+    rc = initBatchUpdate(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch camera parameter update failed\n");
+        goto ERROR;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+            CAM_INTF_PARM_SHARPNESS, sharpness)) {
+        LOGE("Sharpness parameter not added to batch\n");
+        rc = -1;
+        goto ERROR;
+    }
+
+    rc = commitSetBatch(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch parameters commit failed\n");
+        goto ERROR;
+    }
+
+    test_obj->reproc_sharpness = sharpness;
+    LOGE("Sharpness set to: %d",  sharpness);
+
+ERROR:
+    return rc;
+}
+
+/* Set the ISO mode via a batched parameter commit, wrapped in the
+ * manual-3A settings struct the interface expects. Returns
+ * MM_CAMERA_OK on success, -1 or a commit error otherwise. */
+int setISO(mm_camera_test_obj_t *test_obj, cam_iso_mode_type iso)
+{
+    int rc = MM_CAMERA_OK;
+
+    rc = initBatchUpdate(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch camera parameter update failed\n");
+        goto ERROR;
+    }
+
+    /* ISO travels inside a manual-3A struct; apply to all streams,
+     * not preview only. */
+    cam_intf_parm_manual_3a_t iso_settings;
+    memset(&iso_settings, 0, sizeof(cam_intf_parm_manual_3a_t));
+    iso_settings.previewOnly = FALSE;
+    iso_settings.value = (uint64_t)iso;
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+            CAM_INTF_PARM_ISO, iso_settings)) {
+        LOGE("ISO parameter not added to batch\n");
+        rc = -1;
+        goto ERROR;
+    }
+
+    rc = commitSetBatch(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch parameters commit failed\n");
+        goto ERROR;
+    }
+
+    LOGE("ISO set to: %d",  (int)iso);
+
+ERROR:
+    return rc;
+}
+
+/* Apply a zoom step through the parameter batch interface. */
+int setZoom(mm_camera_test_obj_t *test_obj, int zoom)
+{
+    int rc = initBatchUpdate(test_obj);
+
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch camera parameter update failed\n");
+        return rc;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+            CAM_INTF_PARM_ZOOM, zoom)) {
+        LOGE("Zoom parameter not added to batch\n");
+        return -1;
+    }
+
+    rc = commitSetBatch(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch parameters commit failed\n");
+        return rc;
+    }
+
+    LOGE("Zoom set to: %d",  zoom);
+
+    return rc;
+}
+
+/* Apply a preview FPS range through the parameter batch interface. */
+int setFPSRange(mm_camera_test_obj_t *test_obj, cam_fps_range_t range)
+{
+    int rc = initBatchUpdate(test_obj);
+
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch camera parameter update failed\n");
+        return rc;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+            CAM_INTF_PARM_FPS_RANGE, range)) {
+        LOGE("FPS range parameter not added to batch\n");
+        return -1;
+    }
+
+    rc = commitSetBatch(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch parameters commit failed\n");
+        return rc;
+    }
+
+    LOGE("FPS Range set to: [%5.2f:%5.2f]",
+            range.min_fps,
+            range.max_fps);
+
+    return rc;
+}
+
+/* Apply a best-shot (scene) mode through the parameter batch interface. */
+int setScene(mm_camera_test_obj_t *test_obj, cam_scene_mode_type scene)
+{
+    int rc = initBatchUpdate(test_obj);
+
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch camera parameter update failed\n");
+        return rc;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+            CAM_INTF_PARM_BESTSHOT_MODE, scene)) {
+        LOGE("Scene parameter not added to batch\n");
+        return -1;
+    }
+
+    rc = commitSetBatch(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch parameters commit failed\n");
+        return rc;
+    }
+
+    LOGE("Scene set to: %d",  (int)scene);
+
+    return rc;
+}
+
+/* Apply an LED flash mode through the parameter batch interface. */
+int setFlash(mm_camera_test_obj_t *test_obj, cam_flash_mode_t flash)
+{
+    int rc = initBatchUpdate(test_obj);
+
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch camera parameter update failed\n");
+        return rc;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+            CAM_INTF_PARM_LED_MODE, flash)) {
+        LOGE("Flash parameter not added to batch\n");
+        return -1;
+    }
+
+    rc = commitSetBatch(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch parameters commit failed\n");
+        return rc;
+    }
+
+    LOGE("Flash set to: %d",  (int)flash);
+
+    return rc;
+}
+
+/* Enable/disable wavelet noise reduction on the YCbCr planes and
+ * remember the setting for reprocessing. */
+int setWNR(mm_camera_test_obj_t *test_obj, uint8_t enable)
+{
+    cam_denoise_param_t denoise;
+    int rc = initBatchUpdate(test_obj);
+
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch camera parameter update failed\n");
+        return rc;
+    }
+
+    memset(&denoise, 0, sizeof(cam_denoise_param_t));
+    denoise.denoise_enable = enable;
+    denoise.process_plates = CAM_WAVELET_DENOISE_YCBCR_PLANE;
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+            CAM_INTF_PARM_WAVELET_DENOISE, denoise)) {
+        LOGE("WNR enabled parameter not added to batch\n");
+        return -1;
+    }
+
+    rc = commitSetBatch(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch parameters commit failed\n");
+        return rc;
+    }
+
+    /* cache the setting so reprocessing can reuse it */
+    test_obj->reproc_wnr = denoise;
+    LOGE("WNR enabled: %d",  enable);
+
+    return rc;
+}
+
+
+/** tuneserver_capture
+ *    @lib_handle: the camera handle object
+ *    @dim: snapshot dimensions; may be NULL to keep current dimensions
+ *
+ *  makes JPEG capture
+ *
+ *  Does nothing when streaming is not running.  In ZSL mode the stream is
+ *  restarted only when the requested dimensions differ from the current
+ *  buffers; in regular mode streaming is stopped, a capture is taken, and
+ *  streaming is restarted.
+ *
+ *  Return: >=0 on success, -1 on failure.
+ **/
+int tuneserver_capture(mm_camera_lib_handle *lib_handle,
+                       mm_camera_lib_snapshot_params *dim)
+{
+    int rc = 0;
+
+    printf("Take jpeg snapshot\n");
+    if ( lib_handle->stream_running ) {
+
+        if ( lib_handle->test_obj.zsl_enabled) {
+            if ( NULL != dim) {
+                /* BUGFIX: the height check used "=" (assignment) instead of
+                 * "!=", clobbering buffer_height and forcing a restart for
+                 * any non-zero height. */
+                if ( ( lib_handle->test_obj.buffer_width != dim->width) ||
+                     ( lib_handle->test_obj.buffer_height != dim->height ) ) {
+
+                    lib_handle->test_obj.buffer_width = dim->width;
+                    lib_handle->test_obj.buffer_height = dim->height;
+
+                    rc = mm_camera_lib_stop_stream(lib_handle);
+                    if (rc != MM_CAMERA_OK) {
+                        LOGE("mm_camera_lib_stop_stream() err=%d\n",
+                                    rc);
+                        goto EXIT;
+                    }
+
+                    rc = mm_camera_lib_start_stream(lib_handle);
+                    if (rc != MM_CAMERA_OK) {
+                        LOGE("mm_camera_lib_start_stream() err=%d\n",
+                                    rc);
+                        goto EXIT;
+                    }
+                }
+
+            }
+
+            lib_handle->test_obj.encodeJpeg = 1;
+
+            mm_camera_app_wait();
+        } else {
+            // For standard 2D capture streaming has to be disabled first
+            rc = mm_camera_lib_stop_stream(lib_handle);
+            if (rc != MM_CAMERA_OK) {
+                LOGE("mm_camera_lib_stop_stream() err=%d\n",
+                          rc);
+                goto EXIT;
+            }
+
+            if ( NULL != dim ) {
+                lib_handle->test_obj.buffer_width = dim->width;
+                lib_handle->test_obj.buffer_height = dim->height;
+            }
+            rc = mm_app_start_capture(&lib_handle->test_obj, 1);
+            if (rc != MM_CAMERA_OK) {
+                LOGE("mm_app_start_capture() err=%d\n",
+                          rc);
+                goto EXIT;
+            }
+
+            mm_camera_app_wait();
+
+            rc = mm_app_stop_capture(&lib_handle->test_obj);
+            if (rc != MM_CAMERA_OK) {
+                LOGE("mm_app_stop_capture() err=%d\n",
+                          rc);
+                goto EXIT;
+            }
+
+            // Restart streaming after capture is done
+            rc = mm_camera_lib_start_stream(lib_handle);
+            if (rc != MM_CAMERA_OK) {
+                LOGE("mm_camera_lib_start_stream() err=%d\n",
+                          rc);
+                goto EXIT;
+            }
+        }
+    }
+
+EXIT:
+
+    return rc;
+}
+
+/* Entry point for the regression test suite.  Loads the camera HAL and,
+ * when run_tc is non-zero, runs the unit-test entry point. */
+int mm_app_start_regression_test(int run_tc)
+{
+    mm_camera_app_t my_cam_app;
+    int rc;
+
+    LOGD("\nCamera Test Application\n");
+    memset(&my_cam_app, 0, sizeof(mm_camera_app_t));
+
+    rc = mm_app_load_hal(&my_cam_app);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("mm_app_load_hal failed !!");
+        return rc;
+    }
+
+    if (run_tc) {
+        return mm_app_unit_test_entry(&my_cam_app);
+    }
+#if 0
+    if(run_dual_tc) {
+        printf("\tRunning Dual camera test engine only\n");
+        rc = mm_app_dual_test_entry(&my_cam_app);
+        printf("\t Dual camera engine. EXIT(%d)!!!\n", rc);
+        exit(rc);
+    }
+#endif
+    return rc;
+}
+
+/* Load libmmcamera_tuning.so and resolve its function table.
+ * Returns 0 on success (or if already loaded), -EINVAL on failure.
+ *
+ * Fixes vs. original: the "already loaded" check now runs before dlopen()
+ * (the old order leaked a fresh dlopen handle), and the handle is closed
+ * on every failure path instead of being leaked. */
+int32_t mm_camera_load_tuninglibrary(mm_camera_tuning_lib_params_t *tuning_param)
+{
+  void *(*tuning_open_lib)(void) = NULL;
+
+  LOGD("E");
+  if (tuning_param->func_tbl) {
+    LOGE("already loaded tuninglib..");
+    return 0;
+  }
+
+  tuning_param->lib_handle = dlopen("libmmcamera_tuning.so", RTLD_NOW);
+  if (!tuning_param->lib_handle) {
+    LOGE("Failed opening libmmcamera_tuning.so\n");
+    return -EINVAL;
+  }
+
+  *(void **)&tuning_open_lib  = dlsym(tuning_param->lib_handle,
+    "open_tuning_lib");
+  if (!tuning_open_lib) {
+    LOGE("Failed symbol libmmcamera_tuning.so\n");
+    dlclose(tuning_param->lib_handle);
+    tuning_param->lib_handle = NULL;
+    return -EINVAL;
+  }
+
+  tuning_param->func_tbl = (mm_camera_tune_func_t *)tuning_open_lib();
+  if (!tuning_param->func_tbl) {
+    LOGE("Failed opening library func table ptr\n");
+    dlclose(tuning_param->lib_handle);
+    tuning_param->lib_handle = NULL;
+    return -EINVAL;
+  }
+
+  LOGD("X");
+  return 0;
+}
+
+/* Open camera cam_id via the HAL and initialize the library handle with
+ * default preview/snapshot dimensions and auto-focus mode.
+ * Returns MM_CAMERA_OK on success, an MM_CAMERA_E_* code otherwise. */
+int mm_camera_lib_open(mm_camera_lib_handle *handle, int cam_id)
+{
+    int rc = MM_CAMERA_OK;
+
+    if ( NULL == handle ) {
+        LOGE(" Invalid handle");
+        rc = MM_CAMERA_E_INVALID_INPUT;
+        goto EXIT;
+    }
+
+    memset(handle, 0, sizeof(mm_camera_lib_handle));
+    rc = mm_app_load_hal(&handle->app_ctx);
+    if( MM_CAMERA_OK != rc ) {
+        LOGE("mm_app_init err\n");
+        goto EXIT;
+    }
+
+    /* Seed default stream geometry/format before opening the camera. */
+    handle->test_obj.buffer_width = DEFAULT_PREVIEW_WIDTH;
+    handle->test_obj.buffer_height = DEFAULT_PREVIEW_HEIGHT;
+    handle->test_obj.buffer_format = DEFAULT_SNAPSHOT_FORMAT;
+    handle->current_params.stream_width = DEFAULT_SNAPSHOT_WIDTH;
+    handle->current_params.stream_height = DEFAULT_SNAPSHOT_HEIGHT;
+    handle->current_params.af_mode = CAM_FOCUS_MODE_AUTO; // Default to auto focus mode
+    rc = mm_app_open(&handle->app_ctx, (uint8_t)cam_id, &handle->test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("mm_app_open() cam_idx=%d, err=%d\n",
+                    cam_id, rc);
+        goto EXIT;
+    }
+
+    //rc = mm_app_initialize_fb(&handle->test_obj);
+    // NOTE(review): framebuffer init is disabled above and rc is forced to
+    // OK, so the error check below is currently dead code kept for when
+    // mm_app_initialize_fb() is re-enabled.
+    rc = MM_CAMERA_OK;
+    if (rc != MM_CAMERA_OK) {
+        LOGE("mm_app_initialize_fb() cam_idx=%d, err=%d\n",
+                    cam_id, rc);
+        goto EXIT;
+    }
+
+EXIT:
+
+    return rc;
+}
+
+/* Start preview streaming (ZSL or regular, per zsl_enabled), then query
+ * capabilities and apply the configured focus mode.  stream_running is set
+ * only when every step succeeds.  Returns MM_CAMERA_OK on success. */
+int mm_camera_lib_start_stream(mm_camera_lib_handle *handle)
+{
+    int rc = MM_CAMERA_OK;
+    cam_capability_t camera_cap;
+
+    if ( NULL == handle ) {
+        LOGE(" Invalid handle");
+        rc = MM_CAMERA_E_INVALID_INPUT;
+        goto EXIT;
+    }
+
+    if ( handle->test_obj.zsl_enabled ) {
+        rc = mm_app_start_preview_zsl(&handle->test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_start_preview_zsl() err=%d\n",
+                        rc);
+            goto EXIT;
+        }
+    } else {
+        handle->test_obj.enable_reproc = ENABLE_REPROCESSING;
+        rc = mm_app_start_preview(&handle->test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_start_preview() err=%d\n",
+                        rc);
+            goto EXIT;
+        }
+    }
+
+    // Configure focus mode after stream starts
+    rc = mm_camera_lib_get_caps(handle, &camera_cap);
+    if ( MM_CAMERA_OK != rc ) {
+      LOGE("mm_camera_lib_get_caps() err=%d\n",  rc);
+      /* propagate the error through the common exit path (was "return -1",
+       * inconsistent with every other error path in this function) */
+      goto EXIT;
+    }
+    /* A single FIXED focus mode means the sensor cannot focus at all. */
+    if (camera_cap.supported_focus_modes_cnt == 1 &&
+      camera_cap.supported_focus_modes[0] == CAM_FOCUS_MODE_FIXED) {
+      LOGD("focus not supported");
+      handle->test_obj.focus_supported = 0;
+      handle->current_params.af_mode = CAM_FOCUS_MODE_FIXED;
+    } else {
+      handle->test_obj.focus_supported = 1;
+    }
+    rc = setFocusMode(&handle->test_obj, handle->current_params.af_mode);
+    if (rc != MM_CAMERA_OK) {
+      LOGE("autofocus error\n");
+      goto EXIT;
+    }
+    handle->stream_running = 1;
+
+EXIT:
+    return rc;
+}
+
+/* Stop preview streaming (ZSL or regular) and clear stream_running.
+ * Returns MM_CAMERA_OK on success. */
+int mm_camera_lib_stop_stream(mm_camera_lib_handle *handle)
+{
+    int rc;
+
+    if ( NULL == handle ) {
+        LOGE(" Invalid handle");
+        return MM_CAMERA_E_INVALID_INPUT;
+    }
+
+    if ( handle->test_obj.zsl_enabled ) {
+        rc = mm_app_stop_preview_zsl(&handle->test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_stop_preview_zsl() err=%d\n",
+                        rc);
+            return rc;
+        }
+    } else {
+        rc = mm_app_stop_preview(&handle->test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_stop_preview() err=%d\n",
+                        rc);
+            return rc;
+        }
+    }
+
+    handle->stream_running = 0;
+
+    return MM_CAMERA_OK;
+}
+
+/* Copy the camera capabilities out of the shared capability buffer.
+ * Returns MM_CAMERA_OK on success, MM_CAMERA_E_INVALID_INPUT on bad args. */
+int mm_camera_lib_get_caps(mm_camera_lib_handle *handle,
+                           cam_capability_t *caps)
+{
+    if ( NULL == handle ) {
+        LOGE(" Invalid handle");
+        return MM_CAMERA_E_INVALID_INPUT;
+    }
+
+    if ( NULL == caps ) {
+        LOGE(" Invalid capabilities structure");
+        return MM_CAMERA_E_INVALID_INPUT;
+    }
+
+    *caps = *( (cam_capability_t *) handle->test_obj.cap_buf.mem_info.data );
+
+    return MM_CAMERA_OK;
+}
+
+
+/* Dispatch a library command to the corresponding parameter setter or
+ * capture routine.  in_data is command-specific and optional for most
+ * commands; out_data is currently unused.
+ * Returns MM_CAMERA_OK on success.
+ *
+ * Fixes vs. original: the WNR_ENABLE and SET_FOCUS_MODE cases now check
+ * in_data for NULL before dereferencing (matching the other cases), and
+ * WNR_ENABLE no longer falls through without a break. */
+int mm_camera_lib_send_command(mm_camera_lib_handle *handle,
+                               mm_camera_lib_commands cmd,
+                               void *in_data,
+                               __unused void *out_data)
+{
+    uint32_t width, height;
+    int rc = MM_CAMERA_OK;
+    cam_capability_t *camera_cap = NULL;
+    mm_camera_lib_snapshot_params *dim = NULL;
+
+    if ( NULL == handle ) {
+        LOGE(" Invalid handle");
+        rc = MM_CAMERA_E_INVALID_INPUT;
+        goto EXIT;
+    }
+
+    camera_cap = (cam_capability_t *) handle->test_obj.cap_buf.mem_info.data;
+
+    switch(cmd) {
+        case MM_CAMERA_LIB_FPS_RANGE:
+            if ( NULL != in_data ) {
+                cam_fps_range_t range = *(( cam_fps_range_t * )in_data);
+                rc = setFPSRange(&handle->test_obj, range);
+                if (rc != MM_CAMERA_OK) {
+                        LOGE("setFPSRange() err=%d\n",
+                                    rc);
+                        goto EXIT;
+                }
+            }
+            break;
+        case MM_CAMERA_LIB_FLASH:
+            if ( NULL != in_data ) {
+                cam_flash_mode_t flash = *(( int * )in_data);
+                rc = setFlash(&handle->test_obj, flash);
+                if (rc != MM_CAMERA_OK) {
+                        LOGE("setFlash() err=%d\n",
+                                    rc);
+                        goto EXIT;
+                }
+            }
+            break;
+        case MM_CAMERA_LIB_BESTSHOT:
+            if ( NULL != in_data ) {
+                cam_scene_mode_type scene = *(( int * )in_data);
+                rc = setScene(&handle->test_obj, scene);
+                if (rc != MM_CAMERA_OK) {
+                        LOGE("setScene() err=%d\n",
+                                    rc);
+                        goto EXIT;
+                }
+            }
+            break;
+        case MM_CAMERA_LIB_ZOOM:
+            if ( NULL != in_data ) {
+                int zoom = *(( int * )in_data);
+                rc = setZoom(&handle->test_obj, zoom);
+                if (rc != MM_CAMERA_OK) {
+                        LOGE("setZoom() err=%d\n",
+                                    rc);
+                        goto EXIT;
+                }
+            }
+            break;
+        case MM_CAMERA_LIB_ISO:
+            if ( NULL != in_data ) {
+                cam_iso_mode_type iso = *(( int * )in_data);
+                rc = setISO(&handle->test_obj, iso);
+                if (rc != MM_CAMERA_OK) {
+                        LOGE("setISO() err=%d\n",
+                                    rc);
+                        goto EXIT;
+                }
+            }
+            break;
+        case MM_CAMERA_LIB_SHARPNESS:
+            if ( NULL != in_data ) {
+                int sharpness = *(( int * )in_data);
+                rc = setSharpness(&handle->test_obj, sharpness);
+                if (rc != MM_CAMERA_OK) {
+                        LOGE("setSharpness() err=%d\n",
+                                    rc);
+                        goto EXIT;
+                }
+            }
+            break;
+        case MM_CAMERA_LIB_SATURATION:
+            if ( NULL != in_data ) {
+                int saturation = *(( int * )in_data);
+                rc = setSaturation(&handle->test_obj, saturation);
+                if (rc != MM_CAMERA_OK) {
+                        LOGE("setSaturation() err=%d\n",
+                                    rc);
+                        goto EXIT;
+                }
+            }
+            break;
+        case MM_CAMERA_LIB_CONTRAST:
+            if ( NULL != in_data ) {
+                int contrast = *(( int * )in_data);
+                rc = setContrast(&handle->test_obj, contrast);
+                if (rc != MM_CAMERA_OK) {
+                        LOGE("setContrast() err=%d\n",
+                                    rc);
+                        goto EXIT;
+                }
+            }
+            break;
+        case MM_CAMERA_LIB_SET_TINTLESS:
+            if ( NULL != in_data ) {
+                int tintless = *(( int * )in_data);
+                rc = setTintless(&handle->test_obj, tintless);
+                if (rc != MM_CAMERA_OK) {
+                        LOGE("enable/disable:%d tintless() err=%d\n",
+                                    tintless, rc);
+                        goto EXIT;
+                }
+            }
+            break;
+        case MM_CAMERA_LIB_BRIGHTNESS:
+            if ( NULL != in_data ) {
+                int brightness = *(( int * )in_data);
+                rc = setBrightness(&handle->test_obj, brightness);
+                if (rc != MM_CAMERA_OK) {
+                        LOGE("setBrightness() err=%d\n",
+                                    rc);
+                        goto EXIT;
+                }
+            }
+            break;
+        case MM_CAMERA_LIB_EXPOSURE_METERING:
+            if ( NULL != in_data ) {
+                cam_auto_exposure_mode_type exp = *(( int * )in_data);
+                rc = setExposureMetering(&handle->test_obj, exp);
+                if (rc != MM_CAMERA_OK) {
+                        LOGE("setExposureMetering() err=%d\n",
+                                    rc);
+                        goto EXIT;
+                }
+            }
+            break;
+        case MM_CAMERA_LIB_WB:
+            if ( NULL != in_data ) {
+                cam_wb_mode_type wb = *(( int * )in_data);
+                rc = setWhiteBalance(&handle->test_obj, wb);
+                if (rc != MM_CAMERA_OK) {
+                        LOGE("setWhiteBalance() err=%d\n",
+                                    rc);
+                        goto EXIT;
+                }
+            }
+            break;
+        case MM_CAMERA_LIB_ANTIBANDING:
+            if ( NULL != in_data ) {
+                int antibanding = *(( int * )in_data);
+                rc = setAntibanding(&handle->test_obj, antibanding);
+                if (rc != MM_CAMERA_OK) {
+                        LOGE("setAntibanding() err=%d\n",
+                                    rc);
+                        goto EXIT;
+                }
+            }
+            break;
+        case MM_CAMERA_LIB_EV:
+            if ( NULL != in_data ) {
+                int ev = *(( int * )in_data);
+                rc = setEVCompensation(&handle->test_obj, ev);
+                if (rc != MM_CAMERA_OK) {
+                        LOGE("setEVCompensation() err=%d\n",
+                                    rc);
+                        goto EXIT;
+                }
+            }
+            break;
+        case MM_CAMERA_LIB_ZSL_ENABLE:
+            if ( NULL != in_data) {
+                int enable_zsl = *(( int * )in_data);
+                /* Toggling ZSL while streaming needs a stream restart. */
+                if ( ( enable_zsl != handle->test_obj.zsl_enabled ) &&
+                        handle->stream_running ) {
+                    rc = mm_camera_lib_stop_stream(handle);
+                    if (rc != MM_CAMERA_OK) {
+                        LOGE("mm_camera_lib_stop_stream() err=%d\n",
+                                    rc);
+                        goto EXIT;
+                    }
+                    handle->test_obj.zsl_enabled = enable_zsl;
+                    rc = mm_camera_lib_start_stream(handle);
+                    if (rc != MM_CAMERA_OK) {
+                        LOGE("mm_camera_lib_start_stream() err=%d\n",
+                                    rc);
+                        goto EXIT;
+                    }
+                } else {
+                    handle->test_obj.zsl_enabled = enable_zsl;
+                }
+            }
+            break;
+        case MM_CAMERA_LIB_RAW_CAPTURE:
+
+            if ( 0 == handle->stream_running ) {
+                LOGE(" Streaming is not enabled!");
+                rc = MM_CAMERA_E_INVALID_OPERATION;
+                goto EXIT;
+            }
+
+            rc = mm_camera_lib_stop_stream(handle);
+            if (rc != MM_CAMERA_OK) {
+                LOGE("mm_camera_lib_stop_stream() err=%d\n",
+                            rc);
+                goto EXIT;
+            }
+
+            /* Temporarily switch the buffers to the sensor's RAW dimensions
+             * and format, capture, then restore snapshot settings. */
+            width = handle->test_obj.buffer_width;
+            height = handle->test_obj.buffer_height;
+            handle->test_obj.buffer_width =
+                    (uint32_t)camera_cap->raw_dim[0].width;
+            handle->test_obj.buffer_height =
+                    (uint32_t)camera_cap->raw_dim[0].height;
+            handle->test_obj.buffer_format = DEFAULT_RAW_FORMAT;
+            LOGE("MM_CAMERA_LIB_RAW_CAPTURE %dx%d\n",
+                    camera_cap->raw_dim[0].width,
+                    camera_cap->raw_dim[0].height);
+            rc = mm_app_start_capture_raw(&handle->test_obj, 1);
+            if (rc != MM_CAMERA_OK) {
+                LOGE("mm_app_start_capture() err=%d\n",
+                            rc);
+                goto EXIT;
+            }
+
+            mm_camera_app_wait();
+
+            rc = mm_app_stop_capture_raw(&handle->test_obj);
+            if (rc != MM_CAMERA_OK) {
+                LOGE("mm_app_stop_capture() err=%d\n",
+                            rc);
+                goto EXIT;
+            }
+
+            handle->test_obj.buffer_width = width;
+            handle->test_obj.buffer_height = height;
+            handle->test_obj.buffer_format = DEFAULT_SNAPSHOT_FORMAT;
+            rc = mm_camera_lib_start_stream(handle);
+            if (rc != MM_CAMERA_OK) {
+                LOGE("mm_camera_lib_start_stream() err=%d\n",
+                            rc);
+                goto EXIT;
+            }
+
+            break;
+
+        case MM_CAMERA_LIB_JPEG_CAPTURE:
+            if ( 0 == handle->stream_running ) {
+                LOGE(" Streaming is not enabled!");
+                rc = MM_CAMERA_E_INVALID_OPERATION;
+                goto EXIT;
+            }
+
+            if ( NULL != in_data ) {
+                dim = ( mm_camera_lib_snapshot_params * ) in_data;
+            }
+
+            rc = tuneserver_capture(handle, dim);
+            if (rc != MM_CAMERA_OK) {
+                LOGE("capture error %d\n",  rc);
+                goto EXIT;
+            }
+            break;
+
+        case MM_CAMERA_LIB_SET_FOCUS_MODE: {
+            if ( NULL != in_data ) {
+                cam_focus_mode_type mode = *((cam_focus_mode_type *)in_data);
+                handle->current_params.af_mode = mode;
+                rc = setFocusMode(&handle->test_obj, mode);
+                if (rc != MM_CAMERA_OK) {
+                  LOGE("autofocus error\n");
+                  goto EXIT;
+                }
+            }
+            break;
+        }
+
+        case MM_CAMERA_LIB_DO_AF:
+            if (handle->test_obj.focus_supported) {
+              rc = handle->test_obj.cam->ops->do_auto_focus(handle->test_obj.cam->camera_handle);
+              if (rc != MM_CAMERA_OK) {
+                LOGE("autofocus error\n");
+                goto EXIT;
+              }
+              /*Waiting for Auto Focus Done Call Back*/
+              mm_camera_app_wait();
+            }
+            break;
+
+        case MM_CAMERA_LIB_CANCEL_AF:
+            rc = handle->test_obj.cam->ops->cancel_auto_focus(handle->test_obj.cam->camera_handle);
+            if (rc != MM_CAMERA_OK) {
+                LOGE("autofocus error\n");
+                goto EXIT;
+            }
+
+            break;
+
+        case MM_CAMERA_LIB_LOCK_AWB:
+            rc = setAwbLock(&handle->test_obj, 1);
+            if (rc != MM_CAMERA_OK) {
+                LOGE("AWB locking failed\n");
+                goto EXIT;
+            }
+            break;
+
+        case MM_CAMERA_LIB_UNLOCK_AWB:
+            rc = setAwbLock(&handle->test_obj, 0);
+            if (rc != MM_CAMERA_OK) {
+                LOGE("AE unlocking failed\n");
+                goto EXIT;
+            }
+            break;
+
+        case MM_CAMERA_LIB_LOCK_AE:
+            rc = setAecLock(&handle->test_obj, 1);
+            if (rc != MM_CAMERA_OK) {
+                LOGE("AE locking failed\n");
+                goto EXIT;
+            }
+            break;
+
+        case MM_CAMERA_LIB_UNLOCK_AE:
+            rc = setAecLock(&handle->test_obj, 0);
+            if (rc != MM_CAMERA_OK) {
+                LOGE("AE unlocking failed\n");
+                goto EXIT;
+            }
+            break;
+
+        case MM_CAMERA_LIB_SET_3A_COMMAND: {
+          rc = set3Acommand(&handle->test_obj, (cam_eztune_cmd_data_t *)in_data);
+          if (rc != MM_CAMERA_OK) {
+            LOGE("3A set command error\n");
+            goto EXIT;
+          }
+          break;
+        }
+
+        case MM_CAMERA_LIB_SET_AUTOFOCUS_TUNING: {
+           rc = setAutoFocusTuning(&handle->test_obj, in_data);
+           if (rc != MM_CAMERA_OK) {
+             LOGE("Set AF tuning failed\n");
+             goto EXIT;
+           }
+           break;
+        }
+
+        case MM_CAMERA_LIB_SET_VFE_COMMAND: {
+           rc = setVfeCommand(&handle->test_obj, in_data);
+           if (rc != MM_CAMERA_OK) {
+             LOGE("Set vfe command failed\n");
+             goto EXIT;
+           }
+           break;
+        }
+
+        case MM_CAMERA_LIB_SET_POSTPROC_COMMAND: {
+           rc = setPPCommand(&handle->test_obj, in_data);
+           if (rc != MM_CAMERA_OK) {
+             LOGE("Set pp command failed\n");
+             goto EXIT;
+           }
+           break;
+        }
+
+        case MM_CAMERA_LIB_WNR_ENABLE: {
+            if ( NULL != in_data ) {
+                rc = setWNR(&handle->test_obj, *((uint8_t *)in_data));
+                if ( rc != MM_CAMERA_OK) {
+                    LOGE("Set wnr enable failed\n");
+                    goto EXIT;
+                }
+            }
+            break;
+        }
+
+        case MM_CAMERA_LIB_NO_ACTION:
+        default:
+            break;
+    };
+
+EXIT:
+
+    return rc;
+}
+/* Return the number of cameras detected by the HAL, or 0 for a NULL
+ * handle. */
+int mm_camera_lib_number_of_cameras(mm_camera_lib_handle *handle)
+{
+    if ( NULL == handle ) {
+        LOGE(" Invalid handle");
+        return 0;
+    }
+
+    return handle->app_ctx.num_cameras;
+}
+
+/* Close the camera associated with the library handle.
+ * Returns MM_CAMERA_OK on success, an MM_CAMERA_E_* code otherwise. */
+int mm_camera_lib_close(mm_camera_lib_handle *handle)
+{
+    int rc = MM_CAMERA_OK;
+
+    if ( NULL == handle ) {
+        LOGE(" Invalid handle");
+        rc = MM_CAMERA_E_INVALID_INPUT;
+        goto EXIT;
+    }
+
+    //rc = mm_app_close_fb(&handle->test_obj);
+    // NOTE(review): framebuffer close is disabled above and rc is forced to
+    // OK, so the error check below is currently dead code kept for when
+    // mm_app_close_fb() is re-enabled.
+    rc = MM_CAMERA_OK;
+    if (rc != MM_CAMERA_OK) {
+        LOGE("mm_app_close_fb() err=%d\n",
+                    rc);
+        goto EXIT;
+    }
+
+    rc = mm_app_close(&handle->test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("mm_app_close() err=%d\n",
+                    rc);
+        goto EXIT;
+    }
+
+EXIT:
+    return rc;
+}
+
+/* Register a user preview callback on the library handle.  Only one
+ * callback may be installed; returns -1 if one is already set or the
+ * arguments are invalid, 0 on success. */
+int mm_camera_lib_set_preview_usercb(
+   mm_camera_lib_handle *handle, cam_stream_user_cb cb)
+{
+    /* guard against NULL handle/callback before dereferencing */
+    if (handle == NULL || cb == NULL) {
+        LOGE(" invalid params!");
+        return -1;
+    }
+    if (handle->test_obj.user_preview_cb != NULL) {
+        LOGE(" already set preview callbacks\n");
+        return -1;
+    }
+    handle->test_obj.user_preview_cb = *cb;
+    return 0;
+}
+
+/* Thin wrapper around setFPSRange() that logs the requested range. */
+int mm_app_set_preview_fps_range(mm_camera_test_obj_t *test_obj,
+                        cam_fps_range_t *fpsRange)
+{
+    int rc;
+
+    LOGH("preview fps range: min=%f, max=%f.",
+            fpsRange->min_fps, fpsRange->max_fps);
+
+    rc = setFPSRange(test_obj, *fpsRange);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("add_parm_entry_tobatch failed !!");
+    }
+
+    return rc;
+}
+
+/* Apply face-detection configuration through the parameter batch
+ * interface.  Returns MM_CAMERA_OK on success. */
+int mm_app_set_face_detection(mm_camera_test_obj_t *test_obj,
+        cam_fd_set_parm_t *fd_set_parm)
+{
+    int rc;
+
+    if (test_obj == NULL || fd_set_parm == NULL) {
+        LOGE(" invalid params!");
+        return MM_CAMERA_E_INVALID_INPUT;
+    }
+
+    LOGH("mode = %d, num_fd = %d",
+          fd_set_parm->fd_mode, fd_set_parm->num_fd);
+
+    rc = initBatchUpdate(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch camera parameter update failed\n");
+        return rc;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+        CAM_INTF_PARM_FD, *fd_set_parm)) {
+        LOGE("FD parameter not added to batch\n");
+        return -1;
+    }
+
+    rc = commitSetBatch(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch parameters commit failed\n");
+    }
+
+    return rc;
+}
+
+/* Apply an LED flash mode through the parameter batch interface.
+ * Returns MM_CAMERA_OK on success. */
+int mm_app_set_flash_mode(mm_camera_test_obj_t *test_obj,
+        cam_flash_mode_t flashMode)
+{
+    int rc;
+
+    if (test_obj == NULL) {
+        LOGE(" invalid params!");
+        return MM_CAMERA_E_INVALID_INPUT;
+    }
+
+    LOGH("mode = %d",  (int)flashMode);
+
+    rc = initBatchUpdate(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch camera parameter update failed\n");
+        return rc;
+    }
+
+    if (ADD_SET_PARAM_ENTRY_TO_BATCH(test_obj->parm_buf.mem_info.data,
+        CAM_INTF_PARM_LED_MODE, flashMode)) {
+        LOGE("Flash mode parameter not added to batch\n");
+        return -1;
+    }
+
+    rc = commitSetBatch(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Batch parameters commit failed\n");
+    }
+
+    return rc;
+}
+
+/* Register a user metadata callback on the test object.  Overwrites any
+ * previously-installed callback (with a log notice).  Returns 0 on
+ * success, MM_CAMERA_E_INVALID_INPUT on bad arguments. */
+int mm_app_set_metadata_usercb(mm_camera_test_obj_t *test_obj,
+                        cam_stream_user_cb usercb)
+{
+    if (test_obj == NULL || usercb == NULL) {
+        LOGE(" invalid params!");
+        return MM_CAMERA_E_INVALID_INPUT;
+    }
+
+    /* BUGFIX: original format strings contained "%s" with no matching
+     * argument (undefined behavior in printf-style logging). */
+    LOGH("set user metadata callback, addr: %p\n",  usercb);
+
+    if (test_obj->user_metadata_cb != NULL) {
+        LOGH("already set user metadata callback");
+    }
+    test_obj->user_metadata_cb = usercb;
+
+    return 0;
+}
+
+
diff --git a/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_commands.c b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_commands.c
new file mode 100644
index 0000000..45fb7a8
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_commands.c
@@ -0,0 +1,291 @@
+/* Copyright (c) 2012-2013, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// To remove
+#include <cutils/properties.h>
+
+// Camera dependencies
+#include "mm_qcamera_commands.h"
+#include "mm_qcamera_dbg.h"
+
+/** tuneserver_initialize_prevtuningp
+ *    @ctrl: opaque pointer, actually a mm_camera_lib_handle*
+ *    @pr_client_socket_id: socket id of the preview-tuning client
+ *    @dimension: preview dimensions handed to TUNE_PREVCMD_SETDIM
+ *    @send_buf/@send_len: out-parameters filled by the tuning library
+ *
+ *  Initializes the preview tuning protocol (INIT then SETDIM) and hooks
+ *  the tuning library's frame callback into the preview stream.
+ *
+ *  Return: result of the SETDIM command, or -1 if the function table has
+ *  no prevcommand_process handler.
+ **/
+int tuneserver_initialize_prevtuningp(void * ctrl,
+  int pr_client_socket_id, cam_dimension_t dimension,
+  char **send_buf, uint32_t *send_len)
+{
+  int result = 0;
+  mm_camera_lib_handle *lib_handle = (mm_camera_lib_handle *) ctrl;
+  tuningserver_t *tctrl = &lib_handle->tsctrl;
+
+  LOGD("E");
+  if (tctrl->tuning_params.func_tbl->prevcommand_process == NULL) {
+      LOGE("prevcommand_process is NULL");
+      return -1;
+  }
+
+  /* NOTE(review): the result of TUNE_PREVCMD_INIT is immediately
+   * overwritten by the SETDIM call below, so an INIT failure is silently
+   * lost — confirm whether INIT should be checked before proceeding. */
+  result = tctrl->tuning_params.func_tbl->prevcommand_process(
+      NULL, TUNE_PREVCMD_INIT, (void *)&pr_client_socket_id,
+      send_buf, send_len);
+  result = tctrl->tuning_params.func_tbl->prevcommand_process(
+      NULL, TUNE_PREVCMD_SETDIM, (void *)&dimension,
+      send_buf, send_len);
+
+  /* Return value of mm_camera_lib_set_preview_usercb is not checked. */
+  mm_camera_lib_set_preview_usercb(lib_handle,
+      (tctrl->tuning_params.func_tbl->prevframe_callback));
+
+  return result;
+}
+
+/* Tear down the preview tuning protocol instance referenced by @ctrl
+ * (here @ctrl is a tuningserver_t*, unlike the initialize variant which
+ * receives a mm_camera_lib_handle*). Returns the command result. */
+int tuneserver_deinitialize_prevtuningp(void * ctrl,
+    char **send_buf, uint32_t *send_len)
+{
+  tuningserver_t *tsctrl = (tuningserver_t *) ctrl;
+
+  LOGD("E");
+
+  /* Forward DEINIT to the tuning library's preview command handler. */
+  return tsctrl->tuning_params.func_tbl->prevcommand_process(
+    &tsctrl->pr_proto, TUNE_PREVCMD_DEINIT, NULL, send_buf, send_len);
+}
+
+/* Query preview-stream info from the tuning library on behalf of the
+ * connected client; output is written to @send_buf/@send_len. */
+int tuneserver_preview_getinfo(void * ctrl, char **send_buf, uint32_t *send_len)
+{
+  tuningserver_t *tsctrl = (tuningserver_t *) ctrl;
+
+  LOGD("E");
+  /* Forward GETINFO to the preview command handler and return its status. */
+  return tsctrl->tuning_params.func_tbl->prevcommand_process(
+    &tsctrl->pr_proto, TUNE_PREVCMD_GETINFO, NULL, send_buf, send_len);
+}
+
+/* Negotiate the preview data chunk size: passes the protocol's
+ * new_cnk_size to the tuning library and returns the command status. */
+int tuneserver_preview_getchunksize(void * ctrl,
+  char **send_buf, uint32_t *send_len)
+{
+  tuningserver_t *tsctrl = (tuningserver_t *) ctrl;
+
+  LOGD("E");
+  return tsctrl->tuning_params.func_tbl->prevcommand_process(
+    &tsctrl->pr_proto, TUNE_PREVCMD_GETCHUNKSIZE,
+    (void *)&tsctrl->pr_proto->new_cnk_size, send_buf, send_len);
+}
+
+/* Request one preview frame from the tuning library for the client. */
+int tuneserver_preview_getframe(void * ctrl,
+  char **send_buf, uint32_t *send_len)
+{
+  tuningserver_t *tsctrl = (tuningserver_t *) ctrl;
+
+  LOGD("E");
+  /* Forward GETFRAME and propagate the handler's return code. */
+  return tsctrl->tuning_params.func_tbl->prevcommand_process(
+    &tsctrl->pr_proto, TUNE_PREVCMD_GETFRAME, NULL, send_buf, send_len);
+}
+
+/* Report an unsupported preview command back through the tuning
+ * library so the client receives a well-formed error response. */
+int tuneserver_preview_unsupported(void * ctrl,
+  char **send_buf, uint32_t *send_len)
+{
+  tuningserver_t *tsctrl = (tuningserver_t *) ctrl;
+
+  LOGD("E");
+  return tsctrl->tuning_params.func_tbl->prevcommand_process(
+    &tsctrl->pr_proto, TUNE_PREVCMD_UNSUPPORTED, NULL, send_buf, send_len);
+}
+
+/* Initialize the tuning protocol for a newly connected control client.
+ * @ctrl is a mm_camera_lib_handle*; the client socket id is handed to
+ * the tuning library's TUNE_CMD_INIT handler. Returns its status. */
+int tuneserver_initialize_tuningp(void * ctrl, int client_socket_id,
+  char *send_buf, uint32_t send_len)
+{
+  mm_camera_lib_handle *lib = (mm_camera_lib_handle *) ctrl;
+
+  LOGD("E");
+  return lib->tsctrl.tuning_params.func_tbl->command_process(
+    lib, TUNE_CMD_INIT, &client_socket_id, send_buf, send_len);
+}
+
+/* Deinitialize the tuning protocol for a disconnecting client.
+ * Note: unlike the initialize variant, @ctrl here is a tuningserver_t*
+ * and the command is issued with a NULL context argument. */
+int tuneserver_deinitialize_tuningp(void * ctrl, int client_socket_id,
+  char *send_buf, uint32_t send_len)
+{
+  tuningserver_t *tsctrl = (tuningserver_t *) ctrl;
+
+  LOGD("E");
+
+  return tsctrl->tuning_params.func_tbl->command_process(
+    NULL, TUNE_CMD_DEINIT, &client_socket_id, send_buf, send_len);
+}
+
+/* Handle a GET_LIST request: forward the raw received command to the
+ * tuning library and return its status. */
+int tuneserver_process_get_list_cmd(void * ctrl, void *recv_cmd,
+  char *send_buf, uint32_t send_len)
+{
+  tuningserver_t *tsctrl = (tuningserver_t *) ctrl;
+
+  LOGD("E");
+  return tsctrl->tuning_params.func_tbl->command_process(
+     recv_cmd, TUNE_CMD_GET_LIST, NULL, send_buf, send_len);
+}
+
+/* Handle a GET_PARAMS request: forward the raw received command to the
+ * tuning library and return its status. */
+int tuneserver_process_get_params_cmd(void * ctrl, void *recv_cmd,
+  char *send_buf, uint32_t send_len)
+{
+  tuningserver_t *tsctrl = (tuningserver_t *) ctrl;
+
+  LOGD("E");
+  return tsctrl->tuning_params.func_tbl->command_process(
+     recv_cmd, TUNE_CMD_GET_PARAMS, NULL, send_buf, send_len);
+}
+
+/* Handle a SET_PARAMS request: forward the raw received command to the
+ * tuning library and return its status. */
+int tuneserver_process_set_params_cmd(void * ctrl, void *recv_cmd,
+  char *send_buf, uint32_t send_len)
+{
+  tuningserver_t *tsctrl = (tuningserver_t *) ctrl;
+
+  LOGD("E");
+  return tsctrl->tuning_params.func_tbl->command_process(
+     recv_cmd, TUNE_CMD_SET_PARAMS, NULL, send_buf, send_len);
+}
+
+/* Handle a miscellaneous tuning request: forward the raw received
+ * command to the tuning library and return its status. */
+int tuneserver_process_misc_cmd(void * ctrl, void *recv_cmd,
+  char *send_buf, uint32_t send_len)
+{
+  tuningserver_t *tsctrl = (tuningserver_t *) ctrl;
+
+  LOGD("E");
+  return tsctrl->tuning_params.func_tbl->command_process(
+     recv_cmd, TUNE_CMD_MISC, NULL, send_buf, send_len);
+}
+
+/** tuneserver_close_cam
+ *    @lib_handle: the camera handle object
+ *
+ *  closes the camera
+ *
+ *  Return: >=0 on success, -1 on failure.
+ **/
+int tuneserver_close_cam(mm_camera_lib_handle *lib_handle)
+{
+  int result = 0;
+
+  /* NOTE(review): status goes to stdout via printf here, unlike the
+   * LOG* macros used by the rest of this file. */
+  result = mm_camera_lib_close(lib_handle);
+  if (result < 0) {
+    printf(" Camera close failed\n");
+  } else {
+    printf("Camera is closed \n");
+  }
+  return result;
+}
+/* NOTE(review): this helper is compiled out via #if 0 and kept only for
+ * reference; nothing in the visible code calls it. */
+#if 0
+/** tuneserver_start_cam
+ *    @lib_handle: the camera handle object
+ *
+ *  starts the camera
+ *
+ *  Return: >=0 on success, -1 on failure.
+ **/
+static int tuneserver_start_cam(mm_camera_lib_handle *lib_handle)
+{
+  int result = 0;
+
+  result = mm_camera_lib_start_stream(lib_handle);
+  if (result < 0) {
+    printf(" Camera start failed\n");
+    goto error1;
+  }
+  return result;
+/* On failure the camera is closed before returning the error. */
+error1:
+  mm_camera_lib_close(lib_handle);
+  return result;
+}
+#endif
+
+/** tuneserver_stop_cam
+ *    @lib_handle: the camera handle object
+ *
+ *  stops the camera
+ *
+ *  Return: >=0 on success, -1 on failure.
+ **/
+int tuneserver_stop_cam(mm_camera_lib_handle *lib_handle)
+{
+  int result = 0;
+
+  result = mm_camera_lib_stop_stream(lib_handle);
+  if (result < 0) {
+    printf(" Camera stop failed\n");
+  }
+/* NOTE(review): the close call below is deliberately commented out, so
+ * stopping does not release the camera — tuneserver_close_cam does. */
+//  result = mm_camera_lib_close(lib_handle);
+  return result;
+}
+
+/** tuneserver_open_cam
+ *    @lib_handle: the camera handle object
+ *
+ *  opens the camera
+ *
+ *  Return: >=0 on success, -1 on failure.
+ **/
+/* NOTE(review): leftover "#if 1" guard — always compiled in; despite its
+ * name this function only loads the tuning library, it does not open the
+ * camera hardware itself. */
+#if 1
+int tuneserver_open_cam(mm_camera_lib_handle *lib_handle)
+{
+  int result = 0;
+
+  LOGD("E");
+  result = mm_camera_load_tuninglibrary(&lib_handle->tsctrl.tuning_params);
+  if (result < 0) {
+    LOGE(" tuning library open failed\n");
+  }
+  return result;
+}
+#endif
diff --git a/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_dual_test.c b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_dual_test.c
new file mode 100644
index 0000000..564c474
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_dual_test.c
@@ -0,0 +1,1933 @@
+/*
+Copyright (c) 2012, 2016, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of The Linux Foundation nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+// System dependencies
+#include <pthread.h>
+
+// Camera dependencies
+#include "mm_qcamera_unit_test.h"
+#include "mm_camera_dbg.h"
+
+#define MM_QCAMERA_APP_UTEST_MAX_MAIN_LOOP 4
+#define MM_QCAM_APP_TEST_NUM 128
+
+#define MM_QCAMERA_APP_WAIT_TIME 1000000000
+
+extern int system_dimension_set(int cam_id);
+extern int stopPreview(int cam_id);
+extern int takePicture_yuv(int cam_id);
+extern int takePicture_rdi(int cam_id);
+extern int startRdi(int cam_id);
+extern int stopRdi(int cam_id);
+extern int startStats(int cam_id);
+extern int stopStats(int cam_id);
+
+
+/*
+* 1. open back
+* 2. open front
+* 3. start back
+* 4. start front
+* 5. stop back
+* 6. stop front
+* 7. close back
+* 8. close front
+* 9. take picture
+* a. start recording
+* b. stop recording
+* c. take picture rdi
+*/
+/* Table of registered dual-camera test cases and its fill count. */
+static mm_app_tc_t mm_app_tc[MM_QCAM_APP_TEST_NUM];
+static int num_test_cases = 0;
+/* Per-thread iteration counts for the multi-threaded test cases;
+ * presumably each field is the number of launch/preview/recording/
+ * snapshot cycles a worker thread runs — confirm against the thread
+ * functions below. */
+struct test_case_params {
+  uint16_t launch;
+  uint16_t preview;
+  uint16_t recording;
+  uint16_t snapshot;
+};
+
+/*  Test case 12436857 :*/
+
+/** mm_app_dtc_0
+ *    @cam_apps: camera application context (not used by this test case)
+ *
+ *  Test case 12436857: open back, open front, start RDI on front,
+ *  start preview on back, stop RDI, close front, stop preview, close
+ *  back. Prints Passed/Failed to stdout.
+ *
+ *  Return: 0 on success, -1 or the failing call's error code otherwise.
+ **/
+int mm_app_dtc_0(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview on back Camera and RDI on Front camera 0...\n");
+        LOGE("DUAL open back camera \n");
+        if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_open() back camera err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                LOGE("system_dimension_set() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        LOGE("DUAL open front camera \n");
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_open() front camera err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                LOGE("system_dimension_set() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        LOGE("DUAL start camera Rdi for front \n");
+        /* Fix: error messages below used to report "startPreview" for
+         * startRdi/stopRdi/stopPreview failures (copy-paste). */
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+                LOGE(" front camera startRdi() err=%d\n",  rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+
+        LOGE("DUAL start camera Preview for back \n");
+        if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                LOGE(" startPreview() backcamera err=%d\n",  rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+
+        LOGE("DUAL stop camera Rdi for front \n");
+        if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+                LOGE(" front camera stopRdi() err=%d\n",  rc);
+                goto end;
+        }
+        usleep(10*1000);
+
+        LOGE("DUAL close front camera\n");
+        if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_close() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        sleep(1);
+        LOGE("DUAL stop camera Preview for back \n");
+        if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+                LOGD(" stopPreview() err=%d\n",  rc);
+                goto end;
+        }
+        usleep(10*1000);
+        LOGE("DUAL close back camera \n");
+        if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_close() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        LOGE("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        LOGD("END, rc = %d\n",  rc);
+        return rc;
+}
+
+/*  Test case 12436587 :*/
+
+/** mm_app_dtc_1
+ *    @cam_apps: camera application context (not used by this test case)
+ *
+ *  Test case 12436587: open back, open front, start RDI on front, start
+ *  preview on back, stop RDI, stop preview, close front, close back.
+ *
+ *  NOTE(review): several failure messages below are copy-pasted and
+ *  name "startPreview" for startRdi/stopRdi/stopPreview calls; locals
+ *  i, j and result are unused.
+ *
+ *  Return: 0 on success, -1 or the failing call's error code otherwise.
+ **/
+int mm_app_dtc_1(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview on back Camera and RDI on Front camera 1...\n");
+        LOGE("DUAL open back camera \n");
+        if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_open() back camera err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                LOGE("system_dimension_set() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        LOGE("DUAL open front camera \n");
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_open() front camera err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                LOGE("system_dimension_set() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        LOGE("DUAL start camera Rdi for front \n");
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+               LOGE(" back camera startPreview() err=%d\n",  rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+
+        LOGE("DUAL start camera Preview for back \n");
+
+        if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                LOGE(" startPreview() backcamera err=%d\n",  rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+        LOGE("DUAL end \n");
+
+        LOGE("DUAL stop camera Preview for front \n");
+        if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+                LOGE(" startPreview() backcamera err=%d\n",  rc);
+                goto end;
+        }
+        usleep(10*1000);
+        LOGE("DUAL stop camera Preview for back \n");
+        if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+                LOGD(" startPreview() err=%d\n",  rc);
+                goto end;
+        }
+        usleep(10*1000);
+        LOGE("DUAL close front camera\n");
+        if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_close() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        LOGE("DUAL close back camera \n");
+        if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_close() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        LOGD("END, rc = %d\n",  rc);
+        return rc;
+}
+
+/*  Test case 12436578 :*/
+
+/** mm_app_dtc_2
+ *    @cam_apps: camera application context (not used by this test case)
+ *
+ *  Test case 12436578: same flow as mm_app_dtc_1 but closes the back
+ *  camera before the front camera.
+ *
+ *  NOTE(review): failure messages are copy-pasted ("startPreview" for
+ *  startRdi/stopRdi/stopPreview); locals i, j and result are unused.
+ *
+ *  Return: 0 on success, -1 or the failing call's error code otherwise.
+ **/
+int mm_app_dtc_2(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview on back Camera and RDI on Front camera 2...\n");
+        LOGE("DUAL open back camera \n");
+        if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_open() back camera err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                LOGE("system_dimension_set() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        LOGE("DUAL open front camera \n");
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_open() front camera err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                LOGE("system_dimension_set() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        LOGE("DUAL start camera Rdi for front \n");
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+               LOGE(" back camera startPreview() err=%d\n",  rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+
+        LOGE("DUAL start camera Preview for back \n");
+
+        if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                LOGE(" startPreview() backcamera err=%d\n",  rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+        LOGE("DUAL end \n");
+
+        LOGE("DUAL stop camera Preview for front \n");
+        if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+                LOGE(" startPreview() backcamera err=%d\n",  rc);
+                goto end;
+        }
+        usleep(10*1000);
+        LOGE("DUAL stop camera Preview for back \n");
+        if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+                LOGD(" startPreview() err=%d\n",  rc);
+                goto end;
+        }
+        usleep(10*1000);
+        LOGE("DUAL close back camera \n");
+        if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_close() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        LOGE("DUAL close front camera\n");
+        if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_close() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        LOGD("END, rc = %d\n",  rc);
+        return rc;
+}
+
+/*  Test case 241395768 : 1357 * 3, This is performed three times
+* And for each iteration 9 is performed thrice */
+
+/** mm_app_dtc_3
+ *    @cam_apps: camera application context (not used by this test case)
+ *
+ *  Test case 241395768: keep RDI running on the front camera while the
+ *  back camera is repeatedly opened, previewed, used for YUV snapshots
+ *  (takePicture_yuv), and closed, MM_QCAMERA_APP_INTERATION times.
+ *
+ *  NOTE(review): local i is unused; result is unused; a snapshot
+ *  failure only breaks the inner loop, the test still continues.
+ *
+ *  Return: 0 on success, -1 or the failing call's error code otherwise.
+ **/
+int mm_app_dtc_3(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j,k;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview and snapshot on back Camera and RDI on Front camera 3...\n");
+        LOGE("DUAL open front camera \n");
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_open() front camera err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                LOGE("system_dimension_set() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        LOGE("DUAL start camera Preview for front \n");
+
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+                LOGE(" startPreview() frontcamera err=%d\n",  rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        usleep(10*1000);
+
+        for (k = 0; k < MM_QCAMERA_APP_INTERATION ; k++) {
+          LOGE("DUAL open back camera %d \n",k);
+          if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                  LOGE("mm_app_open() back camera err=%d\n", rc);
+                  rc = -1;
+                  goto end;
+          }
+
+          if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                  LOGE("system_dimension_set() err=%d\n", rc);
+                  rc = -1;
+                  goto end;
+          }
+
+          LOGE("DUAL start camera Preview for back \n");
+          if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                 LOGE(" back camera startPreview() err=%d\n",  rc);
+                  goto end;
+          }
+
+          for (j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+              LOGE("DUAL take picture for back \n");
+              if ( MM_CAMERA_OK != (rc = takePicture_yuv(back_camera))) {
+                  LOGE(" TakePicture() err=%d\n",  rc);
+                  break;
+              }
+              mm_camera_app_wait();
+
+          }
+          usleep(10*1000);
+          LOGE("DUAL stop camera Preview for back \n");
+          if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+                  LOGE(" stopPreview() backcamera err=%d\n",  rc);
+                  goto end;
+          }
+          usleep(10*1000);
+
+          LOGE("DUAL close back camera\n");
+          if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+                  LOGE("mm_app_close() err=%d\n", rc);
+                  rc = -1;
+                  goto end;
+          }
+          usleep(20*1000);
+        }
+        LOGE("DUAL stop camera Preview for Rdi \n");
+        if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+                LOGD(" stopRdi() err=%d\n",  rc);
+                goto end;
+        }
+        usleep(10*1000);
+        LOGE("DUAL close front camera \n");
+        if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_close() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        LOGE("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        LOGD("END, rc = %d\n",  rc);
+        return rc;
+}
+
+/*  Test case 2413ab5768 : 1357 * 3, This is performed three times
+* And for each iteration ab is performed thrice */
+
+/** mm_app_dtc_4
+ *    @cam_apps: camera application context (not used by this test case)
+ *
+ *  Test case 2413ab5768: keep RDI running on the front camera while the
+ *  back camera is repeatedly opened, previewed, used for record/stop
+ *  cycles (startRecording/stopRecording), and closed,
+ *  MM_QCAMERA_APP_INTERATION times.
+ *
+ *  NOTE(review): local i is unused; result is unused; a recording
+ *  failure only breaks the inner loop, the test still continues.
+ *
+ *  Return: 0 on success, -1 or the failing call's error code otherwise.
+ **/
+int mm_app_dtc_4(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j,k;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview on back Camera and RDI on Front camera 4...\n");
+        LOGE("DUAL open front camera \n");
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_open() front camera err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                LOGE("system_dimension_set() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        LOGE("DUAL start camera Preview for front \n");
+
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+                LOGE(" startPreview() frontcamera err=%d\n",  rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        usleep(20*1000);
+
+        for (k = 0; k < MM_QCAMERA_APP_INTERATION ; k++){
+          LOGE("DUAL open back camera %d \n",k);
+          if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                 LOGE("mm_app_open() back camera err=%d\n", rc);
+                 rc = -1;
+                 goto end;
+          }
+
+          if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                 LOGE("system_dimension_set() err=%d\n", rc);
+                 rc = -1;
+                 goto end;
+          }
+
+          LOGE("DUAL start camera Preview for back \n");
+          if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                LOGE(" back camera startPreview() err=%d\n",  rc);
+                 goto end;
+          }
+          usleep(30*1000);
+
+          for (j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+             LOGE("DUAL start camera record for back \n");
+             if ( MM_CAMERA_OK != (rc = startRecording(back_camera))) {
+                 LOGE(" StartVideorecording() err=%d\n",  rc);
+                 break;
+             }
+
+             mm_camera_app_wait();
+             usleep(15*1000);
+             LOGE("DUAL stop camera record for back \n");
+             if ( MM_CAMERA_OK != (rc = stopRecording(back_camera))) {
+                 LOGE(" Stopvideorecording() err=%d\n",  rc);
+                 break;
+             }
+          }
+          usleep(10*1000);
+
+          LOGE("DUAL stop camera Preview for back \n");
+          if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+                 LOGE(" stopPreview() backcamera err=%d\n",  rc);
+                 goto end;
+          }
+          usleep(10*1000);
+
+          LOGE("DUAL close back camera\n");
+          if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+                 LOGE("mm_app_close() err=%d\n", rc);
+                 rc = -1;
+                 goto end;
+          }
+          usleep(20*1000);
+        }
+        LOGE("DUAL stop camera Preview for Rdi \n");
+        if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+                LOGD(" stopRdi() err=%d\n",  rc);
+                goto end;
+        }
+        usleep(10*1000);
+        LOGE("DUAL close front camera \n");
+        if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_close() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        LOGE("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        LOGD("END, rc = %d\n",  rc);
+        return rc;
+}
+
+/*  Test case 24135768 : 1357 * 3, This is performed three times*/
+
+/** mm_app_dtc_5
+ *    @cam_apps: camera application context (not used by this test case)
+ *
+ *  Test case 24135768: keep RDI running on the front camera while the
+ *  back camera is opened, previewed, stopped, and closed four times.
+ *
+ *  NOTE(review): locals i, j and result are unused; the loop bound is a
+ *  hard-coded 4 unlike dtc_3/dtc_4 which use MM_QCAMERA_APP_INTERATION.
+ *
+ *  Return: 0 on success, -1 or the failing call's error code otherwise.
+ **/
+int mm_app_dtc_5(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j,k;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview on back Camera and RDI on Front camera 5...\n");
+        LOGE("DUAL open front camera \n");
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_open() front camera err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                LOGE("system_dimension_set() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        LOGE("DUAL start camera Preview for front \n");
+
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+                LOGE(" startPreview() frontcamera err=%d\n",  rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+
+        for (k = 0; k < 4 ; k++) {
+          LOGE("DUAL open back camera %d \n",k);
+          if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                  LOGE("mm_app_open() back camera err=%d\n", rc);
+                  rc = -1;
+                  goto end;
+          }
+
+          if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                  LOGE("system_dimension_set() err=%d\n", rc);
+                  rc = -1;
+                  goto end;
+          }
+
+          LOGE("DUAL start camera Preview for back \n");
+          if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                 LOGE(" back camera startPreview() err=%d\n",  rc);
+                  goto end;
+          }
+          mm_camera_app_wait();
+          sleep(1);
+
+          LOGE("DUAL stop camera Preview for back \n");
+          if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+                  LOGE(" stopPreview() backcamera err=%d\n",  rc);
+                  goto end;
+          }
+          usleep(10*1000);
+
+          LOGE("DUAL close back camera\n");
+          if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+                  LOGE("mm_app_close() err=%d\n", rc);
+                  rc = -1;
+                  goto end;
+          }
+          sleep(1);
+        }
+        LOGE("DUAL stop camera Preview for Rdi \n");
+        if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+                LOGD(" stopRdi() err=%d\n",  rc);
+                goto end;
+        }
+        usleep(10*1000);
+        LOGE("DUAL close front camera \n");
+        if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_close() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        LOGE("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        LOGD("END, rc = %d\n",  rc);
+        return rc;
+}
+
+/*  Test case 13246857 : 2468 * 3, This is performed three times*/
+
+/** mm_app_dtc_6
+ *    @cam_apps: camera application context (not used by this test case)
+ *
+ *  Test case 13246857: keep preview running on the back camera while the
+ *  front camera is opened, RDI-started, RDI-stopped, and closed four
+ *  times.
+ *
+ *  NOTE(review): locals i, j and result are unused; the loop body is not
+ *  re-indented; some failure messages name "startPreview" for
+ *  startRdi/stopRdi/stopPreview calls (copy-paste).
+ *
+ *  Return: 0 on success, -1 or the failing call's error code otherwise.
+ **/
+int mm_app_dtc_6(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j,k;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview on back Camera and RDI on Front camera 6...\n");
+        LOGE("DUAL open back camera \n");
+        if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_open() back camera err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                LOGE("system_dimension_set() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        LOGE("DUAL start camera Preview for back \n");
+
+        if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                LOGE(" startPreview() backcamera err=%d\n",  rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+
+        for (k = 0; k < 4 ; k++) {
+        LOGE("DUAL open front camera %d \n",k);
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_open() front camera err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                LOGE("system_dimension_set() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        LOGE("DUAL start camera Rdi for front \n");
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+               LOGE(" back camera startPreview() err=%d\n",  rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+
+        LOGE("DUAL stop camera Preview for front \n");
+        if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+                LOGE(" startPreview() backcamera err=%d\n",  rc);
+                goto end;
+        }
+        usleep(10*1000);
+
+        LOGE("DUAL close front camera\n");
+        if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_close() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        sleep(1);
+        }
+        LOGE("DUAL stop camera Preview for back \n");
+        if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+                LOGD(" startPreview() err=%d\n",  rc);
+                goto end;
+        }
+        usleep(10*1000);
+        LOGE("DUAL close back camera \n");
+        if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_close() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        LOGE("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        LOGD("END, rc = %d\n",  rc);
+        return rc;
+}
+
+/*Multi Threaded Test Cases*/
+/* Worker thread for the concurrent dtc_7..12 test cases: exercises
+ * the front camera over params.launch open/close cycles, each with
+ * params.preview RDI start/stop cycles and params.snapshot RDI
+ * snapshots per cycle.  On error it logs, abandons the loop and
+ * returns (the camera is left open in that case). */
+static void *front_thread(void *data)
+{
+        int front_camera = 1;
+        int rc = MM_CAMERA_OK;
+        int i,j,k;
+        struct test_case_params params
+          = *((struct test_case_params *)data);
+        for (i = 0; i < params.launch; i++) {
+          LOGE("DUAL open front camera %d\n",i);
+          /* Capture return values so the logged error is the real one. */
+          if((rc = mm_app_open(front_camera)) != MM_CAMERA_OK) {
+            LOGE("mm_app_open() front camera err=%d\n", rc);
+            rc = -1;
+            goto end;
+          }
+
+          if((rc = system_dimension_set(front_camera)) != MM_CAMERA_OK){
+            LOGE("system_dimension_set() err=%d\n", rc);
+            rc = -1;
+            goto end;
+          }
+
+          for (j = 0; j < params.preview; j++) {
+            LOGE("DUAL start camera Rdi for front %d ,%d \n",i,j);
+            if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+              LOGE(" startRdi() front camera err=%d\n",  rc);
+              goto end;
+            }
+            mm_camera_app_wait();
+            usleep(20*1000);
+            for (k = 0; k < params.snapshot; k++) {
+              LOGE("DUAL take picture for front %d,%d,%d \n",i,j,k);
+              if ( MM_CAMERA_OK != (rc = takePicture_rdi(front_camera))) {
+                LOGE(" takePicture_rdi() err=%d\n",  rc);
+                goto end;
+              }
+              mm_camera_app_wait();
+              usleep(30*1000);
+            }
+            LOGE("DUAL stop camera Rdi for front %d,%d\n",i,j);
+            if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+              LOGE(" stopRdi() front camera err=%d\n",  rc);
+              goto end;
+            }
+            usleep(10*1000);
+          }
+
+          LOGE("DUAL close front camera %d\n",i);
+          if((rc = mm_app_close(front_camera)) != MM_CAMERA_OK) {
+            LOGE("mm_app_close() err=%d\n", rc);
+            rc = -1;
+            goto end;
+          }
+        }
+end:
+        LOGE("DUAL front thread close %d",rc);
+        return NULL;
+}
+
+/* Worker thread for the concurrent dtc_7..12 test cases: exercises
+ * the back camera (pixel path).  For each of params.launch iterations
+ * it opens the back camera, then params.preview times: starts preview,
+ * takes params.snapshot YUV snapshots, runs params.recording
+ * start/stop recording cycles, and stops preview; finally closes the
+ * camera.  Runs concurrently with front_thread.
+ * NOTE(review): recording errors only break out of the recording loop
+ * (best effort); open/preview errors jump to end and leave the camera
+ * open.  rc is logged before being assigned on the open/dimension
+ * paths, so those messages print 0. */
+static void *back_thread(void *data)
+{
+        int rc = MM_CAMERA_OK;
+        int back_camera = 0;
+        int i,j,k,m;
+        /* Copy the caller's params by value; 'data' stays owned by the
+         * spawning test case. */
+        struct test_case_params params
+          = *((struct test_case_params *)data);
+        for (i = 0; i < params.launch; i++) {
+          LOGE("DUAL open back camera %d\n",i);
+          if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+            LOGE("mm_app_open() back camera err=%d\n", rc);
+            rc = -1;
+            goto end;
+          }
+          if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+            LOGE("system_dimension_set() err=%d\n", rc);
+            rc = -1;
+            goto end;
+          }
+
+          for (j = 0; j < params.preview; j++) {
+            LOGE("DUAL start camera Preview for back %d, %d\n",i,j);
+            if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+              LOGE(" startPreview() backcamera err=%d\n",  rc);
+              goto end;
+            }
+            mm_camera_app_wait();
+            usleep(20*1000);
+            for (k = 0; k < params.snapshot; k++) {
+              LOGE("DUAL take picture for back %d, %d, %d\n",i,j,k);
+              if ( MM_CAMERA_OK != (rc = takePicture_yuv(back_camera))) {
+                LOGE(" TakePicture() err=%d\n",  rc);
+                goto end;
+              }
+              mm_camera_app_wait();
+              usleep(30*1000);
+            }
+
+            /* Recording failures fall through to stopPreview instead of
+             * aborting the whole thread. */
+            for (m = 0; m < params.recording; m++) {
+              LOGE("DUAL start record for back %d, %d, %d\n",i,j,m);
+              if ( MM_CAMERA_OK != (rc = startRecording(back_camera))) {
+                LOGE(" StartVideorecording() err=%d\n",  rc);
+                break;
+              }
+
+              mm_camera_app_wait();
+              usleep(10*1000);
+              LOGE("DUAL stop camera record for back \n");
+              if ( MM_CAMERA_OK != (rc = stopRecording(back_camera))) {
+                LOGE(" Stopvideorecording() err=%d\n",  rc);
+                break;
+              }
+              usleep(10*1000);
+            }
+            LOGE("DUAL stop camera Preview for back %d, %d\n",i,j);
+            if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+              /* NOTE(review): message says startPreview but this is the
+               * stopPreview failure path. */
+              LOGD(" startPreview() err=%d\n",  rc);
+              goto end;
+            }
+            usleep(10*1000);
+          }
+
+          LOGE("DUAL close back camera %d\n",i);
+          if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+            LOGE("mm_app_close() err=%d\n", rc);
+            rc = -1;
+            goto end;
+          }
+        }
+end:
+        LOGE("DUAL back thread close %d",rc);
+        return NULL;
+}
+
+/*  Test case m13572468 : Open & start  in 2 concurrent pthread*/
+/* Concurrent dual-camera test: one shared param set (5 launches x 5
+ * preview/RDI cycles, no snapshots or recording) driven by back_thread
+ * and front_thread simultaneously.  Returns 0 on success. */
+int mm_app_dtc_7(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        pthread_t back_thread_id, front_thread_id;
+        struct test_case_params params;
+        memset(&params, 0, sizeof(struct test_case_params));
+        params.launch = 5;
+        params.preview = 5;
+        printf("\n Verifying Preview on back Camera and RDI on Front camera 7...\n");
+
+        LOGE("start back DUAL ");
+        /* pthread_join on a thread that was never created is undefined
+         * behaviour, so check the pthread_create results instead of
+         * discarding them. */
+        rc = pthread_create(&back_thread_id, NULL, back_thread, &params);
+        if (rc != 0) {
+                LOGE("pthread_create(back_thread) err=%d", rc);
+                goto end;
+        }
+        LOGE("start front DUAL ");
+        rc = pthread_create(&front_thread_id, NULL, front_thread, &params);
+        if (rc != 0) {
+                LOGE("pthread_create(front_thread) err=%d", rc);
+                pthread_join(back_thread_id, NULL);
+                goto end;
+        }
+        sleep(1);
+        LOGE("stop back DUAL ");
+        rc = pthread_join(back_thread_id, NULL);
+        LOGE("stop front DUAL ");
+        /* Keep a failure from either join. */
+        rc |= pthread_join(front_thread_id, NULL);
+        LOGE("DUAL end \n");
+
+end:
+        if(rc == 0) {
+          printf("\nPassed\n");
+        }else{
+          printf("\nFailed\n");
+        }
+        LOGD("END, rc = %d\n",  rc);
+        return rc;
+}
+
+/*  Test case m139572468 : Open & start in 2 concurrent pthread*/
+/* Concurrent dual-camera stress: back thread runs 5 launches x 5
+ * preview cycles with 5 YUV snapshots each; front thread runs 5
+ * launches x 5 RDI cycles (no snapshots).  Returns 0 on success. */
+int mm_app_dtc_8(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+
+        pthread_t back_thread_id, front_thread_id;
+        struct test_case_params bparams, fparams;
+        memset(&bparams, 0, sizeof(struct test_case_params));
+        memset(&fparams, 0, sizeof(struct test_case_params));
+        bparams.launch = 5;
+        bparams.preview = 5;
+        bparams.snapshot= 5;
+        fparams.launch = 5;
+        fparams.preview = 5;
+        printf("\n Verifying Preview on back Camera and RDI on Front camera 8...\n");
+
+        LOGE("start back DUAL ");
+        /* pthread_join on a thread that was never created is undefined
+         * behaviour, so check the pthread_create results. */
+        rc = pthread_create(&back_thread_id, NULL, back_thread, &bparams);
+        if (rc != 0) {
+                LOGE("pthread_create(back_thread) err=%d", rc);
+                goto end;
+        }
+        LOGE("start front DUAL ");
+        rc = pthread_create(&front_thread_id, NULL, front_thread, &fparams);
+        if (rc != 0) {
+                LOGE("pthread_create(front_thread) err=%d", rc);
+                pthread_join(back_thread_id, NULL);
+                goto end;
+        }
+        sleep(1);
+        LOGE("stop back DUAL ");
+        rc = pthread_join(back_thread_id, NULL);
+        LOGE("stop front DUAL ");
+        /* Keep a failure from either join. */
+        rc |= pthread_join(front_thread_id, NULL);
+        LOGE("DUAL end \n");
+
+end:
+        if(rc == 0)
+          printf("\nPassed\n");
+        else
+          printf("\nFailed\n");
+        LOGD("END, rc = %d\n",  rc);
+        return rc;
+}
+
+/*  Test case m1395724c68 : Open & start in 2 concurrent pthread*/
+/* Concurrent dual-camera stress: same scenario as mm_app_dtc_8, but
+ * the front thread additionally takes 5 RDI snapshots per RDI cycle
+ * (fparams.snapshot = 5).
+ * NOTE(review): rc is overwritten by each pthread call, so only the
+ * final pthread_join result decides Passed/Failed, and the 'end'
+ * label is never reached via goto. */
+int mm_app_dtc_9(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int result = 0;
+
+        pthread_t back_thread_id, front_thread_id;
+        struct test_case_params bparams, fparams;
+        memset(&bparams, 0, sizeof(struct test_case_params));
+        memset(&fparams, 0, sizeof(struct test_case_params));
+        bparams.launch = 5;
+        bparams.preview = 5;
+        bparams.snapshot= 5;
+        fparams.launch = 5;
+        fparams.preview = 5;
+        fparams.snapshot = 5;
+        printf("\n Verifying Preview on back Camera and RDI on Front camera 9...\n");
+
+        LOGE("start back DUAL ");
+        rc = pthread_create(&back_thread_id, NULL, back_thread, &bparams);
+        LOGE("start front DUAL ");
+        rc = pthread_create(&front_thread_id, NULL, front_thread, &fparams);
+        sleep(1);
+        LOGE("stop back DUAL ");
+        rc = pthread_join(back_thread_id, NULL);
+        LOGE("stop front DUAL ");
+        rc = pthread_join(front_thread_id, NULL);
+        LOGE("DUAL end \n");
+
+end:
+        if(rc == 0) {
+          printf("\nPassed\n");
+        }else{
+          printf("\nFailed\n");
+        }
+        LOGD("END, rc = %d\n",  rc);
+        return rc;
+}
+
+/*  Test case m13ab572468 : Open & start in 2 concurrent pthread*/
+/* Concurrent dual-camera stress: back thread adds 5 recording
+ * start/stop cycles per preview (bparams.recording = 5, no snapshots);
+ * front thread runs plain RDI cycles.
+ * NOTE(review): as in dtc_9, rc is overwritten by each pthread call
+ * and the 'end' label is never reached via goto. */
+int mm_app_dtc_10(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int result = 0;
+
+        pthread_t back_thread_id, front_thread_id;
+        struct test_case_params bparams, fparams;
+        memset(&bparams, 0, sizeof(struct test_case_params));
+        memset(&fparams, 0, sizeof(struct test_case_params));
+        bparams.launch = 5;
+        bparams.preview = 5;
+        bparams.recording= 5;
+        fparams.launch = 5;
+        fparams.preview = 5;
+        printf("\n Verifying Preview on back Camera and RDI on Front camera 10...\n");
+
+        LOGE("start back DUAL ");
+        rc = pthread_create(&back_thread_id, NULL, back_thread, &bparams);
+        LOGE("start front DUAL ");
+        rc = pthread_create(&front_thread_id, NULL, front_thread, &fparams);
+        sleep(1);
+        LOGE("stop back DUAL ");
+        rc = pthread_join(back_thread_id, NULL);
+        LOGE("stop front DUAL ");
+        rc = pthread_join(front_thread_id, NULL);
+        LOGE("DUAL end \n");
+end:
+        if(rc == 0) {
+          printf("\nPassed\n");
+        }else{
+          printf("\nFailed\n");
+        }
+        LOGD("END, rc = %d\n",  rc);
+        return rc;
+}
+
+/*  Test case m13ab5724c68 : Open & start in 2 concurrent pthread*/
+/* Concurrent dual-camera stress: back thread runs recording cycles
+ * (bparams.recording = 5) while the front thread takes RDI snapshots
+ * (fparams.snapshot = 5) - the union of the dtc_9 and dtc_10 loads.
+ * NOTE(review): rc is overwritten by each pthread call and the 'end'
+ * label is never reached via goto. */
+int mm_app_dtc_11(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int result = 0;
+
+        pthread_t back_thread_id, front_thread_id;
+        struct test_case_params bparams, fparams;
+        memset(&bparams, 0, sizeof(struct test_case_params));
+        memset(&fparams, 0, sizeof(struct test_case_params));
+        bparams.launch = 5;
+        bparams.preview = 5;
+        bparams.recording= 5;
+        fparams.launch = 5;
+        fparams.preview = 5;
+        fparams.snapshot = 5;
+        printf("\n Verifying Preview on back Camera and RDI on Front camera 11...\n");
+
+        LOGE("start back DUAL ");
+        rc = pthread_create(&back_thread_id, NULL, back_thread, &bparams);
+        LOGE("start front DUAL ");
+        rc = pthread_create(&front_thread_id, NULL, front_thread, &fparams);
+        sleep(1);
+        LOGE("stop back DUAL ");
+        rc = pthread_join(back_thread_id, NULL);
+        LOGE("stop front DUAL ");
+        rc = pthread_join(front_thread_id, NULL);
+        LOGE("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        LOGD("END, rc = %d\n",  rc);
+        return rc;
+}
+
+/*  Test case m1728 : Open & start in 2 concurrent pthread*/
+/* Concurrent open/close stress: 15 launch iterations per thread with
+ * preview/snapshot/recording counts left at 0, so each worker only
+ * opens and closes its camera repeatedly (the inner loops in
+ * front_thread/back_thread never run).
+ * NOTE(review): rc is overwritten by each pthread call and the 'end'
+ * label is never reached via goto. */
+int mm_app_dtc_12(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int result = 0;
+
+        pthread_t back_thread_id, front_thread_id;
+        struct test_case_params bparams, fparams;
+        memset(&bparams, 0, sizeof(struct test_case_params));
+        memset(&fparams, 0, sizeof(struct test_case_params));
+        bparams.launch = 15;
+        fparams.launch = 15;
+        printf("\n Verifying Preview on back Camera and RDI on Front camera 12...\n");
+
+        LOGE("start back DUAL ");
+        rc = pthread_create(&back_thread_id, NULL, back_thread, &bparams);
+        LOGE("start front DUAL ");
+        rc = pthread_create(&front_thread_id, NULL, front_thread, &fparams);
+        sleep(1);
+        LOGE("stop back DUAL ");
+        rc = pthread_join(back_thread_id, NULL);
+        LOGE("stop front DUAL ");
+        rc = pthread_join(front_thread_id, NULL);
+        LOGE("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        LOGD("END, rc = %d\n",  rc);
+        return rc;
+}
+
+/*  Test case 2413(ab)5768
+ *  Test the dual camera usecase. We startPreview on front camera,
+ *  but backend will allocate RDI buffers and start front camera in
+ *  RDI streaming mode. It then diverts RDI frames, converts them into YUV 420
+ *  through C2D and generate preview data in the buffers allocated here.
+ *  Back camera will use the pixel interface as usual.
+ */
+
+/* See the block comment above: front camera previews via the RDI
+ * divert path while the back camera is repeatedly opened, previewed,
+ * recorded and closed.  Returns 0 on success.
+ * NOTE(review): on a failure inside the loop the front camera (and a
+ * possibly open back camera) are left open. */
+int mm_app_dtc_13(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j,k;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n 13. Verifying Preview + Recording on back Camera and Preview(through RDI) on Front camera\n");
+        LOGE("DUAL open front camera \n");
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_open() front camera err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                LOGE("system_dimension_set() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        LOGE("DUAL start camera Preview for front \n");
+        if( MM_CAMERA_OK != (rc = startPreview(front_camera))) {
+               LOGE(" front camera startPreview() err=%d\n",  rc);
+               goto end;
+        }
+        mm_camera_app_wait();
+        usleep(20*1000);
+
+        /* Full back-camera lifecycle (open .. close) per iteration
+         * while the front preview keeps running. */
+        for (k = 0; k < MM_QCAMERA_APP_INTERATION ; k++){
+          LOGE("DUAL open back camera %d \n",k);
+          if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                 LOGE("mm_app_open() back camera err=%d\n", rc);
+                 rc = -1;
+                 goto end;
+          }
+
+          if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                 LOGE("system_dimension_set() err=%d\n", rc);
+                 rc = -1;
+                 goto end;
+          }
+
+          LOGE("DUAL start camera Preview for back \n");
+          if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                LOGE(" back camera startPreview() err=%d\n",  rc);
+                 goto end;
+          }
+          usleep(30*1000);
+
+          /* Recording errors break out and fall through to the
+           * stopPreview/close teardown below. */
+          for (j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+             LOGE("DUAL start camera record for back Iteration %d \n", j);
+             if ( MM_CAMERA_OK != (rc = startRecording(back_camera))) {
+                 LOGE(" StartVideorecording() err=%d\n",  rc);
+                 break;
+             }
+
+             mm_camera_app_wait();
+             /* 10 seconds of recording per iteration. */
+             usleep(10*1000*1000);
+             LOGE("DUAL stop camera record for back Iteration %d\n", j);
+             if ( MM_CAMERA_OK != (rc = stopRecording(back_camera))) {
+                 LOGE(" Stopvideorecording() err=%d\n",  rc);
+                 break;
+             }
+          }
+          usleep(10*1000);
+
+          LOGE("DUAL stop camera Preview for back \n");
+          if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+                 LOGE(" stopPreview() backcamera err=%d\n",  rc);
+                 goto end;
+          }
+          usleep(10*1000);
+
+          LOGE("DUAL close back camera\n");
+          if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+                 LOGE("mm_app_close() err=%d\n", rc);
+                 rc = -1;
+                 goto end;
+          }
+          usleep(20*1000);
+        }
+        LOGE("DUAL stop camera Preview for Rdi \n");
+        if( MM_CAMERA_OK != (rc = stopPreview(front_camera))) {
+                LOGE(" stopPreview() frontcamera err=%d\n",  rc);
+                goto end;
+        }
+        usleep(10*1000);
+        LOGE("DUAL close front camera \n");
+        if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_close() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        LOGE("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        LOGD("END, rc = %d\n",  rc);
+        return rc;
+}
+
+/*Below 6  are reference test cases just to test the open path for dual camera*/
+/* Reference open-path test "1243": open back (1), open front (2),
+ * start RDI on front (4), start preview on back (3).  Like the other
+ * reference cases it deliberately leaves both cameras open and
+ * streaming - it only verifies that this ordering succeeds. */
+int mm_app_dtc_1243(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+        LOGE("DUAL open back camera \n");
+        /* Capture return values so the logged error is the real one. */
+        if((rc = mm_app_open(back_camera)) != MM_CAMERA_OK) {
+                LOGE("mm_app_open() back camera err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        if((rc = system_dimension_set(back_camera)) != MM_CAMERA_OK){
+                LOGE("system_dimension_set() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        LOGE("DUAL open front camera \n");
+        if((rc = mm_app_open(front_camera)) != MM_CAMERA_OK) {
+                LOGE("mm_app_open() front camera err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        if((rc = system_dimension_set(front_camera)) != MM_CAMERA_OK){
+                LOGE("system_dimension_set() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        LOGE("DUAL start camera Rdi for front \n");
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+                LOGE(" startRdi() front camera err=%d\n",  rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+
+        LOGE("DUAL start camera Preview for back \n");
+
+        if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                LOGE(" startPreview() back camera err=%d\n",  rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+        LOGE("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        LOGD("END, rc = %d\n",  rc);
+        return rc;
+}
+
+/* Reference open-path test "2134": open front (2), open back (1),
+ * start preview on back (3), start RDI on front (4).  Deliberately
+ * leaves both cameras open and streaming.  The original log strings
+ * named the wrong camera/operation throughout; fixed to match the
+ * calls actually made. */
+int mm_app_dtc_2134(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+        LOGE("DUAL open front camera \n");
+        if((rc = mm_app_open(front_camera)) != MM_CAMERA_OK) {
+                LOGE("mm_app_open() front camera err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        if((rc = system_dimension_set(front_camera)) != MM_CAMERA_OK){
+                LOGE("system_dimension_set() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        LOGE("DUAL open back camera \n");
+        if((rc = mm_app_open(back_camera)) != MM_CAMERA_OK) {
+                LOGE("mm_app_open() back camera err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        if((rc = system_dimension_set(back_camera)) != MM_CAMERA_OK){
+                LOGE("system_dimension_set() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        LOGE("DUAL start camera Preview for back \n");
+        if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                LOGE(" startPreview() back camera err=%d\n",  rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+
+        LOGE("DUAL start camera Rdi for front \n");
+
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+                LOGE(" startRdi() front camera err=%d\n",  rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+        LOGE("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        LOGD("END, rc = %d\n",  rc);
+        return rc;
+}
+/* Reference open-path test "2143": open front (2), open back (1),
+ * start RDI on front (4), start preview on back (3).  Deliberately
+ * leaves both cameras open and streaming.
+ * NOTE(review): several log strings name the wrong camera/operation
+ * (e.g. "back camera" while opening the front); left untouched here. */
+int mm_app_dtc_2143(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+        LOGE("DUAL open front camera \n");
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_open() back camera err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                LOGE("system_dimension_set() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        LOGE("DUAL open back camera \n");
+        if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_open() front camera err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                LOGE("system_dimension_set() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        LOGE("DUAL start camera rdi for front \n");
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+               LOGE(" back camera startPreview() err=%d\n",  rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+
+        LOGE("DUAL start camera preview for back \n");
+
+        if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                LOGE(" startPreview() backcamera err=%d\n",  rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+        LOGE("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        LOGD("END, rc = %d\n",  rc);
+        return rc;
+}
+
+/* Reference open-path test "2413": open front (2), start RDI on front
+ * (4), open back (1), start preview on back (3) - the back camera is
+ * opened only after the front is already streaming.  Deliberately
+ * leaves both cameras open and streaming.
+ * NOTE(review): several log strings name the wrong camera/operation;
+ * left untouched here. */
+int mm_app_dtc_2413(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+        LOGE("DUAL open front camera \n");
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_open() back camera err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                LOGE("system_dimension_set() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        LOGE("DUAL start camera rdi for front \n");
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+               LOGE(" back camera startPreview() err=%d\n",  rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+
+        LOGE("DUAL open back camera \n");
+        if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_open() front camera err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                LOGE("system_dimension_set() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        LOGE("DUAL start camera preview for back \n");
+
+        if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                LOGE(" startPreview() backcamera err=%d\n",  rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+        LOGE("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        LOGD("END, rc = %d\n",  rc);
+        return rc;
+}
+
+/* Reference open-path test "1234": open back (1), open front (2),
+ * start preview on back (3), start RDI on front (4).  Deliberately
+ * leaves both cameras open and streaming.
+ * NOTE(review): the startRdi failure message says "startPreview()
+ * backcamera"; left untouched here. */
+int mm_app_dtc_1234(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+        LOGE("DUAL open back camera \n");
+        if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_open() back camera err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                LOGE("system_dimension_set() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        LOGE("DUAL open front camera \n");
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_open() front camera err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                LOGE("system_dimension_set() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+        LOGE("DUAL start camera preview for back \n");
+        if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+               LOGE(" back camera startPreview() err=%d\n",  rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+
+        LOGE("DUAL start camera rdi for front \n");
+
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+                LOGE(" startPreview() backcamera err=%d\n",  rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+        LOGE("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        LOGD("END, rc = %d\n",  rc);
+        return rc;
+}
+
+/* Reference open-path test "1324": open back (1), start preview on
+ * back (3), open front (2), start RDI on front (4) - the front camera
+ * is opened while the back preview is already running.  Deliberately
+ * leaves both cameras open and streaming.
+ * NOTE(review): mm_camera_app_wait() after the back preview is
+ * commented out; only a 1 s sleep separates preview start from the
+ * front-camera open. */
+int mm_app_dtc_1324(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+        LOGE("DUAL start back camera \n");
+        if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_open() back camera err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                LOGE("system_dimension_set() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        LOGE("DUAL start camera preview for back \n");
+        if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                LOGE(" back camera startPreview() err=%d\n",  rc);
+                goto end;
+        }
+        //mm_camera_app_wait();
+        sleep(1);
+        LOGE("DUAL start front camera \n");
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                LOGE("mm_app_open() front camera err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+
+       if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                LOGE("system_dimension_set() err=%d\n", rc);
+                rc = -1;
+                goto end;
+        }
+        LOGE("DUAL start rdi preview \n");
+
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+                LOGE(" startPreview() backcamera err=%d\n",  rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+        LOGE("DUAL end \n");
+
+end:
+        if(rc == 0) {
+          printf("\nPassed\n");
+        }else{
+          printf("\nFailed\n");
+        }
+        LOGD("END, rc = %d\n",  rc);
+        return rc;
+}
+
+/* single camera test cases*/
+/* Single pass: preview on the back camera, RDI on the front camera,
+ * then stop both streams and close both cameras.
+ * Returns 0 on success, -1 or an error code on failure.
+ * Fix: the original stopped preview on and closed only
+ * my_cam_app.cam_open (presumably the most recently opened camera),
+ * which leaked the other handle and stopped preview on the wrong
+ * camera - preview was started on back_camera.  Stop/close both
+ * cameras explicitly instead. */
+int mm_app_dtc_s_0(mm_camera_app_t *cam_apps)
+{
+    int rc = MM_CAMERA_OK;
+    int front_camera = 1;
+    int back_camera = 0;
+
+    printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+
+    if((rc = mm_app_open(back_camera)) != MM_CAMERA_OK) {
+        LOGE("mm_app_open() back camera err=%d\n", rc);
+        rc = -1;
+        goto end;
+    }
+    if((rc = system_dimension_set(back_camera)) != MM_CAMERA_OK){
+        LOGE("system_dimension_set() err=%d\n", rc);
+        rc = -1;
+        goto end;
+    }
+
+    if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+        LOGE(" back camera startPreview() err=%d\n",  rc);
+        goto end;
+    }
+
+    mm_camera_app_wait();
+    if((rc = mm_app_open(front_camera)) != MM_CAMERA_OK) {
+        LOGE("mm_app_open() front camera err=%d\n", rc);
+        rc = -1;
+        goto end;
+    }
+    if((rc = system_dimension_set(front_camera)) != MM_CAMERA_OK){
+        LOGE("system_dimension_set() err=%d\n", rc);
+        rc = -1;
+        goto end;
+    }
+
+    if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+        LOGE(" front camera startRdi() err=%d\n",  rc);
+        goto end;
+    }
+    mm_camera_app_wait();
+
+    if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+        LOGE(" front camera stopRdi() err=%d\n",  rc);
+        goto end;
+    }
+
+    /* Preview was started on the back camera, so stop it there. */
+    if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+        LOGD(" back camera stopPreview() err=%d\n",  rc);
+        goto end;
+    }
+
+    if((rc = mm_app_close(front_camera)) != MM_CAMERA_OK) {
+        LOGE("mm_app_close() front camera err=%d\n", rc);
+        rc = -1;
+        goto end;
+    }
+    if((rc = mm_app_close(back_camera)) != MM_CAMERA_OK) {
+        LOGE("mm_app_close() back camera err=%d\n", rc);
+        rc = -1;
+        goto end;
+    }
+end:
+    if(rc == 0) {
+        printf("\nPassed\n");
+    }else{
+        printf("\nFailed\n");
+    }
+    LOGD("END, rc = %d\n",  rc);
+    return rc;
+}
+
+/* Single-camera snapshot sweep: for every detected camera, open it,
+ * start preview, take MM_QCAMERA_APP_INTERATION YUV snapshots, stop
+ * preview and close.  'result' counts successful snapshots per camera
+ * and must reach the full iteration count for the camera to pass.
+ * NOTE(review): my_cam_app.cam_open is presumably the handle/index
+ * set by mm_app_open(i) - confirm against mm_app_open's definition. */
+int mm_app_dtc_s_1(mm_camera_app_t *cam_apps)
+{
+    int rc = MM_CAMERA_OK;
+    int i,j;
+    int result = 0;
+
+    printf("\n Verifying Snapshot on front and back camera...\n");
+    for(i = 0; i < cam_apps->num_cameras; i++) {
+        if( mm_app_open(i) != MM_CAMERA_OK) {
+            LOGE("mm_app_open() err=%d\n", rc);
+            rc = -1;
+            goto end;
+        }
+        if(system_dimension_set(my_cam_app.cam_open) != MM_CAMERA_OK){
+            LOGE("system_dimension_set() err=%d\n", rc);
+            rc = -1;
+            goto end;
+        }
+
+        if( MM_CAMERA_OK != (rc = startPreview(my_cam_app.cam_open))) {
+                LOGE(" startPreview() err=%d\n",  rc);
+                break;
+        }
+        for(j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+            if( MM_CAMERA_OK != (rc = takePicture_yuv(my_cam_app.cam_open))) {
+                LOGE(" TakePicture() err=%d\n",  rc);
+                break;
+            }
+            /*if(mm_camera_app_timedwait() == ETIMEDOUT) {
+                LOGE(" Snapshot/Preview Callback not received in time or qbuf Faile\n");
+                break;
+            }*/
+            mm_camera_app_wait();
+            result++;
+        }
+        /* NOTE(review): message says startPreview but this is the
+         * stopPreview failure path. */
+        if( MM_CAMERA_OK != (rc = stopPreview(my_cam_app.cam_open))) {
+            LOGD(" startPreview() err=%d\n",  rc);
+            break;
+        }
+        if( mm_app_close(my_cam_app.cam_open) != MM_CAMERA_OK) {
+            LOGE("mm_app_close() err=%d\n", rc);
+            rc = -1;
+            goto end;
+        }
+        if(result != MM_QCAMERA_APP_INTERATION) {
+            printf(" Snapshot Start/Stop Fails for Camera %d in %d iteration",  i,j);
+            rc = -1;
+            break;
+        }
+
+        /* Reset the per-camera success counter. */
+        result = 0;
+    }
+end:
+    if(rc == 0) {
+        printf("\t***Passed***\n");
+    }else{
+        printf("\t***Failed***\n");
+    }
+    LOGD("END, rc = %d\n",  rc);
+    return rc;
+}
+
+int mm_app_dtc_s_2(mm_camera_app_t *cam_apps)
+{
+    int rc = MM_CAMERA_OK;
+    int i,j;
+    int result = 0;
+
+    printf("\n Verifying Video on front and back camera...\n");
+    for(i = 0; i < cam_apps->num_cameras; i++) {
+        if( mm_app_open(i) != MM_CAMERA_OK) {
+            LOGE("mm_app_open() err=%d\n", rc);
+            rc = -1;
+            goto end;
+        }
+        if(system_dimension_set(my_cam_app.cam_open) != MM_CAMERA_OK){
+            LOGE("system_dimension_set() err=%d\n", rc);
+            rc = -1;
+            goto end;
+        }
+
+        if( MM_CAMERA_OK != (rc = startPreview(my_cam_app.cam_open))) {
+            LOGE(" startPreview() err=%d\n",  rc);
+            break;
+        }
+        for(j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+            if( MM_CAMERA_OK != (rc = startRecording(my_cam_app.cam_open))) {
+                LOGE(" StartVideorecording() err=%d\n",  rc);
+                break;
+            }
+
+            /*if(mm_camera_app_timedwait() == ETIMEDOUT) {
+            LOGE(" Video Callback not received in time\n");
+            break;
+            }*/
+            mm_camera_app_wait();
+            if( MM_CAMERA_OK != (rc = stopRecording(my_cam_app.cam_open))) {
+                LOGE(" Stopvideorecording() err=%d\n",  rc);
+                break;
+            }
+            result++;
+        }
+        if( MM_CAMERA_OK != (rc = stopPreview(my_cam_app.cam_open))) {
+            LOGD(" startPreview() err=%d\n",  rc);
+            break;
+        }
+        if( mm_app_close(my_cam_app.cam_open) != MM_CAMERA_OK) {
+            LOGE("mm_app_close() err=%d\n", rc);
+            rc = -1;
+            goto end;
+        }
+        if(result != MM_QCAMERA_APP_INTERATION) {
+            printf(" Video Start/Stop Fails for Camera %d in %d iteration",  i,j);
+            rc = -1;
+            break;
+        }
+
+        result = 0;
+    }
+end:
+    if(rc == 0) {
+        printf("\nPassed\n");
+    }else{
+        printf("\nFailed\n");
+    }
+    LOGD("END, rc = %d\n",  rc);
+    return rc;
+}
+
+int mm_app_dtc_s_3(mm_camera_app_t *cam_apps)
+{
+    int rc = MM_CAMERA_OK;
+    int i,j;
+    int result = 0;
+
+    printf("\n Verifying RDI Stream on front and back camera...\n");
+    if(cam_apps->num_cameras == 0) {
+        LOGE("Query Failed: Num of cameras = %d\n", cam_apps->num_cameras);
+        rc = -1;
+        goto end;
+    }
+    for(i = 0; i < cam_apps->num_cameras; i++) {
+        if( mm_app_open(i) != MM_CAMERA_OK) {
+            LOGE("mm_app_open() err=%d\n", rc);
+            rc = -1;
+            goto end;
+        }
+        if(system_dimension_set(my_cam_app.cam_open) != MM_CAMERA_OK){
+            LOGE("system_dimension_set() err=%d\n", rc);
+            rc = -1;
+            goto end;
+        }
+        for(j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+            if( MM_CAMERA_OK != (rc = startRdi(my_cam_app.cam_open))) {
+                LOGE(" StartVideorecording() err=%d\n",  rc);
+                break;
+            }
+
+            /*if(mm_camera_app_timedwait() == ETIMEDOUT) {
+            LOGE(" Video Callback not received in time\n");
+            break;
+            }*/
+            mm_camera_app_wait();
+            if( MM_CAMERA_OK != (rc = stopRdi(my_cam_app.cam_open))) {
+                LOGE(" Stopvideorecording() err=%d\n",  rc);
+                break;
+            }
+            result++;
+        }
+        if( mm_app_close(my_cam_app.cam_open) != MM_CAMERA_OK) {
+            LOGE("mm_app_close() err=%d\n", rc);
+            rc = -1;
+            goto end;
+        }
+        if(result != MM_QCAMERA_APP_INTERATION) {
+            printf(" Video Start/Stop Fails for Camera %d in %d iteration",  i,j);
+            rc = -1;
+            break;
+        }
+
+        result = 0;
+    }
+end:
+    if(rc == 0) {
+        printf("\nPassed\n");
+    }else{
+        printf("\nFailed\n");
+    }
+    LOGD("END, rc = %d\n",  rc);
+    return rc;
+}
+
+/*Stats Test Case*/
+int mm_app_dtc_s_5(mm_camera_app_t *cam_apps)
+{
+    int rc = MM_CAMERA_OK;
+    int i,j;
+    int result = 0;
+    int front_camera = 1;
+    int back_camera = 0;
+
+    printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+
+    if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+        LOGE("mm_app_open() back camera err=%d\n", rc);
+        rc = -1;
+        goto end;
+    }
+    if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+    LOGE("system_dimension_set() err=%d\n", rc);
+        rc = -1;
+        goto end;
+    }
+
+    if( MM_CAMERA_OK != (rc = startStats(back_camera))) {
+        LOGE(" back camera startPreview() err=%d\n",  rc);
+        goto end;
+    }
+
+    mm_camera_app_wait();
+
+    if( MM_CAMERA_OK != (rc = stopStats(my_cam_app.cam_open))) {
+        LOGD(" startPreview() err=%d\n",  rc);
+        goto end;
+    }
+
+    if( mm_app_close(my_cam_app.cam_open) != MM_CAMERA_OK) {
+        LOGE("mm_app_close() err=%d\n", rc);
+        rc = -1;
+        goto end;
+    }
+end:
+    if(rc == 0) {
+        printf("\nPassed\n");
+    }else{
+        printf("\nFailed\n");
+    }
+    LOGD("END, rc = %d\n",  rc);
+    return rc;
+}
+
+int mm_app_gen_dual_test_cases()
+{
+    int tc = 0;
+    memset(mm_app_tc, 0, sizeof(mm_app_tc));
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_0;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_1;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_2;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_3;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_4;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_5;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_6;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_7;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_8;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_9;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_10;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_11;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_12;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_13;
+
+    return tc;
+}
+
+int mm_app_dual_test_entry(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK;
+    int i, tc = 0;
+    int cam_id = 0;
+
+    tc = mm_app_gen_dual_test_cases();
+    LOGD("Running %d test cases\n",tc);
+    for(i = 0; i < tc; i++) {
+        mm_app_tc[i].r = mm_app_tc[i].f(cam_app);
+        if(mm_app_tc[i].r != MM_CAMERA_OK) {
+            printf(" test case %d error = %d, abort unit testing engine!!!!\n",
+                     i, mm_app_tc[i].r);
+            rc = mm_app_tc[i].r;
+            goto end;
+        }
+    }
+end:
+    printf("nTOTAL_TSET_CASE = %d, NUM_TEST_RAN = %d, rc=%d\n", tc, i, rc);
+    return rc;
+}
+
+
+
+
diff --git a/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_main_menu.c b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_main_menu.c
new file mode 100644
index 0000000..0865c6f
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_main_menu.c
@@ -0,0 +1,2047 @@
+/* Copyright (c) 2013-2014, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <ctype.h>
+#include <errno.h>
+
+// Camera dependencies
+#include "mm_qcamera_main_menu.h"
+#include "mm_qcamera_app.h"
+#include "mm_qcamera_dbg.h"
+
+/*===========================================================================
+ * Macro
+ *===========================================================================*/
+#define MIN(X,Y) ((X) < (Y) ? (X) : (Y))
+#define VIDEO_BUFFER_SIZE       (PREVIEW_WIDTH * PREVIEW_HEIGHT * 3/2)
+#define THUMBNAIL_BUFFER_SIZE   (THUMBNAIL_WIDTH * THUMBNAIL_HEIGHT * 3/2)
+#define SNAPSHOT_BUFFER_SIZE    (PICTURE_WIDTH * PICTURE_HEIGHT * 3/2)
+//TODO:check this Macros with current app.
+
+/*===========================================================================
+ * Defines
+ *===========================================================================*/
+//#define VIDEO_FRAMES_NUM      4
+#define THUMBNAIL_FRAMES_NUM  1
+#define SNAPSHOT_FRAMES_NUM   1
+#define MAX_NUM_FORMAT        32
+#define ZOOM_STEP             2
+#define ZOOM_MIN_VALUE        0
+#define EXPOSURE_COMPENSATION_MAXIMUM_NUMERATOR 12
+#define EXPOSURE_COMPENSATION_MINIMUM_NUMERATOR -12
+#define EXPOSURE_COMPENSATION_DEFAULT_NUMERATOR 0
+#define EXPOSURE_COMPENSATION_DENOMINATOR 6
+
+//TODO: find correct values of Contrast defines.
+#define CAMERA_MIN_CONTRAST    0
+#define CAMERA_DEF_CONTRAST    5
+#define CAMERA_MAX_CONTRAST    10
+#define CAMERA_CONTRAST_STEP   1
+
+//TODO: find correct values of Brightness defines.
+#define CAMERA_MIN_BRIGHTNESS  0
+#define CAMERA_DEF_BRIGHTNESS  3
+#define CAMERA_MAX_BRIGHTNESS  6
+#define CAMERA_BRIGHTNESS_STEP 1
+
+//TODO: find correct values of Saturation defines.
+#define CAMERA_MIN_SATURATION  0
+#define CAMERA_DEF_SATURATION  5
+#define CAMERA_MAX_SATURATION  10
+#define CAMERA_SATURATION_STEP 1
+
+#define CAMERA_MIN_SHARPNESS 0
+#define CAMERA_MAX_SHARPNESS 10
+#define CAMERA_DEF_SHARPNESS 5
+#define CAMERA_SHARPNESS_STEP 1
+
/* Top-level menu table: pairs each camera_action menu id with the label
 * printed by print_menu_preview_video(). Keypresses are mapped to
 * entries positionally ('A'.. then '1'.. after BASE_OFFSET), so the
 * order here defines the on-screen menu order. */
const CAMERA_MAIN_MENU_TBL_T camera_main_menu_tbl[] = {
  {START_PREVIEW,               "Start preview"},
  {STOP_PREVIEW,               "Stop preview/video"},
  {SET_WHITE_BALANCE,          "Set white balance mode"},
  {SET_TINTLESS_ENABLE,        "Set Tintless Enable"},
  {SET_TINTLESS_DISABLE,       "Set Tintless Disable"},
  {SET_EXP_METERING,           "Set exposure metering mode"},
  {GET_CTRL_VALUE,             "Get control value menu"},
  {TOGGLE_AFR,                 "Toggle auto frame rate. Default fixed frame rate"},
  {SET_ISO,                    "ISO changes."},
  {BRIGHTNESS_GOTO_SUBMENU,    "Brightness changes."},
  {CONTRAST_GOTO_SUBMENU,      "Contrast changes."},
  {EV_GOTO_SUBMENU,            "EV changes."},
  {SATURATION_GOTO_SUBMENU,    "Saturation changes."},
  {SET_ZOOM,                   "Set Digital Zoom."},
  {SET_SHARPNESS,              "Set Sharpness."},
  {TAKE_JPEG_SNAPSHOT,         "Take a snapshot"},
  {START_RECORDING,            "Start RECORDING"},
  {STOP_RECORDING,             "Stop RECORDING"},
  {BEST_SHOT,                  "Set best-shot mode"},
  {LIVE_SHOT,                  "Take a live snapshot"},
  {FLASH_MODES,                "Set Flash modes"},
  {TOGGLE_ZSL,                 "Toggle ZSL On/Off"},
  {TAKE_RAW_SNAPSHOT,          "Take RAW snapshot"},
  {SWITCH_SNAP_RESOLUTION,     "Select Jpeg resolution"},
  {TOGGLE_WNR,                 "Toggle Wavelet Denoise"},
  {EXIT,                       "Exit"}
};
+
/* Sensor selection submenu; the numeric field is an availability flag
 * (presumably filled in at runtime once cameras are enumerated —
 * TODO confirm against the code that queries num_cameras). */
CAMERA_SENSOR_MENU_TLB_T sensor_tbl[] = {
        {"Primary Camera",      0},
        {"Secondary Camera",    0},
        {"Camera Sensor 3",     0},
        {"Camera Sensor 4",     0}
};

/* Brightness submenu: one step up / one step down. */
const CAMERA_BRIGHTNESS_TBL_T brightness_change_tbl[] = {
  {INC_BRIGHTNESS, "Increase Brightness by one step."},
  {DEC_BRIGHTNESS, "Decrease Brightness by one step."},
};

/* Contrast submenu: one step up / one step down. */
const CAMERA_CONTRST_TBL_T contrast_change_tbl[] = {
  {INC_CONTRAST, "Increase Contrast by one step."},
  {DEC_CONTRAST, "Decrease Contrast by one step."},
};

/* Exposure-compensation (EV) submenu: one step up / one step down. */
const CAMERA_EV_TBL_T camera_EV_tbl[] = {
  {INCREASE_EV, "Increase EV by one step."},
  {DECREASE_EV, "Decrease EV by one step."},
};

/* Saturation submenu: one step up / one step down. */
const CAMERA_SATURATION_TBL_T camera_saturation_tbl[] = {
  {INC_SATURATION, "Increase Satuation by one step."},
  {DEC_SATURATION, "Decrease Satuation by one step."},
};

/* Sharpness submenu: one step up / one step down. */
const CAMERA_SHARPNESS_TBL_T camera_sharpness_tbl[] = {
  {INC_SHARPNESS, "Increase Sharpness."},
  {DEC_SHARPNESS, "Decrease Sharpness."},
};

/* White-balance submenu; entries map positionally to WB_* enum values
 * (next_menu() rejects anything >= WB_MAX). */
const WHITE_BALANCE_TBL_T white_balance_tbl[] = {
  {   WB_AUTO,               "White Balance - Auto"},
  {   WB_INCANDESCENT,       "White Balance - Incandescent"},
  {   WB_FLUORESCENT,        "White Balance - Fluorescent"},
  {   WB_WARM_FLUORESCENT,   "White Balance - Warm Fluorescent"},
  {   WB_DAYLIGHT,           "White Balance - Daylight"},
  {   WB_CLOUDY_DAYLIGHT,    "White Balance - Cloudy Daylight"},
  {   WB_TWILIGHT,           "White Balance - Twilight"},
  {   WB_SHADE,              "White Balance - Shade"},
};

/* "Get control value" submenu: which setting to read back. */
const GET_CTRL_TBL_T get_ctrl_tbl[] = {
  {     WHITE_BALANCE_STATE,            "Get white balance state (auto/off)"},
  {     WHITE_BALANCE_TEMPERATURE,      "Get white balance temperature"},
  {     BRIGHTNESS_CTRL,                "Get brightness value"},
  {     EV,                             "Get exposure value"},
  {     CONTRAST_CTRL,                  "Get contrast value"},
  {     SATURATION_CTRL,                "Get saturation value"},
  {     SHARPNESS_CTRL,                 "Get sharpness value"},
};

/* Exposure-metering submenu; bounded by AUTO_EXP_MAX in next_menu(). */
const EXP_METERING_TBL_T exp_metering_tbl[] = {
  {   AUTO_EXP_FRAME_AVG,          "Exposure Metering - Frame Average"},
  {   AUTO_EXP_CENTER_WEIGHTED,    "Exposure Metering - Center Weighted"},
  {   AUTO_EXP_SPOT_METERING,      "Exposure Metering - Spot Metering"},
  {   AUTO_EXP_SMART_METERING,     "Exposure Metering - Smart Metering"},
  {   AUTO_EXP_USER_METERING,      "Exposure Metering - User Metering"},
  {   AUTO_EXP_SPOT_METERING_ADV,  "Exposure Metering - Spot Metering Adv"},
  {   AUTO_EXP_CENTER_WEIGHTED_ADV,"Exposure Metering - Center Weighted Adv"},
};

/* ISO submenu; bounded by ISO_MAX in next_menu(). */
const ISO_TBL_T iso_tbl[] = {
  {   ISO_AUTO,   "ISO: Auto"},
  {   ISO_DEBLUR, "ISO: Deblur"},
  {   ISO_100,    "ISO: 100"},
  {   ISO_200,    "ISO: 200"},
  {   ISO_400,    "ISO: 400"},
  {   ISO_800,    "ISO: 800"},
  {   ISO_1600,   "ISO: 1600"},
};

/* Digital zoom submenu: one ZOOM_STEP in / out. */
const ZOOM_TBL_T zoom_tbl[] = {
  {   ZOOM_IN,  "Zoom In one step"},
  {   ZOOM_OUT, "Zoom Out one step"},
};

/* Best-shot (scene) mode submenu; bounded by BESTSHOT_MAX in next_menu(). */
const BESTSHOT_MODE_TBT_T bestshot_mode_tbl[] = {
  {BESTSHOT_AUTO,           "Bestshot Mode: Auto"},
  {BESTSHOT_ACTION,         "Bestshot Mode: Action"},
  {BESTSHOT_PORTRAIT,       "Bestshot Mode: Portrait"},
  {BESTSHOT_LANDSCAPE,      "Bestshot Mode: Landscape"},
  {BESTSHOT_NIGHT,          "Bestshot Mode: Night"},
  {BESTSHOT_NIGHT_PORTRAIT, "Bestshot Mode: Night Portrait"},
  {BESTSHOT_THEATRE,        "Bestshot Mode: Theatre"},
  {BESTSHOT_BEACH,          "Bestshot Mode: Beach"},
  {BESTSHOT_SNOW,           "Bestshot Mode: Snow"},
  {BESTSHOT_SUNSET,         "Bestshot Mode: Sunset"},
  {BESTSHOT_ANTISHAKE,      "Bestshot Mode: Antishake"},
  {BESTSHOT_FIREWORKS,      "Bestshot Mode: Fireworks"},
  {BESTSHOT_SPORTS,         "Bestshot Mode: Sports"},
  {BESTSHOT_PARTY,          "Bestshot Mode: Party"},
  {BESTSHOT_CANDLELIGHT,    "Bestshot Mode: Candlelight"},
  {BESTSHOT_ASD,            "Bestshot Mode: ASD"},
  {BESTSHOT_BACKLIGHT,      "Bestshot Mode: Backlight"},
  {BESTSHOT_FLOWERS,        "Bestshot Mode: Flowers"},
  {BESTSHOT_AR,             "Bestshot Mode: Augmented Reality"},
  {BESTSHOT_HDR,            "Bestshot Mode: HDR"},
};

/* Flash mode submenu; bounded by FLASH_MODE_MAX in next_menu(). */
const FLASH_MODE_TBL_T flashmodes_tbl[] = {
  {   FLASH_MODE_OFF,   "Flash Mode Off"},
  {   FLASH_MODE_AUTO,  "Flash Mode Auto"},
  {   FLASH_MODE_ON,    "Flash Mode On"},
  {   FLASH_MODE_TORCH, "Flash Mode Torch"},
};

/* Snapshot resolution submenu. Non-const: the trailing 'supported' flag
 * is consulted by next_menu() (MENU_ID_SWITCH_RES), presumably set at
 * runtime from sensor capabilities — TODO confirm. */
DIMENSION_TBL_T dimension_tbl[] = {
{VGA_WIDTH,      VGA_HEIGHT,      "VGA",   "Size: VGA <640x480>"   , 0},
{MP1_WIDTH,      MP1_HEIGHT,      "1MP",   "Size: 1MP <1280x960>"  , 0},
{MP5_WIDTH,      MP5_HEIGHT,      "5MP",   "Size: 5MP <2592x1944>",  0},
{MP8_WIDTH,      MP8_HEIGHT,      "8MP",   "Size: 8MP <3264x2448>",  0},
{MP12_WIDTH,     MP12_HEIGHT,     "12MP",  "Size: 12MP <4000x3000>", 0},
};
+
+/*===========================================================================
+ * Forward declarations
+ *===========================================================================*/
+//static void system_dimension_set(mm_camera_test_obj_t *test_obj);
+/*===========================================================================
+ * Static global variables
+ *===========================================================================*/
/* Holds the last parsed user input for the menu loop. */
USER_INPUT_DISPLAY_T input_display;
/* Non-zero once the preview/video resolution has been selected. */
int preview_video_resolution_flag = 0;

//TODO: default values.
#if 1
/* Current image-tuning values, seeded with the CAMERA_DEF_* defaults. */
int brightness = CAMERA_DEF_BRIGHTNESS;
int contrast = CAMERA_DEF_CONTRAST;
int saturation = CAMERA_DEF_SATURATION;
int sharpness = CAMERA_DEF_SHARPNESS;
#else
int brightness = 0;
int contrast = 0;
int saturation = 0;
int sharpness = 0;
#endif
//TODO: find new method to calculate ev.
//int32_t ev_numerator = EXPOSURE_COMPENSATION_DEFAULT_NUMERATOR;

//TODO:
//fps_mode_t fps_mode = FPS_MODE_FIXED;
int zoom_level;      // current zoom step
int zoom_max_value;  // upper bound for zoom_level
int cam_id;          // currently selected camera id
int is_rec = 0;      // non-zero while recording (changes the menu banner)


/* Main interactive loop, defined later in this file. */
static int submain();
+
+/*===========================================================================
+ * FUNCTION    - keypress_to_event -
+ *
+ * DESCRIPTION:
+ *==========================================================================*/
+int keypress_to_event(char keypress)
+{
+  int out_buf = INVALID_KEY_PRESS;
+  if ((keypress >= 'A' && keypress <= 'Z') ||
+    (keypress >= 'a' && keypress <= 'z')) {
+    out_buf = tolower(keypress);
+    out_buf = out_buf - 'a';
+  } else if (keypress >= '0' && keypress <= '9') {
+    out_buf = keypress - '0';
+  }
+  return out_buf;
+}
+
+int next_menu(menu_id_change_t current_menu_id, char keypress, camera_action_t * action_id_ptr, int * action_param)
+{
+  int output_to_event;
+  menu_id_change_t next_menu_id = MENU_ID_INVALID;
+  * action_id_ptr = ACTION_NO_ACTION;
+
+  output_to_event = keypress_to_event(keypress);
+  LOGD("current_menu_id=%d\n",current_menu_id);
+  printf("output_to_event=%d\n",output_to_event);
+  switch(current_menu_id) {
+    case MENU_ID_MAIN:
+      switch(output_to_event) {
+        case START_PREVIEW:
+          * action_id_ptr = ACTION_START_PREVIEW;
+          LOGD("START_PREVIEW\n");
+          break;
+        case STOP_PREVIEW:
+          * action_id_ptr = ACTION_STOP_PREVIEW;
+          LOGD("STOP_PREVIEW\n");
+          break;
+
+        case SET_WHITE_BALANCE:
+          next_menu_id = MENU_ID_WHITEBALANCECHANGE;
+          LOGD("next_menu_id = MENU_ID_WHITEBALANCECHANGE = %d\n", next_menu_id);
+          break;
+
+        case SET_TINTLESS_ENABLE:
+          * action_id_ptr = ACTION_SET_TINTLESS_ENABLE;
+          next_menu_id = MENU_ID_MAIN;
+          LOGD("next_menu_id = MENU_ID_TINTLESSENABLE = %d\n", next_menu_id);
+          break;
+
+        case SET_TINTLESS_DISABLE:
+          * action_id_ptr = ACTION_SET_TINTLESS_DISABLE;
+          next_menu_id = MENU_ID_MAIN;
+          LOGD("next_menu_id = MENU_ID_TINTLESSDISABLE = %d\n", next_menu_id);
+          break;
+
+        case SET_EXP_METERING:
+          next_menu_id = MENU_ID_EXPMETERINGCHANGE;
+          LOGD("next_menu_id = MENU_ID_EXPMETERINGCHANGE = %d\n", next_menu_id);
+          break;
+
+        case GET_CTRL_VALUE:
+          next_menu_id = MENU_ID_GET_CTRL_VALUE;
+          LOGD("next_menu_id = MENU_ID_GET_CTRL_VALUE = %d\n", next_menu_id);
+          break;
+
+        case BRIGHTNESS_GOTO_SUBMENU:
+          next_menu_id = MENU_ID_BRIGHTNESSCHANGE;
+          LOGD("next_menu_id = MENU_ID_BRIGHTNESSCHANGE = %d\n", next_menu_id);
+          break;
+
+        case CONTRAST_GOTO_SUBMENU:
+          next_menu_id = MENU_ID_CONTRASTCHANGE;
+          break;
+
+        case EV_GOTO_SUBMENU:
+          next_menu_id = MENU_ID_EVCHANGE;
+          break;
+
+        case SATURATION_GOTO_SUBMENU:
+          next_menu_id = MENU_ID_SATURATIONCHANGE;
+          break;
+
+        case TOGGLE_AFR:
+          * action_id_ptr = ACTION_TOGGLE_AFR;
+          LOGD("next_menu_id = MENU_ID_TOGGLEAFR = %d\n", next_menu_id);
+          break;
+
+        case SET_ISO:
+          next_menu_id = MENU_ID_ISOCHANGE;
+          LOGD("next_menu_id = MENU_ID_ISOCHANGE = %d\n", next_menu_id);
+          break;
+
+        case SET_ZOOM:
+          next_menu_id = MENU_ID_ZOOMCHANGE;
+          LOGD("next_menu_id = MENU_ID_ZOOMCHANGE = %d\n", next_menu_id);
+          break;
+
+        case BEST_SHOT:
+          next_menu_id = MENU_ID_BESTSHOT;
+          LOGD("next_menu_id = MENU_ID_BESTSHOT = %d\n", next_menu_id);
+          break;
+
+        case LIVE_SHOT:
+          * action_id_ptr = ACTION_TAKE_LIVE_SNAPSHOT;
+          LOGD("\nTaking Live snapshot\n");
+          break;
+
+        case FLASH_MODES:
+          next_menu_id = MENU_ID_FLASHMODE;
+          LOGD("next_menu_id = MENU_ID_FLASHMODE = %d\n", next_menu_id);
+          break;
+
+        case SET_SHARPNESS:
+          next_menu_id = MENU_ID_SHARPNESSCHANGE;
+          LOGD("next_menu_id = MENU_ID_SHARPNESSCHANGE = %d\n", next_menu_id);
+          break;
+
+        case SWITCH_SNAP_RESOLUTION:
+          next_menu_id = MENU_ID_SWITCH_RES;
+          LOGD("next_menu_id = MENU_ID_SWITCH_RES = %d\n", next_menu_id);
+          break;
+
+        case TAKE_JPEG_SNAPSHOT:
+          * action_id_ptr = ACTION_TAKE_JPEG_SNAPSHOT;
+          printf("\n Taking JPEG snapshot\n");
+          break;
+
+        case START_RECORDING:
+          * action_id_ptr = ACTION_START_RECORDING;
+          LOGD("Start recording\n");
+          break;
+        case STOP_RECORDING:
+          * action_id_ptr = ACTION_STOP_RECORDING;
+          LOGD("Stop recording\n");
+          break;
+        case TOGGLE_ZSL:
+          * action_id_ptr = ACTION_TOGGLE_ZSL;
+          LOGD("Toggle ZSL\n");
+          break;
+        case TAKE_RAW_SNAPSHOT:
+            * action_id_ptr = ACTION_TAKE_RAW_SNAPSHOT;
+            next_menu_id = MENU_ID_MAIN;
+            LOGD("Capture RAW\n");
+            break;
+        case TOGGLE_WNR:
+            * action_id_ptr = ACTION_TOGGLE_WNR;
+            next_menu_id = MENU_ID_MAIN;
+            LOGD("Toggle WNR");
+            break;
+        case EXIT:
+          * action_id_ptr = ACTION_EXIT;
+          LOGD("Exit \n");
+          break;
+        default:
+          next_menu_id = MENU_ID_MAIN;
+          LOGD("next_menu_id = MENU_ID_MAIN = %d\n", next_menu_id);
+          break;
+      }
+      break;
+
+    case MENU_ID_SWITCH_RES:
+        printf("MENU_ID_SWITCH_RES\n");
+        *action_id_ptr = ACTION_SWITCH_RESOLUTION;
+        *action_param = output_to_event;
+        int available_sizes = sizeof(dimension_tbl)/sizeof(dimension_tbl[0]);
+        if ( ( *action_param >= 0 ) &&
+             ( *action_param < available_sizes ) &&
+             ( dimension_tbl[*action_param].supported )) {
+            next_menu_id = MENU_ID_MAIN;
+        }
+        else {
+          next_menu_id = current_menu_id;
+        }
+        break;
+
+    case MENU_ID_SENSORS:
+        next_menu_id = MENU_ID_MAIN;
+        *action_id_ptr = ACTION_SWITCH_CAMERA;
+        *action_param = output_to_event;
+        break;
+
+    case MENU_ID_WHITEBALANCECHANGE:
+      printf("MENU_ID_WHITEBALANCECHANGE\n");
+      if (output_to_event >= WB_MAX) {
+        next_menu_id = current_menu_id;
+        * action_id_ptr = ACTION_NO_ACTION;
+      } else {
+        next_menu_id = MENU_ID_MAIN;
+        * action_id_ptr = ACTION_SET_WHITE_BALANCE;
+        * action_param = output_to_event;
+      }
+      break;
+
+    case MENU_ID_EXPMETERINGCHANGE:
+      printf("MENU_ID_EXPMETERINGCHANGE\n");
+      if (output_to_event >= AUTO_EXP_MAX) {
+        next_menu_id = current_menu_id;
+        * action_id_ptr = ACTION_NO_ACTION;
+      } else {
+        next_menu_id = MENU_ID_MAIN;
+        * action_id_ptr = ACTION_SET_EXP_METERING;
+        * action_param = output_to_event;
+      }
+      break;
+
+    case MENU_ID_GET_CTRL_VALUE:
+      printf("MENU_ID_GET_CTRL_VALUE\n");
+      * action_id_ptr = ACTION_GET_CTRL_VALUE;
+      if (output_to_event > 0 &&
+        output_to_event <= (int)(sizeof(get_ctrl_tbl)/sizeof(get_ctrl_tbl[0]))) {
+          next_menu_id = MENU_ID_MAIN;
+          * action_param = output_to_event;
+      }
+      else {
+        next_menu_id = current_menu_id;
+      }
+      break;
+
+    case MENU_ID_BRIGHTNESSCHANGE:
+      switch (output_to_event) {
+        case INC_BRIGHTNESS:
+          * action_id_ptr = ACTION_BRIGHTNESS_INCREASE;
+          next_menu_id = MENU_ID_MAIN;
+          break;
+
+        case DEC_BRIGHTNESS:
+          * action_id_ptr = ACTION_BRIGHTNESS_DECREASE;
+          next_menu_id = MENU_ID_MAIN;
+          break;
+
+        default:
+          next_menu_id = MENU_ID_BRIGHTNESSCHANGE;
+          break;
+      }
+      break;
+
+    case MENU_ID_CONTRASTCHANGE:
+      switch (output_to_event) {
+        case INC_CONTRAST:
+          * action_id_ptr = ACTION_CONTRAST_INCREASE;
+          next_menu_id = MENU_ID_MAIN;
+          break;
+
+        case DEC_CONTRAST:
+          * action_id_ptr = ACTION_CONTRAST_DECREASE;
+          next_menu_id = MENU_ID_MAIN;
+          break;
+
+        default:
+          next_menu_id = MENU_ID_CONTRASTCHANGE;
+          break;
+      }
+      break;
+
+    case MENU_ID_EVCHANGE:
+      switch (output_to_event) {
+        case INCREASE_EV:
+          * action_id_ptr = ACTION_EV_INCREASE;
+          next_menu_id = MENU_ID_MAIN;
+          break;
+
+        case DECREASE_EV:
+          * action_id_ptr = ACTION_EV_DECREASE;
+          next_menu_id = MENU_ID_MAIN;
+          break;
+
+        default:
+          next_menu_id = MENU_ID_EVCHANGE;
+          break;
+      }
+      break;
+
+    case MENU_ID_SATURATIONCHANGE:
+      switch (output_to_event) {
+        case INC_SATURATION:
+          * action_id_ptr = ACTION_SATURATION_INCREASE;
+          next_menu_id = MENU_ID_MAIN;
+          break;
+
+        case DEC_SATURATION:
+          * action_id_ptr = ACTION_SATURATION_DECREASE;
+          next_menu_id = MENU_ID_MAIN;
+          break;
+
+        default:
+          next_menu_id = MENU_ID_EVCHANGE;
+          break;
+      }
+      break;
+
+    case MENU_ID_ISOCHANGE:
+      printf("MENU_ID_ISOCHANGE\n");
+      if (output_to_event >= ISO_MAX) {
+        next_menu_id = current_menu_id;
+        * action_id_ptr = ACTION_NO_ACTION;
+      } else {
+        next_menu_id = MENU_ID_MAIN;
+        * action_id_ptr = ACTION_SET_ISO;
+        * action_param = output_to_event;
+      }
+      break;
+
+    case MENU_ID_ZOOMCHANGE:
+      * action_id_ptr = ACTION_SET_ZOOM;
+      if (output_to_event > 0 &&
+        output_to_event <= (int)(sizeof(zoom_tbl)/sizeof(zoom_tbl[0]))) {
+          next_menu_id = MENU_ID_MAIN;
+          * action_param = output_to_event;
+      } else {
+        next_menu_id = current_menu_id;
+      }
+      break;
+
+    case MENU_ID_SHARPNESSCHANGE:
+      switch (output_to_event) {
+        case INC_SHARPNESS:
+          * action_id_ptr = ACTION_SHARPNESS_INCREASE;
+          next_menu_id = MENU_ID_MAIN;
+          break;
+        case DEC_SHARPNESS:
+          * action_id_ptr = ACTION_SHARPNESS_DECREASE;
+          next_menu_id = MENU_ID_MAIN;
+          break;
+        default:
+          next_menu_id = MENU_ID_SHARPNESSCHANGE;
+          break;
+      }
+      break;
+
+    case MENU_ID_BESTSHOT:
+      if (output_to_event >= BESTSHOT_MAX) {
+        next_menu_id = current_menu_id;
+        * action_id_ptr = ACTION_NO_ACTION;
+      } else {
+        next_menu_id = MENU_ID_MAIN;
+        * action_id_ptr = ACTION_SET_BESTSHOT_MODE;
+        * action_param = output_to_event;
+      }
+      break;
+
+    case MENU_ID_FLASHMODE:
+      if (output_to_event >= FLASH_MODE_MAX) {
+        next_menu_id = current_menu_id;
+        * action_id_ptr = ACTION_NO_ACTION;
+      } else {
+        next_menu_id = MENU_ID_MAIN;
+        * action_id_ptr = ACTION_SET_FLASH_MODE;
+        * action_param = output_to_event;
+      }
+      break;
+
+    default:
+      LOGD("menu id is wrong: %d\n", current_menu_id);
+      break;
+  }
+
+  return next_menu_id;
+}
+
+/*===========================================================================
+ * FUNCTION    - print_menu_preview_video -
+ *
+ * DESCRIPTION:
+ * ===========================================================================*/
+static void print_menu_preview_video(void) {
+  unsigned int i;
+  if (!is_rec) {
+    printf("\n");
+    printf("===========================================\n");
+    printf("      Camera is in preview/video mode now        \n");
+    printf("===========================================\n\n");
+  } else {
+    printf("\n");
+    printf("===========================================\n");
+    printf("      Camera is in RECORDING mode now       \n");
+    printf("        Press 'Q' To Stop Recording          \n");
+    printf("        Press 'S' To Take Live Snapshot       \n");
+    printf("===========================================\n\n");
+  }
+  char menuNum = 'A';
+  for (i = 0; i < sizeof(camera_main_menu_tbl)/sizeof(camera_main_menu_tbl[0]); i++) {
+    if (i == BASE_OFFSET) {
+      menuNum = '1';
+    }
+
+    printf("%c.  %s\n", menuNum, camera_main_menu_tbl[i].menu_name);
+    menuNum++;
+  }
+
+  printf("\nPlease enter your choice: ");
+
+  return;
+}
+
+static void camera_preview_video_wb_change_tbl(void) {
+  unsigned int i;
+  printf("\n");
+  printf("==========================================================\n");
+  printf("      Camera is in white balance change mode       \n");
+  printf("==========================================================\n\n");
+
+  char submenuNum = 'A';
+  for (i = 0 ; i < sizeof(white_balance_tbl) /
+                   sizeof(white_balance_tbl[0]); i++) {
+        printf("%c.  %s\n", submenuNum, white_balance_tbl[i].wb_name);
+        submenuNum++;
+  }
+  printf("\nPlease enter your choice for White Balance modes: ");
+  return;
+}
+
+/* Print the "get control value" sub-menu from get_ctrl_tbl and prompt
+ * for a selection. */
+static void camera_preview_video_get_ctrl_value_tbl(void) {
+  const size_t count = sizeof(get_ctrl_tbl) / sizeof(get_ctrl_tbl[0]);
+  size_t idx;
+  char label = 'A';
+
+  printf("\n");
+  printf("==========================================================\n");
+  printf("      Camera is in get control value mode       \n");
+  printf("==========================================================\n\n");
+
+  for (idx = 0; idx < count; idx++) {
+    printf("%c.  %s\n", label, get_ctrl_tbl[idx].get_ctrl_name);
+    label++;
+  }
+  printf("\nPlease enter your choice for control value you want to get: ");
+}
+
+/* Print the exposure-metering sub-menu from exp_metering_tbl and
+ * prompt for a selection. */
+static void camera_preview_video_exp_metering_change_tbl(void) {
+  const size_t count = sizeof(exp_metering_tbl) / sizeof(exp_metering_tbl[0]);
+  size_t idx;
+  char label = 'A';
+
+  printf("\n");
+  printf("==========================================================\n");
+  printf("      Camera is in exposure metering change mode       \n");
+  printf("==========================================================\n\n");
+
+  for (idx = 0; idx < count; idx++) {
+    printf("%c.  %s\n", label, exp_metering_tbl[idx].exp_metering_name);
+    label++;
+  }
+  printf("\nPlease enter your choice for exposure metering modes: ");
+}
+
+/* Print the contrast-change sub-menu from contrast_change_tbl and
+ * prompt for a selection. */
+static void camera_contrast_change_tbl(void) {
+    const size_t count = sizeof(contrast_change_tbl) /
+                         sizeof(contrast_change_tbl[0]);
+    size_t idx;
+    char label = 'A';
+
+    printf("\n");
+    printf("==========================================================\n");
+    printf("      Camera is in change contrast resolution mode       \n");
+    printf("==========================================================\n\n");
+
+    for (idx = 0; idx < count; idx++) {
+        printf("%c.  %s\n", label, contrast_change_tbl[idx].contrast_name);
+        label++;
+    }
+
+    printf("\nPlease enter your choice for contrast Change: ");
+}
+
+/* Print the exposure-compensation (EV) sub-menu from camera_EV_tbl
+ * and prompt for a selection. */
+static void camera_EV_change_tbl(void) {
+  const size_t count = sizeof(camera_EV_tbl) / sizeof(camera_EV_tbl[0]);
+  size_t idx;
+  char label = 'A';
+
+  printf("\n");
+  printf("===========================================\n");
+  printf("      Camera is in EV change mode now       \n");
+  printf("===========================================\n\n");
+
+  for (idx = 0; idx < count; idx++) {
+    printf("%c.  %s\n", label, camera_EV_tbl[idx].EV_name);
+    label++;
+  }
+
+  printf("\nPlease enter your choice for EV changes: ");
+}
+
+/* Print the snapshot-resolution menu.  Only entries whose 'supported'
+ * flag was set (see filter_resolutions()) are listed; the number shown
+ * is the entry's index in dimension_tbl. */
+static void camera_resolution_change_tbl(void) {
+    unsigned int i;
+
+    printf("\n");
+    printf("==========================================================\n");
+    printf("      Camera is in snapshot resolution mode               \n");
+    printf("==========================================================\n\n");
+
+    for (i = 0; i < sizeof(dimension_tbl) /
+      sizeof(dimension_tbl[0]); i++) {
+        if ( dimension_tbl[i].supported ) {
+            /* Fix: i is unsigned, so the conversion must be %u (the
+             * old %d mismatched the argument type). */
+            printf("%u.  %s\n", i,
+                    dimension_tbl[i].str_name);
+        }
+    }
+
+    printf("\nPlease enter your choice for Resolution: ");
+    return;
+}
+
+/* Print the zoom-direction sub-menu.  Resets zoom_max_value to the
+ * compile-time maximum and shows the current/max zoom levels. */
+static void camera_preview_video_zoom_change_tbl(void) {
+    const size_t count = sizeof(zoom_tbl) / sizeof(zoom_tbl[0]);
+    size_t idx;
+    char label = 'A';
+
+    zoom_max_value = MAX_ZOOMS_CNT;
+    printf("\nCurrent Zoom Value = %d ,Max Zoom Value = %d\n",zoom_level,zoom_max_value);
+    for (idx = 0; idx < count; idx++) {
+        printf("%c.  %s\n", label, zoom_tbl[idx].zoom_direction_name);
+        label++;
+    }
+    printf("\nPlease enter your choice for zoom change direction: ");
+}
+
+/* Print the brightness-change sub-menu from brightness_change_tbl and
+ * prompt for a selection. */
+static void camera_brightness_change_tbl(void) {
+    const size_t count = sizeof(brightness_change_tbl) /
+                         sizeof(brightness_change_tbl[0]);
+    size_t idx;
+    char label = 'A';
+
+    printf("\n");
+    printf("==========================================================\n");
+    printf("      Camera is in change brightness mode       \n");
+    printf("==========================================================\n\n");
+
+    for (idx = 0; idx < count; idx++) {
+        printf("%c.  %s\n", label, brightness_change_tbl[idx].brightness_name);
+        label++;
+    }
+
+    printf("\nPlease enter your choice for Brightness Change: ");
+}
+
+/* Print the saturation-change sub-menu from camera_saturation_tbl and
+ * prompt for a selection. */
+static void camera_saturation_change_tbl(void) {
+    const size_t count = sizeof(camera_saturation_tbl) /
+                         sizeof(camera_saturation_tbl[0]);
+    size_t idx;
+    char label = 'A';
+
+    printf("\n");
+    printf("==========================================================\n");
+    printf("      Camera is in change saturation mode       \n");
+    printf("==========================================================\n\n");
+
+    for (idx = 0; idx < count; idx++) {
+        printf("%c.  %s\n", label, camera_saturation_tbl[idx].saturation_name);
+        label++;
+    }
+
+    printf("\nPlease enter your choice for Saturation Change: ");
+}
+
+/* Print the ISO sub-menu from iso_tbl and prompt for a selection. */
+static void camera_preview_video_iso_change_tbl(void) {
+  const size_t count = sizeof(iso_tbl) / sizeof(iso_tbl[0]);
+  size_t idx;
+  char label = 'A';
+
+  printf("\n");
+  printf("==========================================================\n");
+  printf("      Camera is in ISO change mode       \n");
+  printf("==========================================================\n\n");
+
+  for (idx = 0; idx < count; idx++) {
+    printf("%c.  %s\n", label, iso_tbl[idx].iso_modes_name);
+    label++;
+  }
+  printf("\nPlease enter your choice for iso modes: ");
+}
+
+/* Print the sharpness sub-menu from camera_sharpness_tbl and prompt
+ * for a selection. */
+static void camera_preview_video_sharpness_change_tbl(void) {
+  const size_t count = sizeof(camera_sharpness_tbl) /
+                       sizeof(camera_sharpness_tbl[0]);
+  size_t idx;
+  char label = 'A';
+
+  printf("\n");
+  printf("==========================================================\n");
+  printf("      Camera is in sharpness change mode       \n");
+  printf("==========================================================\n\n");
+
+  for (idx = 0; idx < count; idx++) {
+    printf("%c.  %s\n", label, camera_sharpness_tbl[idx].sharpness_name);
+    label++;
+  }
+  printf("\nPlease enter your choice for sharpness modes: ");
+}
+
+/* Print the best-shot (scene mode) sub-menu from bestshot_mode_tbl and
+ * prompt for a selection. */
+static void camera_set_bestshot_tbl(void)
+{
+  unsigned int i;
+
+  printf("\n");
+  printf("===========================================\n");
+  /* Fix: banner previously misspelled "besthot". */
+  printf("      Camera is in set bestshot mode now       \n");
+  printf("===========================================\n\n");
+
+
+  char bsmenuNum = 'A';
+  for (i = 0; i < sizeof(bestshot_mode_tbl)/sizeof(bestshot_mode_tbl[0]); i++) {
+    printf("%c.  %s\n", bsmenuNum,
+      bestshot_mode_tbl[i].name);
+    bsmenuNum++;
+  }
+
+  printf("\nPlease enter your choice of Bestshot Mode: ");
+  return;
+}
+
+/* Print the flash-mode sub-menu from flashmodes_tbl and prompt for a
+ * selection. */
+static void camera_set_flashmode_tbl(void)
+{
+  unsigned int i;
+
+  printf("\n");
+  printf("===========================================\n");
+  printf("      Camera is in set flash mode now       \n");
+  printf("===========================================\n\n");
+
+
+  char bsmenuNum = 'A';
+  for (i = 0; i < sizeof(flashmodes_tbl)/sizeof(flashmodes_tbl[0]); i++) {
+    printf("%c.  %s\n", bsmenuNum,
+      flashmodes_tbl[i].name);
+    bsmenuNum++;
+  }
+
+  /* Fix: the prompt was copy-pasted from the bestshot menu and asked
+   * for a "Bestshot Mode" in the flash-mode menu. */
+  printf("\nPlease enter your choice of Flash Mode: ");
+  return;
+}
+
+/* Print the sensor-selection menu from sensor_tbl.
+ * NOTE(review): the loop condition ANDs 'present', so listing stops at
+ * the first absent entry — sensors after a gap are never shown.
+ * Presumably sensor_tbl is populated contiguously; confirm. */
+static void camera_sensors_tbl(void)
+{
+  unsigned int i;
+  size_t available_sensors = sizeof(sensor_tbl)/sizeof(sensor_tbl[0]);
+
+  printf("\n");
+  printf("===========================================\n");
+  printf("      Camera Sensor to be used:            \n");
+  printf("===========================================\n\n");
+
+
+  /* One letter per detected sensor, starting at 'A'. */
+  char bsmenuNum = 'A';
+  for (i = 0; ( i < available_sensors ) && ( sensor_tbl[i].present ) ; i++) {
+    printf("%c.  %s\n", bsmenuNum,
+            sensor_tbl[i].menu_name);
+    bsmenuNum++;
+  }
+
+  printf("\nPlease enter your choice for sensor: ");
+  return;
+}
+
+/*===========================================================================
+ * FUNCTION     - increase_contrast -
+ *
+ * DESCRIPTION: Raise the global contrast by one step, clamping at
+ * CAMERA_MAX_CONTRAST, then push the new value to the camera library.
+ * ===========================================================================*/
+int increase_contrast (mm_camera_lib_handle *lib_handle) {
+        int next = contrast + CAMERA_CONTRAST_STEP;
+        if (next > CAMERA_MAX_CONTRAST) {
+                next = CAMERA_MAX_CONTRAST;
+                printf("Reached max CONTRAST. \n");
+        }
+        contrast = next;
+        printf("Increase Contrast to %d\n", contrast);
+        return mm_camera_lib_send_command(lib_handle,
+                                          MM_CAMERA_LIB_CONTRAST,
+                                          &contrast,
+                                          NULL);
+}
+
+/*===========================================================================
+ * FUNCTION     - decrease_contrast -
+ *
+ * DESCRIPTION: Lower the global contrast by one step, clamping at
+ * CAMERA_MIN_CONTRAST, then push the new value to the camera library.
+ * ===========================================================================*/
+int decrease_contrast (mm_camera_lib_handle *lib_handle) {
+        int next = contrast - CAMERA_CONTRAST_STEP;
+        if (next < CAMERA_MIN_CONTRAST) {
+                next = CAMERA_MIN_CONTRAST;
+                printf("Reached min CONTRAST. \n");
+        }
+        contrast = next;
+        printf("Decrease Contrast to %d\n", contrast);
+        return mm_camera_lib_send_command(lib_handle,
+                                          MM_CAMERA_LIB_CONTRAST,
+                                          &contrast,
+                                          NULL);
+}
+
+/*===========================================================================
+ * FUNCTION     - decrease_brightness -
+ *
+ * DESCRIPTION: Lower the global brightness by one step, clamping at
+ * CAMERA_MIN_BRIGHTNESS, then push the new value to the camera library.
+ * ===========================================================================*/
+int decrease_brightness (mm_camera_lib_handle *lib_handle) {
+        int next = brightness - CAMERA_BRIGHTNESS_STEP;
+        if (next < CAMERA_MIN_BRIGHTNESS) {
+                next = CAMERA_MIN_BRIGHTNESS;
+                printf("Reached min BRIGHTNESS. \n");
+        }
+        brightness = next;
+        printf("Decrease Brightness to %d\n", brightness);
+        return mm_camera_lib_send_command(lib_handle,
+                                          MM_CAMERA_LIB_BRIGHTNESS,
+                                          &brightness,
+                                          NULL);
+}
+
+/*===========================================================================
+ * FUNCTION     - increase_brightness -
+ *
+ * DESCRIPTION: Raise the global brightness by one step, clamping at
+ * CAMERA_MAX_BRIGHTNESS, then push the new value to the camera library.
+ * ===========================================================================*/
+int increase_brightness (mm_camera_lib_handle *lib_handle) {
+        int next = brightness + CAMERA_BRIGHTNESS_STEP;
+        if (next > CAMERA_MAX_BRIGHTNESS) {
+                next = CAMERA_MAX_BRIGHTNESS;
+                printf("Reached max BRIGHTNESS. \n");
+        }
+        brightness = next;
+        printf("Increase Brightness to %d\n", brightness);
+        return mm_camera_lib_send_command(lib_handle,
+                                          MM_CAMERA_LIB_BRIGHTNESS,
+                                          &brightness,
+                                          NULL);
+}
+
+/*===========================================================================
+ * FUNCTION     - increase_EV -
+ *
+ * DESCRIPTION: Stub.  The legacy implementation below is compiled out
+ * with #if 0, so the function currently does nothing and reports
+ * success.
+ * ===========================================================================*/
+
+int increase_EV (void) {
+#if 0
+   int rc = 0;
+   int32_t value = 0;
+   rc = cam_config_is_parm_supported(cam_id, MM_CAMERA_PARM_EXPOSURE_COMPENSATION);
+    if(!rc) {
+       printf("MM_CAMERA_PARM_EXPOSURE_COMPENSATION mode is not supported for this sensor");
+       return -1;
+    }
+    ev_numerator += 1;
+    if(ev_numerator >= EXPOSURE_COMPENSATION_MINIMUM_NUMERATOR &&
+            ev_numerator <= EXPOSURE_COMPENSATION_MAXIMUM_NUMERATOR){
+        int16_t  numerator16 = (int16_t)(ev_numerator & 0x0000ffff);
+        uint16_t denominator16 = EXPOSURE_COMPENSATION_DENOMINATOR;
+        value = numerator16 << 16 | denominator16;
+    } else {
+       printf("Reached max EV.\n");
+    }
+    return mm_app_set_config_parm(cam_id, MM_CAMERA_PARM_EXPOSURE_COMPENSATION, value);
+#endif
+  /* Legacy path disabled: EV increase is currently a no-op. */
+  return 0;
+}
+
+/*===========================================================================
+ * FUNCTION     - decrease_EV -
+ *
+ * DESCRIPTION: Stub.  The legacy implementation below is compiled out
+ * with #if 0, so the function currently does nothing and reports
+ * success.
+ * ===========================================================================*/
+int decrease_EV (void) {
+#if 0
+   int rc = 0;
+   int32_t  value = 0;
+   rc = cam_config_is_parm_supported(cam_id, MM_CAMERA_PARM_EXPOSURE_COMPENSATION);
+    if(!rc) {
+       printf("MM_CAMERA_PARM_EXPOSURE_COMPENSATION mode is not supported for this sensor");
+       return -1;
+    }
+    ev_numerator -= 1;
+    if(ev_numerator >= EXPOSURE_COMPENSATION_MINIMUM_NUMERATOR &&
+            ev_numerator <= EXPOSURE_COMPENSATION_MAXIMUM_NUMERATOR){
+        int16_t  numerator16 = (int16_t)(ev_numerator & 0x0000ffff);
+        uint16_t denominator16 = EXPOSURE_COMPENSATION_DENOMINATOR;
+        value = numerator16 << 16 | denominator16;
+    } else {
+       printf("Reached min EV.\n");
+    }
+    return mm_app_set_config_parm(cam_id, MM_CAMERA_PARM_EXPOSURE_COMPENSATION, value);
+#endif
+  /* Legacy path disabled: EV decrease is currently a no-op. */
+  return 0;
+}
+
+/*===========================================================================
+ * FUNCTION     - increase_saturation -
+ *
+ * DESCRIPTION: Raise the global saturation by one step, clamping at
+ * CAMERA_MAX_SATURATION, then push the new value to the camera library.
+ * ===========================================================================*/
+int increase_saturation (mm_camera_lib_handle *lib_handle) {
+  saturation += CAMERA_SATURATION_STEP;
+  if (saturation > CAMERA_MAX_SATURATION) {
+    saturation = CAMERA_MAX_SATURATION;
+    printf("Reached max saturation. \n");
+  }
+  /* Fix: the message used to print the unrelated 'contrast' global
+   * instead of the value actually being changed. */
+  printf("Increase saturation to %d\n", saturation);
+  return mm_camera_lib_send_command(lib_handle,
+                                       MM_CAMERA_LIB_SATURATION,
+                                       &saturation,
+                                       NULL);
+}
+
+/*===========================================================================
+ * FUNCTION     - decrease_saturation -
+ *
+ * DESCRIPTION: Lower the global saturation by one step, clamping at
+ * CAMERA_MIN_SATURATION, then push the new value to the camera library.
+ * ===========================================================================*/
+int decrease_saturation (mm_camera_lib_handle *lib_handle) {
+  saturation -= CAMERA_SATURATION_STEP;
+  if (saturation < CAMERA_MIN_SATURATION) {
+    saturation = CAMERA_MIN_SATURATION;
+    printf("Reached min saturation. \n");
+  }
+  /* Fix: the message used to print the unrelated 'contrast' global
+   * instead of the value actually being changed. */
+  printf("decrease saturation to %d\n", saturation);
+  return mm_camera_lib_send_command(lib_handle,
+                                       MM_CAMERA_LIB_SATURATION,
+                                       &saturation,
+                                       NULL);
+}
+
+
+/* Trigger a JPEG capture (single or burst) through the app layer and
+ * log any failure; returns the mm_app status code. */
+int take_jpeg_snapshot(mm_camera_test_obj_t *test_obj, int is_burst_mode)
+{
+  int status;
+
+  LOGH("\nEnter take_jpeg_snapshot!!\n");
+  status = mm_app_take_picture (test_obj, (uint8_t)is_burst_mode);
+  if (MM_CAMERA_OK != status) {
+    LOGE(" mm_app_take_picture() err=%d\n",  status);
+  }
+  return status;
+}
+
+/*===========================================================================
+ * FUNCTION    - main -
+ *
+ * DESCRIPTION: Entry point.  Reads the execution mode from stdin and
+ * either runs the interactive menu (submain) or the regression suite.
+ *==========================================================================*/
+int main()
+{
+    char tc_buf[3];
+    int mode = 0;
+    int rc = 0;
+
+    printf("Please Select Execution Mode:\n");
+    printf("0: Menu Based 1: Regression\n");
+    /* Fix: the fgets() result was ignored — on EOF/read error tc_buf
+     * was read uninitialized. */
+    if (NULL == fgets(tc_buf, sizeof(tc_buf), stdin)) {
+        printf("\nFailed to read execution mode\n");
+        exit(-1);
+    }
+    mode = tc_buf[0] - '0';
+    if(mode == 0) {
+      printf("\nStarting Menu based!!\n");
+    } else if(mode == 1) {
+      printf("Starting Regression testing!!\n");
+      if(!mm_app_start_regression_test(1)) {
+         /* Fix: message typo "Regressiion". */
+         printf("\nRegression test passed!!\n");
+         return 0;
+      } else {
+        printf("\nRegression test failed!!\n");
+        exit(-1);
+      }
+    } else {
+       printf("\nPlease Enter 0 or 1\n");
+       /* Fix: message typo "Existing". */
+       printf("\nExiting the App!!\n");
+       exit(-1);
+    }
+
+    /* Menu-driven path: submain() runs the interactive loop. */
+    rc = submain();
+
+    printf("Exiting application\n");
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION     - set_whitebalance -
+ *
+ * DESCRIPTION: Map a menu selection to the corresponding HAL white
+ * balance mode and send it to the camera library.  An unknown
+ * selection sends mode 0, matching the legacy switch default.
+ * ===========================================================================*/
+int set_whitebalance (mm_camera_lib_handle *lib_handle, int wb_action_param) {
+        static const struct {
+                int action;
+                cam_wb_mode_type mode;
+                const char *log;
+        } wb_map[] = {
+                { WB_AUTO,             CAM_WB_MODE_AUTO,             "\n WB_AUTO\n" },
+                { WB_INCANDESCENT,     CAM_WB_MODE_INCANDESCENT,     "\n WB_INCANDESCENT\n" },
+                { WB_FLUORESCENT,      CAM_WB_MODE_FLUORESCENT,      "\n WB_FLUORESCENT\n" },
+                { WB_WARM_FLUORESCENT, CAM_WB_MODE_WARM_FLUORESCENT, "\n WB_WARM_FLUORESCENT\n" },
+                { WB_DAYLIGHT,         CAM_WB_MODE_DAYLIGHT,         "\n WB_DAYLIGHT\n" },
+                { WB_CLOUDY_DAYLIGHT,  CAM_WB_MODE_CLOUDY_DAYLIGHT,  "\n WB_CLOUDY_DAYLIGHT\n" },
+                { WB_TWILIGHT,         CAM_WB_MODE_TWILIGHT,         "\n WB_TWILIGHT\n" },
+                { WB_SHADE,            CAM_WB_MODE_SHADE,            "\n WB_SHADE\n" },
+        };
+        cam_wb_mode_type type = 0;
+        size_t i;
+
+        for (i = 0; i < sizeof(wb_map) / sizeof(wb_map[0]); i++) {
+                if (wb_map[i].action == wb_action_param) {
+                        printf("%s", wb_map[i].log);
+                        type = wb_map[i].mode;
+                        break;
+                }
+        }
+        return mm_camera_lib_send_command(lib_handle,
+                                          MM_CAMERA_LIB_WB,
+                                          &type,
+                                          NULL);
+}
+
+
+/*===========================================================================
+ * FUNCTION     - set_exp_metering -
+ *
+ * DESCRIPTION: Map a menu selection to the corresponding AEC metering
+ * mode and send it to the camera library.  An unknown selection sends
+ * mode 0, matching the legacy switch default.
+ * ===========================================================================*/
+int set_exp_metering (mm_camera_lib_handle *lib_handle, int exp_metering_action_param) {
+        static const struct {
+                int action;
+                cam_auto_exposure_mode_type mode;
+                const char *log;
+        } aec_map[] = {
+                { AUTO_EXP_FRAME_AVG,           CAM_AEC_MODE_FRAME_AVERAGE,       "\nAUTO_EXP_FRAME_AVG\n" },
+                { AUTO_EXP_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED,     "\n AUTO_EXP_CENTER_WEIGHTED\n" },
+                { AUTO_EXP_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING,       "\n AUTO_EXP_SPOT_METERING\n" },
+                { AUTO_EXP_SMART_METERING,      CAM_AEC_MODE_SMART_METERING,      "\n AUTO_EXP_SMART_METERING\n" },
+                { AUTO_EXP_USER_METERING,       CAM_AEC_MODE_USER_METERING,       "\n AUTO_EXP_USER_METERING\n" },
+                { AUTO_EXP_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV,   "\n AUTO_EXP_SPOT_METERING_ADV\n" },
+                { AUTO_EXP_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV, "\n AUTO_EXP_CENTER_WEIGHTED_ADV\n" },
+        };
+        cam_auto_exposure_mode_type type = 0;
+        size_t i;
+
+        for (i = 0; i < sizeof(aec_map) / sizeof(aec_map[0]); i++) {
+                if (aec_map[i].action == exp_metering_action_param) {
+                        printf("%s", aec_map[i].log);
+                        type = aec_map[i].mode;
+                        break;
+                }
+        }
+        return mm_camera_lib_send_command(lib_handle,
+                                          MM_CAMERA_LIB_EXPOSURE_METERING,
+                                          &type,
+                                          NULL);
+}
+
+/* Stub: the V4L2 control query below is compiled out with #if 0; the
+ * function currently just echoes back its argument. */
+int get_ctrl_value (int ctrl_value_mode_param){
+#if 0
+    int rc = 0;
+    struct v4l2_control ctrl;
+
+    if (ctrl_value_mode_param == WHITE_BALANCE_STATE) {
+        printf("You chose WHITE_BALANCE_STATE\n");
+        ctrl.id = V4L2_CID_AUTO_WHITE_BALANCE;
+    }
+    else if (ctrl_value_mode_param == WHITE_BALANCE_TEMPERATURE) {
+        printf("You chose WHITE_BALANCE_TEMPERATURE\n");
+        ctrl.id = V4L2_CID_WHITE_BALANCE_TEMPERATURE;
+    }
+    else if (ctrl_value_mode_param == BRIGHTNESS_CTRL) {
+        printf("You chose brightness value\n");
+        ctrl.id = V4L2_CID_BRIGHTNESS;
+    }
+    else if (ctrl_value_mode_param == EV) {
+        printf("You chose exposure value\n");
+        ctrl.id = V4L2_CID_EXPOSURE;
+    }
+    else if (ctrl_value_mode_param == CONTRAST_CTRL) {
+        printf("You chose contrast value\n");
+        ctrl.id = V4L2_CID_CONTRAST;
+    }
+    else if (ctrl_value_mode_param == SATURATION_CTRL) {
+        printf("You chose saturation value\n");
+        ctrl.id = V4L2_CID_SATURATION;
+    } else if (ctrl_value_mode_param == SHARPNESS_CTRL) {
+        printf("You chose sharpness value\n");
+        ctrl.id = V4L2_CID_SHARPNESS;
+    }
+
+  //  rc = ioctl(camfd, VIDIOC_G_CTRL, &ctrl);
+    return rc;
+#endif
+  /* Legacy path disabled: return the selector unchanged. */
+  return ctrl_value_mode_param;
+}
+
+/*===========================================================================
+ * FUNCTION     - toggle_afr -
+ *
+ * DESCRIPTION: Stub.  The auto-frame-rate toggle below is compiled out
+ * with #if 0; the function currently does nothing and reports success.
+ * ===========================================================================*/
+int toggle_afr () {
+#if 0
+    if (fps_mode == FPS_MODE_AUTO) {
+        printf("\nSetting FPS_MODE_FIXED\n");
+        fps_mode = FPS_MODE_FIXED;
+    } else {
+        printf("\nSetting FPS_MODE_AUTO\n");
+        fps_mode = FPS_MODE_AUTO;
+    }
+    return mm_app_set_config_parm(cam_id, MM_CAMERA_PARM_FPS_MODE, fps_mode);
+#endif
+  /* Legacy path disabled: AFR toggle is currently a no-op. */
+  return 0;
+}
+
+/* Step the global zoom level in the requested direction (clamped to
+ * [ZOOM_MIN_VALUE, zoom_max_value]) and apply it via the camera
+ * library; returns -EINVAL for an unknown direction. */
+int set_zoom (mm_camera_lib_handle *lib_handle, int zoom_action_param) {
+
+    switch (zoom_action_param) {
+    case ZOOM_IN:
+        zoom_level += ZOOM_STEP;
+        if (zoom_level > zoom_max_value)
+            zoom_level = zoom_max_value;
+        break;
+    case ZOOM_OUT:
+        zoom_level -= ZOOM_STEP;
+        if (zoom_level < ZOOM_MIN_VALUE)
+            zoom_level = ZOOM_MIN_VALUE;
+        break;
+    default:
+        LOGD(" Invalid zoom_action_param value\n");
+        return -EINVAL;
+    }
+    return mm_camera_lib_send_command(lib_handle,
+                                      MM_CAMERA_LIB_ZOOM,
+                                      &zoom_level,
+                                      NULL);
+}
+
+/*===========================================================================
+ * FUNCTION     - set_iso -
+ *
+ * DESCRIPTION: Map a menu selection to the corresponding ISO mode and
+ * send it to the camera library.  An unknown selection sends mode 0,
+ * matching the legacy switch default.
+ * ===========================================================================*/
+int set_iso (mm_camera_lib_handle *lib_handle, int iso_action_param) {
+    static const struct {
+        int action;
+        cam_iso_mode_type mode;
+        const char *log;
+    } iso_map[] = {
+        { ISO_AUTO,   CAM_ISO_MODE_AUTO,   "\n ISO_AUTO\n" },
+        { ISO_DEBLUR, CAM_ISO_MODE_DEBLUR, "\n ISO_DEBLUR\n" },
+        { ISO_100,    CAM_ISO_MODE_100,    "\n ISO_100\n" },
+        { ISO_200,    CAM_ISO_MODE_200,    "\n ISO_200\n" },
+        { ISO_400,    CAM_ISO_MODE_400,    "\n ISO_400\n" },
+        { ISO_800,    CAM_ISO_MODE_800,    "\n ISO_800\n" },
+        { ISO_1600,   CAM_ISO_MODE_1600,   "\n ISO_1600\n" },
+    };
+    cam_iso_mode_type type = 0;
+    size_t i;
+
+    for (i = 0; i < sizeof(iso_map) / sizeof(iso_map[0]); i++) {
+        if (iso_map[i].action == iso_action_param) {
+            printf("%s", iso_map[i].log);
+            type = iso_map[i].mode;
+            break;
+        }
+    }
+    return mm_camera_lib_send_command(lib_handle,
+                                      MM_CAMERA_LIB_ISO,
+                                      &type,
+                                      NULL);
+}
+
+/*===========================================================================
+ * FUNCTION     - increase_sharpness -
+ *
+ * DESCRIPTION: Raise the global sharpness by one step, clamping at
+ * CAMERA_MAX_SHARPNESS, then push the new value to the camera library.
+ * ===========================================================================*/
+int increase_sharpness (mm_camera_lib_handle *lib_handle) {
+    int next = sharpness + CAMERA_SHARPNESS_STEP;
+    if (next > CAMERA_MAX_SHARPNESS) {
+        next = CAMERA_MAX_SHARPNESS;
+        printf("Reached max SHARPNESS. \n");
+    }
+    sharpness = next;
+    printf("Increase Sharpness to %d\n", sharpness);
+    return mm_camera_lib_send_command(lib_handle,
+                                      MM_CAMERA_LIB_SHARPNESS,
+                                      &sharpness,
+                                      NULL);
+}
+
+/*===========================================================================
+ * FUNCTION     - decrease_sharpness -
+ *
+ * DESCRIPTION: Lower the global sharpness by one step, clamping at
+ * CAMERA_MIN_SHARPNESS, then push the new value to the camera library.
+ * ===========================================================================*/
+int decrease_sharpness (mm_camera_lib_handle *lib_handle) {
+    int next = sharpness - CAMERA_SHARPNESS_STEP;
+    if (next < CAMERA_MIN_SHARPNESS) {
+        next = CAMERA_MIN_SHARPNESS;
+        printf("Reached min SHARPNESS. \n");
+    }
+    sharpness = next;
+    printf("Decrease Sharpness to %d\n", sharpness);
+    return mm_camera_lib_send_command(lib_handle,
+                                      MM_CAMERA_LIB_SHARPNESS,
+                                      &sharpness,
+                                      NULL);
+}
+
+/* Map a menu selection to the corresponding flash mode and send it to
+ * the camera library.  An unknown selection sends mode 0, matching the
+ * legacy switch default. */
+int set_flash_mode (mm_camera_lib_handle *lib_handle, int action_param) {
+    static const struct {
+        int action;
+        cam_flash_mode_t mode;
+        const char *log;
+    } flash_map[] = {
+        { FLASH_MODE_OFF,   CAM_FLASH_MODE_OFF,   "\n FLASH_MODE_OFF\n" },
+        { FLASH_MODE_AUTO,  CAM_FLASH_MODE_AUTO,  "\n FLASH_MODE_AUTO\n" },
+        { FLASH_MODE_ON,    CAM_FLASH_MODE_ON,    "\n FLASH_MODE_ON\n" },
+        { FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH, "\n FLASH_MODE_TORCH\n" },
+    };
+    cam_flash_mode_t type = 0;
+    size_t i;
+
+    for (i = 0; i < sizeof(flash_map) / sizeof(flash_map[0]); i++) {
+        if (flash_map[i].action == action_param) {
+            printf("%s", flash_map[i].log);
+            type = flash_map[i].mode;
+            break;
+        }
+    }
+    return mm_camera_lib_send_command(lib_handle,
+                                      MM_CAMERA_LIB_FLASH,
+                                      &type,
+                                      NULL);
+}
+
+/* Map a menu selection to the corresponding scene ("best shot") mode
+ * and send it to the camera library.  An unknown selection sends mode
+ * 0, matching the legacy switch default.  (The "THREATRE" message typo
+ * is preserved from the original.) */
+int set_bestshot_mode(mm_camera_lib_handle *lib_handle, int action_param) {
+    static const struct {
+        int action;
+        cam_scene_mode_type mode;
+        const char *log;
+    } scene_map[] = {
+        { BESTSHOT_AUTO,           CAM_SCENE_MODE_OFF,            "\n BEST SHOT AUTO\n" },
+        { BESTSHOT_ACTION,         CAM_SCENE_MODE_ACTION,         "\n BEST SHOT ACTION\n" },
+        { BESTSHOT_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT,       "\n BEST SHOT PORTRAIT\n" },
+        { BESTSHOT_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE,      "\n BEST SHOT LANDSCAPE\n" },
+        { BESTSHOT_NIGHT,          CAM_SCENE_MODE_NIGHT,          "\n BEST SHOT NIGHT\n" },
+        { BESTSHOT_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT, "\n BEST SHOT NIGHT PORTRAIT\n" },
+        { BESTSHOT_THEATRE,        CAM_SCENE_MODE_THEATRE,        "\n BEST SHOT THREATRE\n" },
+        { BESTSHOT_BEACH,          CAM_SCENE_MODE_BEACH,          "\n BEST SHOT BEACH\n" },
+        { BESTSHOT_SNOW,           CAM_SCENE_MODE_SNOW,           "\n BEST SHOT SNOW\n" },
+        { BESTSHOT_SUNSET,         CAM_SCENE_MODE_SUNSET,         "\n BEST SHOT SUNSET\n" },
+        { BESTSHOT_ANTISHAKE,      CAM_SCENE_MODE_ANTISHAKE,      "\n BEST SHOT ANTISHAKE\n" },
+        { BESTSHOT_FIREWORKS,      CAM_SCENE_MODE_FIREWORKS,      "\n BEST SHOT FIREWORKS\n" },
+        { BESTSHOT_SPORTS,         CAM_SCENE_MODE_SPORTS,         "\n BEST SHOT SPORTS\n" },
+        { BESTSHOT_PARTY,          CAM_SCENE_MODE_PARTY,          "\n BEST SHOT PARTY\n" },
+        { BESTSHOT_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT,    "\n BEST SHOT CANDLELIGHT\n" },
+        { BESTSHOT_ASD,            CAM_SCENE_MODE_AUTO,           "\n BEST SHOT ASD\n" },
+        { BESTSHOT_BACKLIGHT,      CAM_SCENE_MODE_BACKLIGHT,      "\n BEST SHOT BACKLIGHT\n" },
+        { BESTSHOT_FLOWERS,        CAM_SCENE_MODE_FLOWERS,        "\n BEST SHOT FLOWERS\n" },
+        { BESTSHOT_AR,             CAM_SCENE_MODE_AR,             "\n BEST SHOT AR\n" },
+        { BESTSHOT_HDR,            CAM_SCENE_MODE_OFF,            "\n BEST SHOT HDR\n" },
+    };
+    cam_scene_mode_type type = 0;
+    size_t i;
+
+    for (i = 0; i < sizeof(scene_map) / sizeof(scene_map[0]); i++) {
+        if (scene_map[i].action == action_param) {
+            printf("%s", scene_map[i].log);
+            type = scene_map[i].mode;
+            break;
+        }
+    }
+    return mm_camera_lib_send_command(lib_handle,
+                                      MM_CAMERA_LIB_BESTSHOT,
+                                      &type,
+                                      NULL);
+}
+/*===========================================================================
+ * FUNCTION     - print_current_menu -
+ *
+ * DESCRIPTION: Dispatch to the printer for the currently active menu
+ * screen; unknown menu ids print nothing.
+ * ===========================================================================*/
+int print_current_menu (menu_id_change_t current_menu_id) {
+  switch (current_menu_id) {
+  case MENU_ID_MAIN:
+    print_menu_preview_video ();
+    break;
+  case MENU_ID_WHITEBALANCECHANGE:
+    camera_preview_video_wb_change_tbl();
+    break;
+  case MENU_ID_EXPMETERINGCHANGE:
+    camera_preview_video_exp_metering_change_tbl();
+    break;
+  case MENU_ID_GET_CTRL_VALUE:
+    camera_preview_video_get_ctrl_value_tbl();
+    break;
+  case MENU_ID_ISOCHANGE:
+    camera_preview_video_iso_change_tbl();
+    break;
+  case MENU_ID_BRIGHTNESSCHANGE:
+    camera_brightness_change_tbl ();
+    break;
+  case MENU_ID_CONTRASTCHANGE:
+    camera_contrast_change_tbl ();
+    break;
+  case MENU_ID_EVCHANGE:
+    camera_EV_change_tbl ();
+    break;
+  case MENU_ID_SATURATIONCHANGE:
+    camera_saturation_change_tbl ();
+    break;
+  case MENU_ID_ZOOMCHANGE:
+    camera_preview_video_zoom_change_tbl();
+    break;
+  case MENU_ID_SHARPNESSCHANGE:
+    camera_preview_video_sharpness_change_tbl();
+    break;
+  case MENU_ID_BESTSHOT:
+    camera_set_bestshot_tbl();
+    break;
+  case MENU_ID_FLASHMODE:
+    camera_set_flashmode_tbl();
+    break;
+  case MENU_ID_SENSORS:
+    camera_sensors_tbl();
+    break;
+  case MENU_ID_SWITCH_RES:
+    camera_resolution_change_tbl();
+    break;
+  default:
+    break;
+  }
+
+  return 0;
+}
+
+/* Mark every entry of tbl whose width/height matches a picture size
+ * reported by the camera capabilities.  Returns -1 on bad arguments or
+ * a failed capability query; otherwise returns the index of the last
+ * entry that matched (or the caps status if none matched). */
+int filter_resolutions(mm_camera_lib_handle *lib_handle,
+                       DIMENSION_TBL_T *tbl,
+                       size_t tbl_size)
+{
+    cam_capability_t camera_cap;
+    size_t entry, pic;
+    int rc;
+
+    if ( ( NULL == lib_handle ) || ( NULL == tbl ) ) {
+        return -1;
+    }
+
+    rc = mm_camera_lib_get_caps(lib_handle, &camera_cap);
+    if ( MM_CAMERA_OK != rc ) {
+        LOGE("mm_camera_lib_get_caps() err=%d\n",  rc);
+        return -1;
+    }
+
+    for (entry = 0; entry < tbl_size; entry++) {
+        for (pic = 0; pic < camera_cap.picture_sizes_tbl_cnt; pic++) {
+            if ((tbl[entry].width == camera_cap.picture_sizes_tbl[pic].width) &&
+                (tbl[entry].height == camera_cap.picture_sizes_tbl[pic].height)) {
+                tbl[entry].supported = 1;
+                rc = (int)entry;
+                break;
+            }
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : enableAFR
+ *
+ * DESCRIPTION: This function will go through the list
+ *              of supported FPS ranges and select the
+ *              one which has maximum range
+ *
+ * PARAMETERS :
+ *   @lib_handle   : camera test library handle
+ *
+ * RETURN     : int status code
+ *              MM_CAMERA_OK  -- Success
+ *              !=MM_CAMERA_OK -- Error status
+ *==========================================================================*/
+int enableAFR(mm_camera_lib_handle *lib_handle)
+{
+    size_t i, j;
+    float max_range = 0.0f;
+    cam_capability_t cap;
+    int rc = MM_CAMERA_OK;
+
+    if ( NULL == lib_handle ) {
+        return MM_CAMERA_E_INVALID_INPUT;
+    }
+
+    rc = mm_camera_lib_get_caps(lib_handle, &cap);
+    if ( MM_CAMERA_OK != rc ) {
+        LOGE("mm_camera_lib_get_caps() err=%d\n",  rc);
+        return rc;
+    }
+
+    /* Track the widest [min_fps, max_fps] span seen so far. The original
+     * loop never updated max_range, so j ended up being the LAST range with
+     * a non-zero span rather than the widest one. */
+    for( i = 0, j = 0 ; i < cap.fps_ranges_tbl_cnt ; i++ ) {
+        float range = cap.fps_ranges_tbl[i].max_fps - cap.fps_ranges_tbl[i].min_fps;
+        if ( max_range < range ) {
+            max_range = range;
+            j = i;
+        }
+    }
+
+    rc = mm_camera_lib_send_command(lib_handle,
+                                    MM_CAMERA_LIB_FPS_RANGE,
+                                    &cap.fps_ranges_tbl[j],
+                                    NULL);
+
+    LOGE("FPS range [%5.2f:%5.2f] rc = %d",
+              cap.fps_ranges_tbl[j].min_fps,
+              cap.fps_ranges_tbl[j].max_fps,
+              rc);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION     - submain -
+ *
+ * DESCRIPTION: Interactive main loop of the camera test app. Opens the
+ *              camera library, discovers sensors/snapshot resolutions,
+ *              starts the eztune server, then reads single-character menu
+ *              selections from stdin and dispatches the chosen action
+ *              until ACTION_EXIT. Returns 0 on success, negative on error.
+ * ===========================================================================*/
+static int submain()
+{
+    int rc = 0;
+    /* holds one menu keystroke + newline + NUL for fgets() below */
+    char tc_buf[3];
+    menu_id_change_t current_menu_id = MENU_ID_MAIN, next_menu_id;
+    camera_action_t action_id;
+    int action_param;
+    uint8_t previewing = 0;
+    int isZSL = 0;
+    uint8_t wnr_enabled = 0;
+    mm_camera_lib_handle lib_handle;
+    int num_cameras;
+    int available_sensors =
+        (int)(sizeof(sensor_tbl) / sizeof(sensor_tbl[0]));
+    int available_snap_sizes =
+        (int)(sizeof(dimension_tbl)/sizeof(dimension_tbl[0]));
+    int i,c;
+    mm_camera_lib_snapshot_params snap_dim;
+    snap_dim.width = DEFAULT_SNAPSHOT_WIDTH;
+    snap_dim.height = DEFAULT_SNAPSHOT_HEIGHT;
+    cam_scene_mode_type default_scene= CAM_SCENE_MODE_OFF;
+    int set_tintless= 0;
+
+    mm_camera_test_obj_t test_obj;
+    memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+
+    /* Always start with camera id 0; ACTION_SWITCH_CAMERA re-opens later. */
+    rc = mm_camera_lib_open(&lib_handle, 0);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("mm_camera_lib_open() err=%d\n",  rc);
+        return -1;
+    }
+
+    /* NOTE(review): num_cameras is either <= 0 or >= 1, so the final "else"
+     * branch below is unreachable dead code. The condition was presumably
+     * meant to be (1 < num_cameras) -- confirm intent before changing. */
+    num_cameras = mm_camera_lib_number_of_cameras(&lib_handle);
+    if ( 0 >= num_cameras ) {
+        LOGE(" No camera sensors reported!");
+        rc = -1;
+        goto ERROR;
+    } else if ( 1 <= num_cameras ) {
+        c = MIN(num_cameras, available_sensors);
+        for ( i = 0 ; i < c ; i++ ) {
+            sensor_tbl[i].present = 1;
+        }
+        current_menu_id = MENU_ID_SENSORS;
+    } else {
+        i = filter_resolutions(&lib_handle,
+                                dimension_tbl,
+                                (size_t)available_snap_sizes);
+        if ( ( i < 0 ) || ( i >= available_snap_sizes ) ) {
+            LOGE("filter_resolutions()\n");
+            goto ERROR;
+        }
+        snap_dim.width = dimension_tbl[i].width;
+        snap_dim.height = dimension_tbl[i].height;
+
+        rc = enableAFR(&lib_handle);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("enableAFR() err=%d\n",  rc);
+            goto ERROR;
+        }
+
+        rc =  mm_camera_lib_send_command(&lib_handle,
+                                         MM_CAMERA_LIB_BESTSHOT,
+                                         &default_scene,
+                                         NULL);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_camera_lib_send_command() err=%d\n",  rc);
+            goto ERROR;
+        }
+    }
+    /*start the eztune server*/
+    LOGH("Starting eztune Server \n");
+    eztune_server_start(&lib_handle);
+
+    do {
+        print_current_menu (current_menu_id);
+        /* Blocks until the user enters a menu selection. */
+        fgets(tc_buf, 3, stdin);
+
+        next_menu_id = next_menu(current_menu_id, tc_buf[0], & action_id, & action_param);
+
+        if (next_menu_id != MENU_ID_INVALID) {
+          current_menu_id = next_menu_id;
+        }
+        if (action_id == ACTION_NO_ACTION) {
+          continue;
+        }
+
+        switch(action_id) {
+            case ACTION_START_PREVIEW:
+                LOGE("ACTION_START_PREVIEW \n");
+                rc = mm_camera_lib_start_stream(&lib_handle);
+                if (rc != MM_CAMERA_OK) {
+                    LOGE("mm_camera_lib_start_stream() err=%d\n",  rc);
+                    goto ERROR;
+                }
+                previewing = 1;
+                break;
+
+            case ACTION_STOP_PREVIEW:
+                LOGD("ACTION_STOP_PREVIEW \n");
+                rc = mm_camera_lib_stop_stream(&lib_handle);
+                if (rc != MM_CAMERA_OK) {
+                    LOGE("mm_camera_lib_stop_stream() err=%d\n",  rc);
+                    goto ERROR;
+                }
+                previewing = 0;
+                break;
+
+            case ACTION_SET_WHITE_BALANCE:
+                LOGD("Selection for the White Balance changes\n");
+                set_whitebalance(&lib_handle, action_param);
+                break;
+
+            case ACTION_SET_TINTLESS_ENABLE:
+                LOGD("Selection for the Tintless enable changes\n");
+                set_tintless = 1;
+                rc =  mm_camera_lib_send_command(&lib_handle,
+                                                 MM_CAMERA_LIB_SET_TINTLESS,
+                                                 &set_tintless,
+                                                 NULL);
+                if (rc != MM_CAMERA_OK) {
+                    LOGE("mm_camera_lib_send_command() err=%d\n",  rc);
+                    goto ERROR;
+                }
+                break;
+
+            case ACTION_SET_TINTLESS_DISABLE:
+                LOGD("Selection for the Tintless disable changes\n");
+                set_tintless = 0;
+                rc =  mm_camera_lib_send_command(&lib_handle,
+                                                 MM_CAMERA_LIB_SET_TINTLESS,
+                                                 &set_tintless,
+                                                 NULL);
+                if (rc != MM_CAMERA_OK) {
+                    LOGE("mm_camera_lib_send_command() err=%d\n",  rc);
+                    goto ERROR;
+                }
+                break;
+
+            case ACTION_SET_EXP_METERING:
+                LOGD("Selection for the Exposure Metering changes\n");
+                set_exp_metering(&lib_handle, action_param);
+                break;
+
+            case ACTION_GET_CTRL_VALUE:
+                LOGD("Selection for getting control value\n");
+                get_ctrl_value(action_param);
+                break;
+
+            case ACTION_BRIGHTNESS_INCREASE:
+                printf("Increase brightness\n");
+                increase_brightness(&lib_handle);
+                break;
+
+            case ACTION_BRIGHTNESS_DECREASE:
+                printf("Decrease brightness\n");
+                decrease_brightness(&lib_handle);
+                break;
+
+            case ACTION_CONTRAST_INCREASE:
+                LOGD("Selection for the contrast increase\n");
+                increase_contrast (&lib_handle);
+                break;
+
+            case ACTION_CONTRAST_DECREASE:
+                LOGD("Selection for the contrast decrease\n");
+                decrease_contrast (&lib_handle);
+                break;
+
+            case ACTION_EV_INCREASE:
+                LOGD("Selection for the EV increase\n");
+                increase_EV ();
+                break;
+
+            case ACTION_EV_DECREASE:
+                LOGD("Selection for the EV decrease\n");
+                decrease_EV ();
+                break;
+
+            case ACTION_SATURATION_INCREASE:
+                LOGD("Selection for the EV increase\n");
+                increase_saturation (&lib_handle);
+                break;
+
+            case ACTION_SATURATION_DECREASE:
+                LOGD("Selection for the EV decrease\n");
+                decrease_saturation (&lib_handle);
+                break;
+
+            case ACTION_TOGGLE_AFR:
+                LOGD("Select for auto frame rate toggling\n");
+                toggle_afr();
+                break;
+
+            case ACTION_SET_ISO:
+                LOGD("Select for ISO changes\n");
+                set_iso(&lib_handle, action_param);
+                break;
+
+            case ACTION_SET_ZOOM:
+                LOGD("Selection for the zoom direction changes\n");
+                set_zoom(&lib_handle, action_param);
+                break;
+
+            case ACTION_SHARPNESS_INCREASE:
+                LOGD("Selection for sharpness increase\n");
+                increase_sharpness(&lib_handle);
+                break;
+
+            case ACTION_SHARPNESS_DECREASE:
+                LOGD("Selection for sharpness decrease\n");
+                decrease_sharpness(&lib_handle);
+                break;
+
+            case ACTION_SET_BESTSHOT_MODE:
+                LOGD("Selection for bestshot\n");
+                set_bestshot_mode(&lib_handle, action_param);
+                break;
+
+            case ACTION_SET_FLASH_MODE:
+                printf("\n Selection for flashmode\n");
+                set_flash_mode(&lib_handle, action_param);
+                break;
+
+            case ACTION_SWITCH_CAMERA:
+                /* Tear down the current camera and redo the full
+                 * resolution/AFR/bestshot initialization for the new one. */
+                rc = mm_camera_lib_close(&lib_handle);
+                if (rc != MM_CAMERA_OK) {
+                    LOGE("mm_camera_lib_close() err=%d\n",  rc);
+                    goto ERROR;
+                }
+
+                rc = mm_camera_lib_open(&lib_handle, action_param);
+                if (rc != MM_CAMERA_OK) {
+                    LOGE("mm_camera_lib_open() err=%d\n",  rc);
+                    goto ERROR;
+                }
+
+                i = filter_resolutions(&lib_handle,
+                                        dimension_tbl,
+                                        sizeof(dimension_tbl)/sizeof(dimension_tbl[0]));
+                if ( ( i < 0 ) || ( i >=  available_snap_sizes ) ) {
+                    LOGE("filter_resolutions()\n");
+                    goto ERROR;
+                }
+                snap_dim.width = dimension_tbl[i].width;
+                snap_dim.height = dimension_tbl[i].height;
+
+                rc = enableAFR(&lib_handle);
+                if (rc != MM_CAMERA_OK) {
+                    LOGE("enableAFR() err=%d\n",  rc);
+                    goto ERROR;
+                }
+
+                rc =  mm_camera_lib_send_command(&lib_handle,
+                                                 MM_CAMERA_LIB_BESTSHOT,
+                                                 &default_scene,
+                                                 NULL);
+                if (rc != MM_CAMERA_OK) {
+                    LOGE("mm_camera_lib_send_command() err=%d\n",  rc);
+                    goto ERROR;
+                }
+                break;
+
+            case ACTION_TOGGLE_ZSL:
+                printf("ZSL Toggle !!!\n");
+                isZSL = !isZSL;
+                if ( isZSL ) {
+                    printf("ZSL on !!!\n");
+                } else {
+                    printf("ZSL off !!!\n");
+                }
+                rc = mm_camera_lib_send_command(&lib_handle,
+                                                MM_CAMERA_LIB_ZSL_ENABLE,
+                                                &isZSL,
+                                                NULL);
+                if (rc != MM_CAMERA_OK) {
+                    LOGE("mm_camera_lib_send_command() err=%d\n",  rc);
+                    goto ERROR;
+                }
+                break;
+
+            case ACTION_TAKE_RAW_SNAPSHOT:
+                LOGH("\n Take RAW snapshot\n");
+
+                /* Run autofocus before the RAW capture. */
+                rc = mm_camera_lib_send_command(&lib_handle,
+                                                MM_CAMERA_LIB_DO_AF,
+                                                NULL,
+                                                NULL);
+
+                if (rc != MM_CAMERA_OK) {
+                    LOGE("mm_camera_lib_send_command() err=%d\n",  rc);
+                    goto ERROR;
+                }
+
+                rc = mm_camera_lib_send_command(&lib_handle,
+                                                MM_CAMERA_LIB_RAW_CAPTURE,
+                                                NULL,
+                                                NULL);
+                if (rc != MM_CAMERA_OK) {
+                    LOGE("mm_camera_lib_send_command() err=%d\n",  rc);
+                    goto ERROR;
+                }
+                break;
+
+            case ACTION_TAKE_JPEG_SNAPSHOT:
+                LOGH("\n Take JPEG snapshot\n");
+
+                rc = mm_camera_lib_send_command(&lib_handle,
+                                                MM_CAMERA_LIB_JPEG_CAPTURE,
+                                                &snap_dim,
+                                                NULL);
+                if (rc != MM_CAMERA_OK) {
+                    LOGE("mm_camera_lib_send_command() err=%d\n",  rc);
+                    goto ERROR;
+                }
+                break;
+            case ACTION_SWITCH_RESOLUTION:
+                printf("\n Switch snapshot resolution to %dx%d\n",
+                       dimension_tbl[action_param].width,
+                       dimension_tbl[action_param].height);
+                snap_dim.width = dimension_tbl[action_param].width;
+                snap_dim.height = dimension_tbl[action_param].height;
+                break;
+
+      /* Recording / live snapshot actions are currently compiled out. */
+      case ACTION_START_RECORDING:
+        LOGD("Start recording action\n");
+#if 0
+        if (mm_app_start_video(cam_id) < 0)
+          goto ERROR;
+        is_rec = 1;
+#endif
+        break;
+      case ACTION_STOP_RECORDING:
+        LOGD("Stop recording action\n");
+#if 0
+        if(is_rec) {
+          if (mm_app_stop_video(cam_id) < 0)
+            goto ERROR;
+          is_rec = 0;
+        }
+#endif
+        break;
+      case ACTION_TAKE_LIVE_SNAPSHOT:
+        printf("Selection for live shot\n");
+#if 0
+        if(is_rec)
+           mm_app_take_live_snapshot(cam_id);
+        else
+           printf("\n !!! Use live snapshot option while recording only !!!\n");
+#endif
+        break;
+
+        case ACTION_TOGGLE_WNR:
+          wnr_enabled = !wnr_enabled;
+          printf("WNR Enabled = %d\n", wnr_enabled);
+          rc = mm_camera_lib_send_command(&lib_handle,
+                                          MM_CAMERA_LIB_WNR_ENABLE,
+                                          &wnr_enabled,
+                                          NULL);
+          if (rc != MM_CAMERA_OK) {
+              LOGE("mm_camera_lib_send_command() err=%d\n",  rc);
+              goto ERROR;
+          }
+          break;
+
+        case ACTION_EXIT:
+            printf("Exiting....\n");
+            break;
+        case ACTION_NO_ACTION:
+            printf("Go back to main menu");
+            break;
+
+        default:
+            printf("\n\n!!!!!WRONG INPUT: %d!!!!\n", action_id);
+            break;
+    }
+
+    /* Simple pacing so menu output is readable between actions. */
+    usleep(1000 * 1000);
+    LOGD("action_id = %d\n", action_id);
+
+  } while (action_id != ACTION_EXIT);
+  action_id = ACTION_NO_ACTION;
+
+    mm_camera_lib_close(&lib_handle);
+    return 0;
+
+ERROR:
+
+    mm_camera_lib_close(&lib_handle);
+
+    return rc;
+}
+
diff --git a/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_preview.c b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_preview.c
new file mode 100644
index 0000000..ce9d632
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_preview.c
@@ -0,0 +1,1315 @@
+/*
+Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of The Linux Foundation nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+// System dependencies
+#include <assert.h>
+#include <errno.h>
+#include <fcntl.h>
+#define MMAN_H <SYSTEM_HEADER_PREFIX/mman.h>
+#include MMAN_H
+
+// Camera dependencies
+#include "mm_qcamera_app.h"
+#include "mm_qcamera_dbg.h"
+#include "mm_qcamera_app.h"
+#include <assert.h>
+#include <sys/mman.h>
+#include <semaphore.h>
+
+/*===========================================================================
+ * FUNCTION   : mm_app_metadata_notify_cb
+ *
+ * DESCRIPTION: Stream callback for metadata buffers. Caches the latest
+ *              metadata into pme->metadata, signals auto-focus completion
+ *              to the waiting app thread, invokes an optional user metadata
+ *              callback, then requeues the buffer to the driver.
+ *
+ * PARAMETERS :
+ *   @bufs      : super buffer holding the metadata frame
+ *   @user_data : opaque pointer to the owning mm_camera_test_obj_t
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_app_metadata_notify_cb(mm_camera_super_buf_t *bufs,
+                                     void *user_data)
+{
+  uint32_t i = 0;
+  mm_camera_channel_t *channel = NULL;
+  mm_camera_stream_t *p_stream = NULL;
+  mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+  mm_camera_buf_def_t *frame;
+  metadata_buffer_t *pMetadata;
+
+  if (NULL == bufs || NULL == user_data) {
+      LOGE("bufs or user_data are not valid ");
+      return;
+  }
+  /* Fallback frame in case the id match below finds nothing. */
+  frame = bufs->bufs[0];
+
+  /* find channel */
+  for (i = 0; i < MM_CHANNEL_TYPE_MAX; i++) {
+      if (pme->channels[i].ch_id == bufs->ch_id) {
+          channel = &pme->channels[i];
+          break;
+      }
+  }
+
+  if (NULL == channel) {
+      LOGE("Channel object is NULL ");
+      return;
+  }
+
+  /* find metadata stream */
+  for (i = 0; i < channel->num_streams; i++) {
+      if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_METADATA) {
+          p_stream = &channel->streams[i];
+          break;
+      }
+  }
+
+  if (NULL == p_stream) {
+      LOGE("cannot find metadata stream");
+      return;
+  }
+
+  /* find metadata frame */
+  for (i = 0; i < bufs->num_bufs; i++) {
+      if (bufs->bufs[i]->stream_id == p_stream->s_id) {
+          frame = bufs->bufs[i];
+          break;
+      }
+  }
+
+  if (pme->metadata == NULL) {
+    /* The app will free the meta data, we don't need to bother here */
+    pme->metadata = malloc(sizeof(metadata_buffer_t));
+    if (NULL == pme->metadata) {
+        LOGE("Canot allocate metadata memory\n");
+        return;
+    }
+  }
+  memcpy(pme->metadata, frame->buffer, sizeof(metadata_buffer_t));
+
+  pMetadata = (metadata_buffer_t *)frame->buffer;
+  IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, pMetadata) {
+    /* Distinguish AF success from failure. The original code matched
+     * NOT_FOCUSED_LOCKED in the first condition as well, which made the
+     * failure branch unreachable. */
+    if ((cam_af_state_t)(*afState) == CAM_AF_STATE_FOCUSED_LOCKED) {
+        LOGE("AutoFocus Done Call Back Received\n");
+        mm_camera_app_done();
+    } else if ((cam_af_state_t)(*afState) == CAM_AF_STATE_NOT_FOCUSED_LOCKED) {
+        LOGE("AutoFocus failed\n");
+        mm_camera_app_done();
+    }
+  }
+
+  if (pme->user_metadata_cb) {
+      /* Original format string had %s with no argument (undefined behavior). */
+      LOGD("[DBG] %s, user defined own metadata cb. calling it...", __func__);
+      pme->user_metadata_cb(frame);
+  }
+
+  if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+                                          bufs->ch_id,
+                                          frame)) {
+      LOGE("Failed in Preview Qbuf\n");
+  }
+  mm_app_cache_ops((mm_camera_app_meminfo_t *)frame->mem_info,
+                   ION_IOC_INV_CACHES);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_app_snapshot_notify_cb
+ *
+ * DESCRIPTION: Snapshot stream callback. Dumps the main (and optional
+ *              postview) YUV frame, allocates a jpeg output buffer and
+ *              kicks off a jpeg encoding session/job. On any failure the
+ *              received frames are requeued to the driver.
+ *
+ * PARAMETERS :
+ *   @bufs      : super buffer with snapshot (+ optional postview) frames
+ *   @user_data : opaque pointer to the owning mm_camera_test_obj_t
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_app_snapshot_notify_cb(mm_camera_super_buf_t *bufs,
+                                      void *user_data)
+{
+
+    int rc = 0;
+    uint32_t i = 0;
+    mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+    mm_camera_channel_t *channel = NULL;
+    mm_camera_stream_t *p_stream = NULL;
+    mm_camera_stream_t *m_stream = NULL;
+    mm_camera_buf_def_t *p_frame = NULL;
+    mm_camera_buf_def_t *m_frame = NULL;
+
+    /* find channel */
+    for (i = 0; i < MM_CHANNEL_TYPE_MAX; i++) {
+        if (pme->channels[i].ch_id == bufs->ch_id) {
+            channel = &pme->channels[i];
+            break;
+        }
+    }
+    if (NULL == channel) {
+        LOGE("Wrong channel id (%d)",  bufs->ch_id);
+        rc = -1;
+        goto error;
+    }
+
+    /* find snapshot stream */
+    for (i = 0; i < channel->num_streams; i++) {
+        if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_SNAPSHOT) {
+            m_stream = &channel->streams[i];
+            break;
+        }
+    }
+    if (NULL == m_stream) {
+        LOGE("cannot find snapshot stream");
+        rc = -1;
+        goto error;
+    }
+
+    /* find snapshot frame */
+    for (i = 0; i < bufs->num_bufs; i++) {
+        if (bufs->bufs[i]->stream_id == m_stream->s_id) {
+            m_frame = bufs->bufs[i];
+            break;
+        }
+    }
+    if (NULL == m_frame) {
+        LOGE("main frame is NULL");
+        rc = -1;
+        goto error;
+    }
+
+    mm_app_dump_frame(m_frame, "main", "yuv", m_frame->frame_idx);
+
+    /* find postview stream (optional; skip dump when absent) */
+    for (i = 0; i < channel->num_streams; i++) {
+        if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_POSTVIEW) {
+            p_stream = &channel->streams[i];
+            break;
+        }
+    }
+    if (NULL != p_stream) {
+        /* find preview frame */
+        for (i = 0; i < bufs->num_bufs; i++) {
+            if (bufs->bufs[i]->stream_id == p_stream->s_id) {
+                p_frame = bufs->bufs[i];
+                break;
+            }
+        }
+        if (NULL != p_frame) {
+            mm_app_dump_frame(p_frame, "postview", "yuv", p_frame->frame_idx);
+        }
+    }
+
+    mm_app_cache_ops((mm_camera_app_meminfo_t *)m_frame->mem_info,
+                     ION_IOC_CLEAN_INV_CACHES);
+
+    pme->jpeg_buf.buf.buffer = (uint8_t *)malloc(m_frame->frame_len);
+    if ( NULL == pme->jpeg_buf.buf.buffer ) {
+        LOGE("error allocating jpeg output buffer");
+        /* Mark the failure so the error path requeues the frames; rc was
+         * previously left at 0 here, leaking the received buffers. */
+        rc = -1;
+        goto error;
+    }
+
+    pme->jpeg_buf.buf.frame_len = m_frame->frame_len;
+    /* create a new jpeg encoding session */
+    rc = createEncodingSession(pme, m_stream, m_frame);
+    if (0 != rc) {
+        LOGE("error creating jpeg session");
+        free(pme->jpeg_buf.buf.buffer);
+        goto error;
+    }
+
+    /* start jpeg encoding job */
+    rc = encodeData(pme, bufs, m_stream);
+    if (0 != rc) {
+        LOGE("error starting jpeg encoding job");
+        free(pme->jpeg_buf.buf.buffer);
+        goto error;
+    }
+
+error:
+    /* buf done rcvd frames in error case */
+    if ( 0 != rc ) {
+        for (i=0; i<bufs->num_bufs; i++) {
+            if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+                                                    bufs->ch_id,
+                                                    bufs->bufs[i])) {
+                LOGE("Failed in Qbuf\n");
+            }
+            mm_app_cache_ops((mm_camera_app_meminfo_t *)bufs->bufs[i]->mem_info,
+                             ION_IOC_INV_CACHES);
+        }
+    }
+
+    LOGD(" END\n");
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_app_preview_notify_cb
+ *
+ * DESCRIPTION: Preview stream callback. Optionally displays the frame on
+ *              the framebuffer overlay, optionally dumps it to a file,
+ *              invokes an optional user preview callback, then requeues
+ *              the buffer to the driver.
+ *
+ * PARAMETERS :
+ *   @bufs      : super buffer holding the preview frame
+ *   @user_data : opaque pointer to the owning mm_camera_test_obj_t
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_app_preview_notify_cb(mm_camera_super_buf_t *bufs,
+                                     void *user_data)
+{
+    uint32_t i = 0;
+    mm_camera_channel_t *channel = NULL;
+    mm_camera_stream_t *p_stream = NULL;
+    mm_camera_buf_def_t *frame = NULL;
+    mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+
+    if (NULL == bufs || NULL == user_data) {
+        LOGE("bufs or user_data are not valid ");
+        return;
+    }
+
+    /* Fallback frame in case the id match below finds nothing. */
+    frame = bufs->bufs[0];
+
+    /* find channel */
+    for (i = 0; i < MM_CHANNEL_TYPE_MAX; i++) {
+        if (pme->channels[i].ch_id == bufs->ch_id) {
+            channel = &pme->channels[i];
+            break;
+        }
+    }
+    if (NULL == channel) {
+        LOGE("Channel object is NULL ");
+        return;
+    }
+    /* find preview stream */
+    for (i = 0; i < channel->num_streams; i++) {
+        if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_PREVIEW) {
+            p_stream = &channel->streams[i];
+            break;
+        }
+    }
+
+    if (NULL == p_stream) {
+        LOGE("cannot find preview stream");
+        return;
+    }
+
+    /* find preview frame */
+    for (i = 0; i < bufs->num_bufs; i++) {
+        if (bufs->bufs[i]->stream_id == p_stream->s_id) {
+            frame = bufs->bufs[i];
+            break;
+        }
+    }
+
+    if ( 0 < pme->fb_fd ) {
+        mm_app_overlay_display(pme, frame->fd);
+    }
+#ifdef DUMP_PRV_IN_FILE
+    {
+        char file_name[64];
+        snprintf(file_name, sizeof(file_name), "P_C%d", pme->cam->camera_handle);
+        mm_app_dump_frame(frame, file_name, "yuv", frame->frame_idx);
+    }
+#endif
+    if (pme->user_preview_cb) {
+        /* Original format string had %s with no argument (undefined behavior). */
+        LOGE("[DBG] %s, user defined own preview cb. calling it...", __func__);
+        pme->user_preview_cb(frame);
+    }
+    if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+                bufs->ch_id,
+                frame)) {
+        LOGE("Failed in Preview Qbuf\n");
+    }
+    mm_app_cache_ops((mm_camera_app_meminfo_t *)frame->mem_info,
+            ION_IOC_INV_CACHES);
+
+    LOGD(" END\n");
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_app_zsl_notify_cb
+ *
+ * DESCRIPTION: ZSL channel callback. Locates the preview, snapshot and
+ *              (optional) metadata frames inside the super buffer, caches
+ *              metadata, displays the preview frame, and either hands the
+ *              snapshot frame to reprocessing, encodes it to JPEG, or
+ *              requeues it. Preview/metadata buffers are requeued at exit.
+ *
+ * PARAMETERS :
+ *   @bufs      : super buffer with preview + snapshot (+ metadata) frames
+ *   @user_data : opaque pointer to the owning mm_camera_test_obj_t
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_app_zsl_notify_cb(mm_camera_super_buf_t *bufs,
+                                 void *user_data)
+{
+    int rc = MM_CAMERA_OK;
+    uint32_t i = 0;
+    mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+    mm_camera_channel_t *channel = NULL;
+    mm_camera_stream_t *p_stream = NULL;
+    mm_camera_stream_t *m_stream = NULL;
+    mm_camera_stream_t *md_stream = NULL;
+    mm_camera_buf_def_t *p_frame = NULL;
+    mm_camera_buf_def_t *m_frame = NULL;
+    mm_camera_buf_def_t *md_frame = NULL;
+
+    LOGD(" BEGIN\n");
+
+    if (NULL == bufs || NULL == user_data) {
+        LOGE("bufs or user_data are not valid ");
+        return;
+    }
+
+    /* find channel */
+    for (i = 0; i < MM_CHANNEL_TYPE_MAX; i++) {
+        if (pme->channels[i].ch_id == bufs->ch_id) {
+            channel = &pme->channels[i];
+            break;
+        }
+    }
+    if (NULL == channel) {
+        LOGE("Wrong channel id (%d)",  bufs->ch_id);
+        return;
+    }
+
+    /* find preview stream */
+    for (i = 0; i < channel->num_streams; i++) {
+        if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_PREVIEW) {
+            p_stream = &channel->streams[i];
+            break;
+        }
+    }
+    if (NULL == p_stream) {
+        LOGE("cannot find preview stream");
+        return;
+    }
+
+    /* find snapshot stream */
+    for (i = 0; i < channel->num_streams; i++) {
+        if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_SNAPSHOT) {
+            m_stream = &channel->streams[i];
+            break;
+        }
+    }
+    if (NULL == m_stream) {
+        LOGE("cannot find snapshot stream");
+        return;
+    }
+
+    /* find metadata stream -- its absence is tolerated (md_stream stays
+     * NULL and the metadata handling below is skipped) */
+    for (i = 0; i < channel->num_streams; i++) {
+        if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_METADATA) {
+            md_stream = &channel->streams[i];
+            break;
+        }
+    }
+    if (NULL == md_stream) {
+        LOGE("cannot find metadata stream");
+    }
+
+    /* find preview frame */
+    for (i = 0; i < bufs->num_bufs; i++) {
+        if (bufs->bufs[i]->stream_id == p_stream->s_id) {
+            p_frame = bufs->bufs[i];
+            break;
+        }
+    }
+
+    if(md_stream) {
+      /* find metadata frame */
+      for (i = 0; i < bufs->num_bufs; i++) {
+          if (bufs->bufs[i]->stream_id == md_stream->s_id) {
+              md_frame = bufs->bufs[i];
+              break;
+          }
+      }
+      if (!md_frame) {
+          LOGE("md_frame is null\n");
+          return;
+      }
+      if (!pme->metadata) {
+          /* App will free the metadata */
+          pme->metadata = malloc(sizeof(metadata_buffer_t));
+          if (!pme->metadata) {
+              ALOGE("not enough memory\n");
+              return;
+          }
+      }
+
+      /* Cache the newest metadata snapshot for the app. */
+      memcpy(pme->metadata , md_frame->buffer, sizeof(metadata_buffer_t));
+    }
+    /* find snapshot frame */
+    for (i = 0; i < bufs->num_bufs; i++) {
+        if (bufs->bufs[i]->stream_id == m_stream->s_id) {
+            m_frame = bufs->bufs[i];
+            break;
+        }
+    }
+
+    if (!m_frame || !p_frame) {
+        LOGE("cannot find preview/snapshot frame");
+        return;
+    }
+
+    LOGD(" ZSL CB with fb_fd = %d, m_frame = %p, p_frame = %p \n",
+         pme->fb_fd,
+         m_frame,
+         p_frame);
+
+    if ( 0 < pme->fb_fd ) {
+        mm_app_overlay_display(pme, p_frame->fd);
+    }/* else {
+        mm_app_dump_frame(p_frame, "zsl_preview", "yuv", p_frame->frame_idx);
+        mm_app_dump_frame(m_frame, "zsl_main", "yuv", m_frame->frame_idx);
+    }*/
+
+    /* NOTE(review): this early return hands ALL buffers to the reprocess
+     * path without the qbuf cleanup done at "exit:" -- presumably
+     * mm_app_do_reprocess requeues them; confirm before relying on it. */
+    if ( pme->enable_reproc && ( NULL != pme->reproc_stream ) ) {
+
+        if (NULL != md_frame) {
+            rc = mm_app_do_reprocess(pme,
+                    m_frame,
+                    md_frame->buf_idx,
+                    bufs,
+                    md_stream);
+
+            if (MM_CAMERA_OK != rc ) {
+                LOGE("reprocess failed rc = %d",  rc);
+            }
+        } else {
+            LOGE("md_frame is null\n");
+        }
+
+      return;
+    }
+
+    if ( pme->encodeJpeg ) {
+        /* NOTE(review): on the two failure "goto exit" paths below,
+         * pme->encodeJpeg is left set (it is only cleared after a
+         * successful encodeData call) -- confirm that is intended. */
+        pme->jpeg_buf.buf.buffer = (uint8_t *)malloc(m_frame->frame_len);
+        if ( NULL == pme->jpeg_buf.buf.buffer ) {
+            LOGE("error allocating jpeg output buffer");
+            goto exit;
+        }
+
+        pme->jpeg_buf.buf.frame_len = m_frame->frame_len;
+        /* create a new jpeg encoding session */
+        rc = createEncodingSession(pme, m_stream, m_frame);
+        if (0 != rc) {
+            LOGE("error creating jpeg session");
+            free(pme->jpeg_buf.buf.buffer);
+            goto exit;
+        }
+
+        /* start jpeg encoding job */
+        rc = encodeData(pme, bufs, m_stream);
+        pme->encodeJpeg = 0;
+    } else {
+        if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+                                                bufs->ch_id,
+                                                m_frame)) {
+            LOGE("Failed in main Qbuf\n");
+        }
+        mm_app_cache_ops((mm_camera_app_meminfo_t *)m_frame->mem_info,
+                         ION_IOC_INV_CACHES);
+    }
+
+exit:
+
+    /* Preview (and metadata, if present) buffers are always returned to
+     * the driver here; p_frame is guaranteed non-NULL by the check above. */
+    if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+                                            bufs->ch_id,
+                                            p_frame)) {
+        LOGE("Failed in preview Qbuf\n");
+    }
+    mm_app_cache_ops((mm_camera_app_meminfo_t *)p_frame->mem_info,
+                     ION_IOC_INV_CACHES);
+
+    if(md_frame) {
+      if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+                                              bufs->ch_id,
+                                              md_frame)) {
+          LOGE("Failed in metadata Qbuf\n");
+      }
+      mm_app_cache_ops((mm_camera_app_meminfo_t *)md_frame->mem_info,
+                       ION_IOC_INV_CACHES);
+    }
+
+    LOGD(" END\n");
+}
+
+/* Adds and configures a metadata stream on the given channel.
+ *
+ * Each buffer carries one metadata_buffer_t blob, encoded as a "frame" of
+ * width sizeof(metadata_buffer_t) and height 1.
+ *
+ * Returns the configured stream, or NULL on failure.
+ * NOTE(review): on config failure the stream obtained from
+ * mm_app_add_stream() is not torn down before returning NULL -- same
+ * pattern as the other add-stream helpers in this file; confirm whether
+ * that is an accepted leak in this test app. */
+mm_camera_stream_t * mm_app_add_metadata_stream(mm_camera_test_obj_t *test_obj,
+                                               mm_camera_channel_t *channel,
+                                               mm_camera_buf_notify_t stream_cb,
+                                               void *userdata,
+                                               uint8_t num_bufs)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_stream_t *stream = NULL;
+    cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+    stream = mm_app_add_stream(test_obj, channel);
+    if (NULL == stream) {
+        LOGE("add stream failed\n");
+        return NULL;
+    }
+
+    /* Buffer allocation / cache maintenance callbacks for this stream. */
+    stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
+    stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
+    stream->s_config.mem_vtbl.clean_invalidate_buf =
+      mm_app_stream_clean_invalidate_buf;
+    stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
+    stream->s_config.mem_vtbl.user_data = (void *)stream;
+    stream->s_config.stream_cb = stream_cb;
+    stream->s_config.stream_cb_sync = NULL;
+    stream->s_config.userdata = userdata;
+    stream->num_of_bufs = num_bufs;
+
+    /* Stream info lives in the shared s_info_buf mapped with the backend. */
+    stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
+    memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
+    stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_METADATA;
+    stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+    stream->s_config.stream_info->fmt = DEFAULT_PREVIEW_FORMAT;
+    /* One metadata blob per buffer: width carries the byte size. */
+    stream->s_config.stream_info->dim.width = sizeof(metadata_buffer_t);
+    stream->s_config.stream_info->dim.height = 1;
+    stream->s_config.padding_info = cam_cap->padding_info;
+
+    rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
+    if (MM_CAMERA_OK != rc) {
+        LOGE("config preview stream err=%d\n",  rc);
+        return NULL;
+    }
+
+    return stream;
+}
+
+/* Computes the analysis-stream dimensions for a given preview size.
+ *
+ * If the preview fits within the FD-still analysis maximum it is used
+ * as-is; otherwise the result is scaled down to fit the maximum while
+ * preserving the preview aspect ratio, with both sides rounded down to
+ * even values (hardware alignment).
+ *
+ * Returns the chosen dimensions; may be {0, 0} if the capability table
+ * reports no analysis resolution. */
+cam_dimension_t mm_app_get_analysis_stream_dim(
+                                               const mm_camera_test_obj_t *test_obj,
+                                               const cam_dimension_t* preview_dim)
+{
+    cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+    cam_dimension_t max_analysis_dim =
+        cam_cap->analysis_info[CAM_ANALYSIS_INFO_FD_STILL].analysis_max_res;
+    cam_dimension_t analysis_dim = {0, 0};
+
+    if (preview_dim->width > max_analysis_dim.width ||
+            preview_dim->height > max_analysis_dim.height) {
+        double max_ratio, requested_ratio;
+
+        max_ratio = (double)max_analysis_dim.width / (double)max_analysis_dim.height;
+        requested_ratio = (double)preview_dim->width / (double)preview_dim->height;
+
+        /* Pin the dimension that limits first, derive the other from the
+         * requested aspect ratio. */
+        if (max_ratio < requested_ratio) {
+            analysis_dim.width = max_analysis_dim.width;
+            analysis_dim.height = (int32_t)((double)max_analysis_dim.width / requested_ratio);
+        } else {
+            analysis_dim.height = max_analysis_dim.height;
+            analysis_dim.width = (int32_t)((double)max_analysis_dim.height * requested_ratio);
+        }
+        /* Round down to even values for alignment. */
+        analysis_dim.width &= ~0x1;
+        analysis_dim.height &= ~0x1;
+    } else {
+        analysis_dim = *preview_dim;
+    }
+
+    LOGI("analysis stream dim (%d x %d)\n",  analysis_dim.width, analysis_dim.height);
+    return analysis_dim;
+}
+
+/* Adds and configures an analysis (FD/PAAF) stream on the given channel.
+ *
+ * Derives the analysis dimensions from the configured (or default)
+ * preview size; if no valid analysis resolution is available the stream
+ * is not created.
+ *
+ * Returns the configured stream, or NULL on failure. */
+mm_camera_stream_t * mm_app_add_analysis_stream(mm_camera_test_obj_t *test_obj,
+                                               mm_camera_channel_t *channel,
+                                               mm_camera_buf_notify_t stream_cb,
+                                               void *userdata,
+                                               uint8_t num_bufs)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_stream_t *stream = NULL;
+    cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+    cam_dimension_t preview_dim = {0, 0};
+    cam_dimension_t analysis_dim = {0, 0};
+
+    /* Resolve the preview dimensions the analysis size is derived from. */
+    if ((test_obj->preview_resolution.user_input_display_width == 0) ||
+           ( test_obj->preview_resolution.user_input_display_height == 0)) {
+        preview_dim.width = DEFAULT_PREVIEW_WIDTH;
+        preview_dim.height = DEFAULT_PREVIEW_HEIGHT;
+    } else {
+        preview_dim.width = test_obj->preview_resolution.user_input_display_width;
+        preview_dim.height = test_obj->preview_resolution.user_input_display_height;
+    }
+
+    analysis_dim = mm_app_get_analysis_stream_dim(test_obj, &preview_dim);
+    LOGI("analysis stream dimesion: %d x %d\n",
+            analysis_dim.width, analysis_dim.height);
+    if (analysis_dim.width == 0 || analysis_dim.height == 0) {
+        /* FD or PAAF might not be enabled; no analysis stream possible. */
+        return NULL;
+    }
+
+    /* BUGFIX: only add the stream once the analysis dimensions are known
+     * to be valid; previously the stream was added first and leaked (left
+     * attached to the channel) on the early return above. */
+    stream = mm_app_add_stream(test_obj, channel);
+    if (NULL == stream) {
+        LOGE("add stream failed\n");
+        return NULL;
+    }
+
+    /* Buffer allocation / cache maintenance callbacks for this stream. */
+    stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
+    stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
+    stream->s_config.mem_vtbl.clean_invalidate_buf =
+      mm_app_stream_clean_invalidate_buf;
+    stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
+    stream->s_config.mem_vtbl.user_data = (void *)stream;
+    stream->s_config.stream_cb = stream_cb;
+    /* Match the sibling add-stream helpers, which initialize the sync
+     * callback explicitly. */
+    stream->s_config.stream_cb_sync = NULL;
+    stream->s_config.userdata = userdata;
+    stream->num_of_bufs = num_bufs;
+
+    stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
+    memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
+    stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_ANALYSIS;
+    stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+    stream->s_config.stream_info->fmt = DEFAULT_PREVIEW_FORMAT;
+    stream->s_config.stream_info->dim = analysis_dim;
+    stream->s_config.padding_info =
+        cam_cap->analysis_info[CAM_ANALYSIS_INFO_FD_STILL].analysis_padding_info;
+
+    rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
+    if (MM_CAMERA_OK != rc) {
+        LOGE("config analysis stream err=%d\n",  rc);
+        return NULL;
+    }
+
+    return stream;
+}
+
+/* Adds and configures a preview stream on the given channel.
+ *
+ * Publishes a two-stream meta-info layout (preview + analysis) to the
+ * backend via setmetainfoCommand() before adding the preview stream
+ * itself.  Preview dimensions come from the user configuration, falling
+ * back to the compile-time defaults.
+ *
+ * Returns the configured stream, or NULL on failure. */
+mm_camera_stream_t * mm_app_add_preview_stream(mm_camera_test_obj_t *test_obj,
+                                               mm_camera_channel_t *channel,
+                                               mm_camera_buf_notify_t stream_cb,
+                                               void *userdata,
+                                               uint8_t num_bufs)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_stream_t *stream = NULL;
+    cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+    cam_dimension_t preview_dim = {0, 0};
+    cam_dimension_t analysis_dim = {0, 0};
+
+    if ((test_obj->preview_resolution.user_input_display_width == 0) ||
+           ( test_obj->preview_resolution.user_input_display_height == 0)) {
+        preview_dim.width = DEFAULT_PREVIEW_WIDTH;
+        preview_dim.height = DEFAULT_PREVIEW_HEIGHT;
+    } else {
+        preview_dim.width = test_obj->preview_resolution.user_input_display_width;
+        preview_dim.height = test_obj->preview_resolution.user_input_display_height;
+    }
+    LOGI("preview dimesion: %d x %d\n",  preview_dim.width, preview_dim.height);
+
+    analysis_dim = mm_app_get_analysis_stream_dim(test_obj, &preview_dim);
+    LOGI("analysis stream dimesion: %d x %d\n",
+            analysis_dim.width, analysis_dim.height);
+
+    /* Restrict the analysis post-processing mask to the features the
+     * sensor actually supports. */
+    uint32_t analysis_pp_mask = cam_cap->qcom_supported_feature_mask &
+                                        (CAM_QCOM_FEATURE_SHARPNESS |
+                                         CAM_QCOM_FEATURE_EFFECT |
+                                         CAM_QCOM_FEATURE_DENOISE2D);
+    LOGI("analysis stream pp mask:%x\n",  analysis_pp_mask);
+
+    cam_stream_size_info_t abc ;
+    memset (&abc , 0, sizeof (cam_stream_size_info_t));
+
+    /* Meta info describing the streams this session will run (preview +
+     * analysis), sent to the backend before stream creation. */
+    abc.num_streams = 2;
+    /* NOTE(review): 2178 is a magic post-process mask for the preview
+     * stream -- its feature bits are not visible here; TODO replace with
+     * named CAM_QCOM_FEATURE_* constants. */
+    abc.postprocess_mask[0] = 2178;
+    abc.stream_sizes[0].width = preview_dim.width;
+    abc.stream_sizes[0].height = preview_dim.height;
+    abc.type[0] = CAM_STREAM_TYPE_PREVIEW;
+
+    abc.postprocess_mask[1] = analysis_pp_mask;
+    abc.stream_sizes[1].width = analysis_dim.width;
+    abc.stream_sizes[1].height = analysis_dim.height;
+    abc.type[1] = CAM_STREAM_TYPE_ANALYSIS;
+
+    abc.buffer_info.min_buffers = 10;
+    abc.buffer_info.max_buffers = 10;
+    abc.is_type[0] = IS_TYPE_NONE;
+
+    rc = setmetainfoCommand(test_obj, &abc);
+    if (rc != MM_CAMERA_OK) {
+       LOGE("meta info command failed\n");
+    }
+
+    stream = mm_app_add_stream(test_obj, channel);
+    if (NULL == stream) {
+        LOGE("add stream failed\n");
+        return NULL;
+    }
+    /* Buffer allocation / cache maintenance callbacks for this stream. */
+    stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
+    stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
+    stream->s_config.mem_vtbl.clean_invalidate_buf =
+      mm_app_stream_clean_invalidate_buf;
+    stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
+    stream->s_config.mem_vtbl.user_data = (void *)stream;
+    stream->s_config.stream_cb = stream_cb;
+    stream->s_config.stream_cb_sync = NULL;
+    stream->s_config.userdata = userdata;
+    stream->num_of_bufs = num_bufs;
+
+    stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
+    memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
+    stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_PREVIEW;
+    stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+    stream->s_config.stream_info->fmt = DEFAULT_PREVIEW_FORMAT;
+
+    stream->s_config.stream_info->dim.width = preview_dim.width;
+    stream->s_config.stream_info->dim.height = preview_dim.height;
+
+    stream->s_config.padding_info = cam_cap->padding_info;
+
+    rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
+    if (MM_CAMERA_OK != rc) {
+        LOGE("config preview stream err=%d\n",  rc);
+        return NULL;
+    }
+
+    return stream;
+}
+
+/* Adds and configures a RAW capture stream on the given channel.
+ *
+ * Publishes a single-stream meta-info layout first, then adds the RAW
+ * stream.  Dimensions come from test_obj->buffer_width/height, falling
+ * back to the default snapshot size; num_burst == 0 selects continuous
+ * streaming, otherwise burst mode with num_burst frames.
+ *
+ * Returns the configured stream, or NULL on failure. */
+mm_camera_stream_t * mm_app_add_raw_stream(mm_camera_test_obj_t *test_obj,
+                                                mm_camera_channel_t *channel,
+                                                mm_camera_buf_notify_t stream_cb,
+                                                void *userdata,
+                                                uint8_t num_bufs,
+                                                uint8_t num_burst)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_stream_t *stream = NULL;
+    cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+
+    cam_stream_size_info_t abc ;
+    memset (&abc , 0, sizeof (cam_stream_size_info_t));
+
+    /* RAW streams bypass post-processing entirely. */
+    abc.num_streams = 1;
+    abc.postprocess_mask[0] = 0;
+
+    if ( test_obj->buffer_width == 0 || test_obj->buffer_height == 0 ) {
+        abc.stream_sizes[0].width = DEFAULT_SNAPSHOT_WIDTH;
+        abc.stream_sizes[0].height = DEFAULT_SNAPSHOT_HEIGHT;
+    } else {
+        abc.stream_sizes[0].width = (int32_t)test_obj->buffer_width;
+        abc.stream_sizes[0].height = (int32_t)test_obj->buffer_height;
+    }
+    abc.type[0] = CAM_STREAM_TYPE_RAW;
+
+    abc.buffer_info.min_buffers = num_bufs;
+    abc.buffer_info.max_buffers = num_bufs;
+    abc.is_type[0] = IS_TYPE_NONE;
+
+    rc = setmetainfoCommand(test_obj, &abc);
+    if (rc != MM_CAMERA_OK) {
+       LOGE("meta info command failed\n");
+    }
+
+    stream = mm_app_add_stream(test_obj, channel);
+    if (NULL == stream) {
+        LOGE("add stream failed\n");
+        return NULL;
+    }
+
+    /* Buffer allocation / cache maintenance callbacks.  NOTE(review):
+     * unlike the other add-stream helpers, clean_invalidate_buf is not
+     * set here -- confirm whether RAW intentionally skips it. */
+    stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
+    stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
+    stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
+    stream->s_config.mem_vtbl.user_data = (void *)stream;
+    stream->s_config.stream_cb = stream_cb;
+    stream->s_config.stream_cb_sync = NULL;
+    stream->s_config.userdata = userdata;
+    stream->num_of_bufs = num_bufs;
+
+    stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
+    memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
+    stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_RAW;
+    if (num_burst == 0) {
+        stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+    } else {
+        stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_BURST;
+        stream->s_config.stream_info->num_of_burst = num_burst;
+    }
+    stream->s_config.stream_info->fmt = test_obj->buffer_format;
+    if ( test_obj->buffer_width == 0 || test_obj->buffer_height == 0 ) {
+        stream->s_config.stream_info->dim.width = DEFAULT_SNAPSHOT_WIDTH;
+        stream->s_config.stream_info->dim.height = DEFAULT_SNAPSHOT_HEIGHT;
+    } else {
+        stream->s_config.stream_info->dim.width = (int32_t)test_obj->buffer_width;
+        stream->s_config.stream_info->dim.height = (int32_t)test_obj->buffer_height;
+    }
+    stream->s_config.padding_info = cam_cap->padding_info;
+
+    rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
+    if (MM_CAMERA_OK != rc) {
+        LOGE("config preview stream err=%d\n",  rc);
+        return NULL;
+    }
+
+    return stream;
+}
+
+/* Adds and configures a snapshot stream on the given channel.
+ *
+ * Publishes a two-stream meta-info layout (snapshot + postview) to the
+ * backend first, then adds the snapshot stream itself.  num_burst == 0
+ * selects continuous streaming; otherwise burst mode with num_burst
+ * frames.
+ *
+ * Returns the configured stream, or NULL on failure. */
+mm_camera_stream_t * mm_app_add_snapshot_stream(mm_camera_test_obj_t *test_obj,
+                                                mm_camera_channel_t *channel,
+                                                mm_camera_buf_notify_t stream_cb,
+                                                void *userdata,
+                                                uint8_t num_bufs,
+                                                uint8_t num_burst)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_stream_t *stream = NULL;
+    cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+    cam_stream_size_info_t abc_snap;
+    memset(&abc_snap, 0, sizeof(cam_stream_size_info_t));
+
+    abc_snap.num_streams = 2;
+    /* NOTE(review): 2178 is a magic post-process mask -- TODO replace
+     * with named CAM_QCOM_FEATURE_* constants. */
+    abc_snap.postprocess_mask[1] = 2178;
+    abc_snap.stream_sizes[1].width = DEFAULT_PREVIEW_WIDTH;
+    abc_snap.stream_sizes[1].height = DEFAULT_PREVIEW_HEIGHT;
+    abc_snap.type[1] = CAM_STREAM_TYPE_POSTVIEW;
+
+    abc_snap.postprocess_mask[0] = 0;
+    abc_snap.stream_sizes[0].width = DEFAULT_SNAPSHOT_WIDTH;
+    abc_snap.stream_sizes[0].height = DEFAULT_SNAPSHOT_HEIGHT;
+    abc_snap.type[0] = CAM_STREAM_TYPE_SNAPSHOT;
+
+    abc_snap.buffer_info.min_buffers = 7;
+    abc_snap.buffer_info.max_buffers = 7;
+    abc_snap.is_type[0] = IS_TYPE_NONE;
+
+    rc = setmetainfoCommand(test_obj, &abc_snap);
+    if (rc != MM_CAMERA_OK) {
+       LOGE("meta info command snapshot failed\n");
+    }
+
+    stream = mm_app_add_stream(test_obj, channel);
+    if (NULL == stream) {
+        LOGE("add stream failed\n");
+        return NULL;
+    }
+
+    /* Buffer allocation / cache maintenance callbacks for this stream. */
+    stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
+    stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
+    stream->s_config.mem_vtbl.clean_invalidate_buf =
+      mm_app_stream_clean_invalidate_buf;
+    stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
+    stream->s_config.mem_vtbl.user_data = (void *)stream;
+    stream->s_config.stream_cb = stream_cb;
+    stream->s_config.stream_cb_sync = NULL;
+    stream->s_config.userdata = userdata;
+    stream->num_of_bufs = num_bufs;
+
+    stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
+    memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
+    stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_SNAPSHOT;
+    if (num_burst == 0) {
+        stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+    } else {
+        stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_BURST;
+        stream->s_config.stream_info->num_of_burst = num_burst;
+    }
+    stream->s_config.stream_info->fmt = DEFAULT_SNAPSHOT_FORMAT;
+    /* BUGFIX: the original if/else on buffer_width/height set the same
+     * default dimensions in both branches, so the check was dead code.
+     * The meta-info above is published with the default snapshot size, so
+     * keep that behavior and drop the misleading branch. */
+    stream->s_config.stream_info->dim.width = DEFAULT_SNAPSHOT_WIDTH;
+    stream->s_config.stream_info->dim.height = DEFAULT_SNAPSHOT_HEIGHT;
+    stream->s_config.padding_info = cam_cap->padding_info;
+
+    rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
+    if (MM_CAMERA_OK != rc) {
+        LOGE("config snapshot stream err=%d\n",  rc);
+        return NULL;
+    }
+
+    return stream;
+}
+
+/* Creates a preview channel and attaches a preview stream to it.
+ * Returns the new channel, or NULL if either step fails (the channel is
+ * torn down again when the stream cannot be added). */
+mm_camera_channel_t * mm_app_add_preview_channel(mm_camera_test_obj_t *test_obj)
+{
+    mm_camera_channel_t *preview_ch =
+        mm_app_add_channel(test_obj, MM_CHANNEL_TYPE_PREVIEW, NULL, NULL, NULL);
+    if (preview_ch == NULL) {
+        LOGE("add channel failed");
+        return NULL;
+    }
+
+    if (mm_app_add_preview_stream(test_obj,
+                                  preview_ch,
+                                  mm_app_preview_notify_cb,
+                                  (void *)test_obj,
+                                  PREVIEW_BUF_NUM) == NULL) {
+        LOGE("add stream failed\n");
+        mm_app_del_channel(test_obj, preview_ch);
+        return NULL;
+    }
+
+    return preview_ch;
+}
+
+/* Stops a channel, deletes all of its streams, clears the backend
+ * meta-info (by sending a zeroed layout), and finally deletes the
+ * channel itself.  Errors are logged but teardown continues; the last
+ * failing step's code is returned.
+ *
+ * Returns MM_CAMERA_OK on full success. */
+int mm_app_stop_and_del_channel(mm_camera_test_obj_t *test_obj,
+                                mm_camera_channel_t *channel)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_stream_t *stream = NULL;
+    uint8_t i;
+    cam_stream_size_info_t abc ;
+    memset (&abc , 0, sizeof (cam_stream_size_info_t));
+
+    rc = mm_app_stop_channel(test_obj, channel);
+    if (MM_CAMERA_OK != rc) {
+        LOGE("Stop Preview failed rc=%d\n",  rc);
+    }
+
+    /* Guard against a corrupted stream count before iterating. */
+    if (channel->num_streams <= MAX_STREAM_NUM_IN_BUNDLE) {
+        for (i = 0; i < channel->num_streams; i++) {
+            stream = &channel->streams[i];
+            rc = mm_app_del_stream(test_obj, channel, stream);
+            if (MM_CAMERA_OK != rc) {
+                LOGE("del stream(%d) failed rc=%d\n",  i, rc);
+            }
+        }
+    } else {
+        LOGE("num_streams = %d. Should not be more than %d\n",
+             channel->num_streams, MAX_STREAM_NUM_IN_BUNDLE);
+    }
+
+    /* Zeroed meta-info tells the backend no streams remain. */
+    rc = setmetainfoCommand(test_obj, &abc);
+    if (rc != MM_CAMERA_OK) {
+       LOGE("meta info command failed\n");
+    }
+
+    rc = mm_app_del_channel(test_obj, channel);
+    if (MM_CAMERA_OK != rc) {
+        LOGE("delete channel failed rc=%d\n",  rc);
+    }
+
+    return rc;
+}
+
+/* Starts preview: creates a preview channel with preview, metadata and
+ * (optionally) analysis streams, then starts the channel.  A missing
+ * analysis stream is tolerated; a missing metadata stream is not.
+ *
+ * Returns MM_CAMERA_OK on success, a negative error code otherwise. */
+int mm_app_start_preview(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_channel_t *channel = NULL;
+    mm_camera_stream_t *stream = NULL;
+    mm_camera_stream_t *s_metadata = NULL;
+    mm_camera_stream_t *s_analysis = NULL;
+    uint8_t i;
+
+    channel =  mm_app_add_preview_channel(test_obj);
+    if (NULL == channel) {
+        LOGE("add channel failed");
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    s_metadata = mm_app_add_metadata_stream(test_obj,
+                                            channel,
+                                            mm_app_metadata_notify_cb,
+                                            (void *)test_obj,
+                                            PREVIEW_BUF_NUM);
+    if (NULL == s_metadata) {
+        LOGE("add metadata stream failed\n");
+        mm_app_del_channel(test_obj, channel);
+        /* BUGFIX: rc was still MM_CAMERA_OK here, so the caller was told
+         * the preview started even though it did not. */
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    /* Analysis stream is best-effort; FD/PAAF may be unavailable. */
+    s_analysis = mm_app_add_analysis_stream(test_obj,
+                                            channel,
+                                            NULL,
+                                            (void *)test_obj,
+                                            PREVIEW_BUF_NUM);
+    if (NULL == s_analysis) {
+        LOGE("Analysis Stream could not be added\n");
+    }
+
+    rc = mm_app_start_channel(test_obj, channel);
+    if (MM_CAMERA_OK != rc) {
+        LOGE("start preview failed rc=%d\n",  rc);
+        if (channel->num_streams <= MAX_STREAM_NUM_IN_BUNDLE) {
+            for (i = 0; i < channel->num_streams; i++) {
+                stream = &channel->streams[i];
+                mm_app_del_stream(test_obj, channel, stream);
+            }
+        }
+        mm_app_del_channel(test_obj, channel);
+        return rc;
+    }
+
+    return rc;
+}
+
+/* Stops preview by tearing down the preview channel and its streams.
+ * Returns the result of mm_app_stop_and_del_channel(). */
+int mm_app_stop_preview(mm_camera_test_obj_t *test_obj)
+{
+    mm_camera_channel_t *preview_ch =
+        mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_PREVIEW);
+
+    int rc = mm_app_stop_and_del_channel(test_obj, preview_ch);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Stop Preview failed rc=%d\n",  rc);
+    }
+
+    return rc;
+}
+
+/* Starts ZSL preview: creates a ZSL channel bundling preview, snapshot
+ * and metadata streams, starts the channel, and optionally brings up the
+ * reprocess pipeline when test_obj->enable_reproc is set.
+ *
+ * Returns MM_CAMERA_OK on success, a negative error code otherwise. */
+int mm_app_start_preview_zsl(mm_camera_test_obj_t *test_obj)
+{
+    int32_t rc = MM_CAMERA_OK;
+    mm_camera_channel_t *channel = NULL;
+    mm_camera_stream_t *s_preview = NULL;
+    mm_camera_stream_t *s_metadata = NULL;
+    mm_camera_stream_t *s_main = NULL;
+    mm_camera_channel_attr_t attr;
+    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
+    attr.look_back = 2;
+    attr.post_frame_skip = 0;
+    attr.water_mark = 2;
+    attr.max_unmatched_frames = 3;
+    channel = mm_app_add_channel(test_obj,
+                                 MM_CHANNEL_TYPE_ZSL,
+                                 &attr,
+                                 mm_app_zsl_notify_cb,
+                                 test_obj);
+    if (NULL == channel) {
+        LOGE("add channel failed");
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    s_preview = mm_app_add_preview_stream(test_obj,
+                                          channel,
+                                          mm_app_preview_notify_cb,
+                                          (void *)test_obj,
+                                          PREVIEW_BUF_NUM);
+    if (NULL == s_preview) {
+        LOGE("add preview stream failed\n");
+        mm_app_del_channel(test_obj, channel);
+        /* BUGFIX: rc was still MM_CAMERA_OK on this and the following
+         * failure paths, so callers saw success after a failed start. */
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    s_main = mm_app_add_snapshot_stream(test_obj,
+                                        channel,
+                                        mm_app_snapshot_notify_cb,
+                                        (void *)test_obj,
+                                        PREVIEW_BUF_NUM,
+                                        0);
+    if (NULL == s_main) {
+        LOGE("add main snapshot stream failed\n");
+        mm_app_del_stream(test_obj, channel, s_preview);
+        mm_app_del_channel(test_obj, channel);
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    s_metadata = mm_app_add_metadata_stream(test_obj,
+                                            channel,
+                                            mm_app_metadata_notify_cb,
+                                            (void *)test_obj,
+                                            PREVIEW_BUF_NUM);
+    if (NULL == s_metadata) {
+        LOGE("add metadata stream failed\n");
+        /* BUGFIX: also tear down the streams already attached; the
+         * original deleted only the channel here. */
+        mm_app_del_stream(test_obj, channel, s_preview);
+        mm_app_del_stream(test_obj, channel, s_main);
+        mm_app_del_channel(test_obj, channel);
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    rc = mm_app_start_channel(test_obj, channel);
+    if (MM_CAMERA_OK != rc) {
+        LOGE("start zsl failed rc=%d\n",  rc);
+        mm_app_del_stream(test_obj, channel, s_preview);
+        mm_app_del_stream(test_obj, channel, s_metadata);
+        mm_app_del_stream(test_obj, channel, s_main);
+        mm_app_del_channel(test_obj, channel);
+        return rc;
+    }
+
+    if ( test_obj->enable_reproc ) {
+        if ( NULL == mm_app_add_reprocess_channel(test_obj, s_main) ) {
+            LOGE("Reprocess channel failed to initialize \n");
+            mm_app_del_stream(test_obj, channel, s_preview);
+            /* BUGFIX: the metadata stream is added unconditionally above,
+             * so delete it unconditionally too (it was previously guarded
+             * by #ifdef USE_METADATA_STREAM and leaked otherwise). */
+            mm_app_del_stream(test_obj, channel, s_metadata);
+            mm_app_del_stream(test_obj, channel, s_main);
+            mm_app_del_channel(test_obj, channel);
+            return -MM_CAMERA_E_GENERAL;
+        }
+        rc = mm_app_start_reprocess(test_obj);
+        if (MM_CAMERA_OK != rc) {
+            LOGE("reprocess start failed rc=%d\n",  rc);
+            mm_app_del_stream(test_obj, channel, s_preview);
+            mm_app_del_stream(test_obj, channel, s_metadata);
+            mm_app_del_stream(test_obj, channel, s_main);
+            mm_app_del_channel(test_obj, channel);
+            return rc;
+        }
+    }
+
+    return rc;
+}
+
+/* Stops ZSL preview: tears down the ZSL channel and, when reprocessing
+ * was enabled, stops the reprocess pipeline as well.  Error codes from
+ * both steps are OR-ed together, matching the original behavior. */
+int mm_app_stop_preview_zsl(mm_camera_test_obj_t *test_obj)
+{
+    mm_camera_channel_t *zsl_ch =
+        mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_ZSL);
+
+    int rc = mm_app_stop_and_del_channel(test_obj, zsl_ch);
+    if (rc != MM_CAMERA_OK) {
+        LOGE("Stop Preview failed rc=%d\n",  rc);
+    }
+
+    if (test_obj->enable_reproc) {
+        rc |= mm_app_stop_reprocess(test_obj);
+    }
+
+    return rc;
+}
+
+/* Opens the framebuffer device, validates the display geometry, raises
+ * the backlight, clears the visible FB region and registers an MDP
+ * overlay sized to the test object's buffer dimensions.
+ *
+ * Precondition: test_obj->fb_fd == 0 (framebuffer not yet initialized).
+ * Returns MM_CAMERA_OK on success; a negative errno or
+ * MM_CAMERA_E_GENERAL on failure.  The FB fd is closed and reset on
+ * every error path. */
+int mm_app_initialize_fb(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+    int brightness_fd;
+    const char brightness_level[] = BACKLIGHT_LEVEL;
+    void *fb_base = NULL;
+
+    assert( ( NULL != test_obj ) && ( 0 == test_obj->fb_fd ) );
+
+    test_obj->fb_fd = open(FB_PATH, O_RDWR);
+    if ( 0 > test_obj->fb_fd ) {
+        LOGE("FB device open failed rc=%d, %s\n",
+                   -errno,
+                   strerror(errno));
+        rc = -errno;
+        goto FAIL;
+    }
+
+    rc = ioctl(test_obj->fb_fd, FBIOGET_VSCREENINFO, &test_obj->vinfo);
+    if ( MM_CAMERA_OK != rc ) {
+        LOGE("Can not retrieve screen info rc=%d, %s\n",
+                   -errno,
+                   strerror(errno));
+        rc = -errno;
+        goto FAIL;
+    }
+
+    if ( ( 0 == test_obj->vinfo.yres_virtual ) ||
+         ( 0 == test_obj->vinfo.yres ) ||
+         ( test_obj->vinfo.yres > test_obj->vinfo.yres_virtual ) ) {
+        LOGE("Invalid FB virtual yres: %d, yres: %d\n",
+                   test_obj->vinfo.yres_virtual,
+                   test_obj->vinfo.yres);
+        rc = MM_CAMERA_E_GENERAL;
+        goto FAIL;
+    }
+
+    if ( ( 0 == test_obj->vinfo.xres_virtual ) ||
+         ( 0 == test_obj->vinfo.xres ) ||
+         ( test_obj->vinfo.xres > test_obj->vinfo.xres_virtual ) ) {
+        LOGE("Invalid FB virtual xres: %d, xres: %d\n",
+                   test_obj->vinfo.xres_virtual,
+                   test_obj->vinfo.xres);
+        rc = MM_CAMERA_E_GENERAL;
+        goto FAIL;
+    }
+
+    /* Backlight control is best-effort; failure to open is ignored. */
+    brightness_fd = open(BACKLIGHT_CONTROL, O_RDWR);
+    if ( brightness_fd >= 0 ) {
+        write(brightness_fd, brightness_level, strlen(brightness_level));
+        close(brightness_fd);
+    }
+
+    test_obj->slice_size = test_obj->vinfo.xres * ( test_obj->vinfo.yres - 1 ) * DEFAULT_OV_FORMAT_BPP;
+    memset(&test_obj->data_overlay, 0, sizeof(struct mdp_overlay));
+    test_obj->data_overlay.src.width  = test_obj->buffer_width;
+    test_obj->data_overlay.src.height = test_obj->buffer_height;
+    test_obj->data_overlay.src_rect.w = test_obj->buffer_width;
+    test_obj->data_overlay.src_rect.h = test_obj->buffer_height;
+    test_obj->data_overlay.dst_rect.w = test_obj->buffer_width;
+    test_obj->data_overlay.dst_rect.h = test_obj->buffer_height;
+    test_obj->data_overlay.src.format = DEFAULT_OV_FORMAT;
+    test_obj->data_overlay.src_rect.x = 0;
+    test_obj->data_overlay.src_rect.y = 0;
+    test_obj->data_overlay.dst_rect.x = 0;
+    test_obj->data_overlay.dst_rect.y = 0;
+    test_obj->data_overlay.z_order = 2;
+    test_obj->data_overlay.alpha = 0x80;
+    test_obj->data_overlay.transp_mask = 0xffe0;
+    test_obj->data_overlay.flags = MDP_FLIP_LR | MDP_FLIP_UD;
+
+    // Map and clear FB portion
+    fb_base = mmap(0,
+                   test_obj->slice_size,
+                   PROT_WRITE,
+                   MAP_SHARED,
+                   test_obj->fb_fd,
+                   0);
+    if ( MAP_FAILED  == fb_base ) {
+            LOGE("( Error while memory mapping frame buffer %s",
+                       strerror(errno));
+            rc = -errno;
+            goto FAIL;
+    }
+
+    memset(fb_base, 0, test_obj->slice_size);
+
+    if (ioctl(test_obj->fb_fd, FBIOPAN_DISPLAY, &test_obj->vinfo) < 0) {
+        LOGE("FBIOPAN_DISPLAY failed!");
+        rc = -errno;
+        /* BUGFIX: unmap before bailing out; the mapping leaked here. */
+        munmap(fb_base, test_obj->slice_size);
+        goto FAIL;
+    }
+
+    munmap(fb_base, test_obj->slice_size);
+    test_obj->data_overlay.id = (uint32_t)MSMFB_NEW_REQUEST;
+    rc = ioctl(test_obj->fb_fd, MSMFB_OVERLAY_SET, &test_obj->data_overlay);
+    if (rc < 0) {
+        /* BUGFIX: the original printed data_overlay.id as the "err" value
+         * and returned without closing the fd; report rc and clean up. */
+        LOGE("MSMFB_OVERLAY_SET failed! err=%d\n", rc);
+        rc = MM_CAMERA_E_GENERAL;
+        goto FAIL;
+    }
+    LOGE("Overlay set with overlay id: %d",  test_obj->data_overlay.id);
+
+    return rc;
+
+FAIL:
+
+    if ( 0 < test_obj->fb_fd ) {
+        close(test_obj->fb_fd);
+        /* Reset so a later mm_app_initialize_fb() call satisfies its
+         * 0 == fb_fd precondition again. */
+        test_obj->fb_fd = 0;
+    }
+
+    return rc;
+}
+
+/* Unsets the MDP overlay, pans the display once more and closes the FB
+ * fd.  ioctl failures are logged but do not abort the close.
+ *
+ * Precondition: test_obj->fb_fd holds an open fd (> 0).
+ * NOTE(review): fb_fd is set to -1 here while mm_app_initialize_fb()
+ * asserts fb_fd == 0 -- confirm which sentinel the rest of the app uses.
+ * Returns MM_CAMERA_OK. */
+int mm_app_close_fb(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+
+    assert( ( NULL != test_obj ) && ( 0 < test_obj->fb_fd ) );
+
+    /* BUGFIX: both LOGE calls below used a %d conversion with no matching
+     * argument (undefined behavior); pass __LINE__ as evidently intended. */
+    if (ioctl(test_obj->fb_fd, MSMFB_OVERLAY_UNSET, &test_obj->data_overlay.id)) {
+        LOGE("\nERROR! MSMFB_OVERLAY_UNSET failed! (Line %d)\n", __LINE__);
+    }
+
+    if (ioctl(test_obj->fb_fd, FBIOPAN_DISPLAY, &test_obj->vinfo) < 0) {
+        LOGE("ERROR: FBIOPAN_DISPLAY failed! line=%d\n", __LINE__);
+    }
+
+    close(test_obj->fb_fd);
+    test_obj->fb_fd = -1;
+
+    return rc;
+}
+
+/* Fills count consecutive 16-bit words starting at pDst with value. */
+void memset16(void *pDst, uint16_t value, int count)
+{
+    uint16_t *dst = (uint16_t *)pDst;
+    for (int i = 0; i < count; i++) {
+        dst[i] = value;
+    }
+}
+
+/* Queues the buffer identified by bufferFd for display through the
+ * configured MDP overlay, then pans the framebuffer so the new content
+ * becomes visible.
+ * Returns MM_CAMERA_OK on success, MM_CAMERA_E_GENERAL on ioctl failure. */
+int mm_app_overlay_display(mm_camera_test_obj_t *test_obj, int bufferFd)
+{
+    struct msmfb_overlay_data overlay_data = {
+        .id = test_obj->data_overlay.id,
+        .data = { .memory_id = bufferFd },
+    };
+
+    if (ioctl(test_obj->fb_fd, MSMFB_OVERLAY_PLAY, &overlay_data) != 0) {
+        LOGE("MSMFB_OVERLAY_PLAY failed!");
+        return MM_CAMERA_E_GENERAL;
+    }
+
+    if (ioctl(test_obj->fb_fd, FBIOPAN_DISPLAY, &test_obj->vinfo) < 0) {
+        LOGE("FBIOPAN_DISPLAY failed!");
+        return MM_CAMERA_E_GENERAL;
+    }
+
+    return MM_CAMERA_OK;
+}
diff --git a/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_queue.c b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_queue.c
new file mode 100644
index 0000000..61176be
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_queue.c
@@ -0,0 +1,168 @@
+/* Copyright (c) 2012, 2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+// Camera dependencies
+#include "mm_qcamera_app.h"
+#include "mm_qcamera_dbg.h"
+
+int mm_camera_queue_init(mm_camera_queue_t *queue,
+                         release_data_fn data_rel_fn,
+                         void *user_data)
+{
+    if ( NULL == queue ) {
+        return -1;
+    }
+
+    pthread_mutex_init(&queue->m_lock, NULL);
+    cam_list_init(&queue->m_head.list);
+    queue->m_size = 0;
+    queue->m_dataFn = data_rel_fn;
+    queue->m_userData = user_data;
+
+    return MM_CAMERA_OK;
+}
+
+int mm_qcamera_queue_release(mm_camera_queue_t *queue)
+{
+    if ( NULL == queue ) {
+        return -1;
+    }
+
+    mm_qcamera_queue_flush(queue);
+    pthread_mutex_destroy(&queue->m_lock);
+
+    return MM_CAMERA_OK;
+}
+
+int mm_qcamera_queue_isempty(mm_camera_queue_t *queue)
+{
+    if ( NULL == queue ) {
+        return 0;
+    }
+
+    int flag = 1;
+    pthread_mutex_lock(&queue->m_lock);
+    if (queue->m_size > 0) {
+        flag = 0;
+    }
+    pthread_mutex_unlock(&queue->m_lock);
+
+    return flag;
+}
+
+int mm_qcamera_queue_enqueue(mm_camera_queue_t *queue, void *data)
+{
+    if ( NULL == queue ) {
+        return -1;
+    }
+
+    camera_q_node *node =
+        (camera_q_node *)malloc(sizeof(camera_q_node));
+    if (NULL == node) {
+        LOGE(" No memory for camera_q_node");
+        return 0;
+    }
+
+    memset(node, 0, sizeof(camera_q_node));
+    node->data = data;
+
+    pthread_mutex_lock(&queue->m_lock);
+    cam_list_add_tail_node(&node->list, &queue->m_head.list);
+    queue->m_size++;
+    pthread_mutex_unlock(&queue->m_lock);
+
+    return 1;
+}
+
+void* mm_qcamera_queue_dequeue(mm_camera_queue_t *queue, int bFromHead)
+{
+    if ( NULL == queue ) {
+        return NULL;
+    }
+
+    camera_q_node* node = NULL;
+    void* data = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    pthread_mutex_lock(&queue->m_lock);
+    head = &queue->m_head.list;
+    if (bFromHead) {
+        pos = head->next;
+    } else {
+        pos = head->prev;
+    }
+    if (pos != head) {
+        node = member_of(pos, camera_q_node, list);
+        cam_list_del_node(&node->list);
+        queue->m_size--;
+    }
+    pthread_mutex_unlock(&queue->m_lock);
+
+    if (NULL != node) {
+        data = node->data;
+        free(node);
+    }
+
+    return data;
+}
+
+void mm_qcamera_queue_flush(mm_camera_queue_t *queue)
+{
+    camera_q_node* node = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    if ( NULL == queue ) {
+        return;
+    }
+
+    pthread_mutex_lock(&queue->m_lock);
+    head = &queue->m_head.list;
+    pos = head->next;
+
+    while(pos != head) {
+        node = member_of(pos, camera_q_node, list);
+        pos = pos->next;
+        cam_list_del_node(&node->list);
+        queue->m_size--;
+
+        if (NULL != node->data) {
+            if (queue->m_dataFn) {
+                queue->m_dataFn(node->data, queue->m_userData);
+            }
+            free(node->data);
+        }
+        free(node);
+
+    }
+    queue->m_size = 0;
+    pthread_mutex_unlock(&queue->m_lock);
+}
+
diff --git a/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_rdi.c b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_rdi.c
new file mode 100644
index 0000000..4c07f6a
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_rdi.c
@@ -0,0 +1,346 @@
+/*
+Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of The Linux Foundation nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+// System dependencies
+#include <fcntl.h>
+
+// Camera dependencies
+#include "mm_qcamera_app.h"
+#include "mm_qcamera_dbg.h"
+
+static uint32_t rdi_len = 0;
+
+static void mm_app_rdi_dump_frame(mm_camera_buf_def_t *frame,
+                                  char *name,
+                                  char *ext,
+                                  uint32_t frame_idx)
+{
+    char file_name[FILENAME_MAX];
+    int file_fd;
+    int i;
+
+    if (frame != NULL) {
+        snprintf(file_name, sizeof(file_name),
+            QCAMERA_DUMP_FRM_LOCATION"%s_%03u.%s", name, frame_idx, ext);
+        file_fd = open(file_name, O_RDWR | O_CREAT, 0777);
+        if (file_fd < 0) {
+            LOGE(" cannot open file %s \n",  file_name);
+        } else {
+            for (i = 0; i < frame->planes_buf.num_planes; i++) {
+                write(file_fd,
+                      (uint8_t *)frame->buffer + frame->planes_buf.planes[i].data_offset,
+                      rdi_len);
+            }
+
+            close(file_fd);
+            LOGD(" dump rdi frame %s", file_name);
+        }
+    }
+}
+
/*
 * Stream callback for RDI frames: dumps the frame to disk, requeues the
 * buffer back to the camera, and invalidates its cache lines.
 * 'user_data' is the mm_camera_test_obj_t registered at stream setup.
 */
static void mm_app_rdi_notify_cb(mm_camera_super_buf_t *bufs,
                                 void *user_data)
{
    char file_name[FILENAME_MAX];
    mm_camera_buf_def_t *frame = bufs->bufs[0];
    mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;

    LOGD(" BEGIN - length=%zu, frame idx = %d stream_id=%d\n",
          frame->frame_len, frame->frame_idx, frame->stream_id);
    /* One dump file per camera handle; frame index appended by the helper. */
    snprintf(file_name, sizeof(file_name), "RDI_dump_%d", pme->cam->camera_handle);
    mm_app_rdi_dump_frame(frame, file_name, "raw", frame->frame_idx);

    /* Return the buffer to the driver so streaming can continue. */
    if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
                                            bufs->ch_id,
                                            frame)) {
        LOGE(" Failed in RDI Qbuf\n");
    }
    /* Invalidate CPU cache so the next DMA-written frame is not read stale. */
    mm_app_cache_ops((mm_camera_app_meminfo_t *)frame->mem_info,
                     ION_IOC_INV_CACHES);

    LOGD(" END\n");
}
+
/*
 * Add and configure a RAW (RDI) stream on 'channel'.
 *
 * Scans the capability table for a usable raw format (the loop keeps
 * overwriting 'fmt', so the LAST supported match wins), publishes the
 * stream size via setmetainfoCommand(), then configures a RAW stream at
 * the sensor's native raw dimensions. Also records the per-plane length
 * in the file-static 'rdi_len' used by mm_app_rdi_dump_frame().
 *
 * @stream_cb : per-frame callback; @userdata is passed through to it.
 * @num_bufs  : buffers to allocate for the stream.
 * @num_burst : 0 = continuous streaming, otherwise burst of that count.
 *
 * Returns the configured stream, or NULL on failure.
 */
mm_camera_stream_t * mm_app_add_rdi_stream(mm_camera_test_obj_t *test_obj,
                                               mm_camera_channel_t *channel,
                                               mm_camera_buf_notify_t stream_cb,
                                               void *userdata,
                                               uint8_t num_bufs,
                                               uint8_t num_burst)
{
    int rc = MM_CAMERA_OK;
    size_t i;
    mm_camera_stream_t *stream = NULL;
    cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
    cam_format_t fmt = CAM_FORMAT_MAX;
    cam_stream_buf_plane_info_t *buf_planes;
    cam_stream_size_info_t abc ;
    memset (&abc , 0, sizeof (cam_stream_size_info_t));



    LOGE(" raw_dim w:%d height:%d\n",  cam_cap->raw_dim[0].width, cam_cap->raw_dim[0].height);
    /* Pick a raw format: MIPI 8..12bpp Bayer range, meta/jpeg raw 8-bit,
     * or 14bpp BGGR. fmt stays CAM_FORMAT_MAX when nothing matches. */
    for (i = 0;i < cam_cap->supported_raw_fmt_cnt;i++) {
        LOGE(" supported_raw_fmts[%zd]=%d\n",
            i, (int)cam_cap->supported_raw_fmts[i]);
        if (((CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG <= cam_cap->supported_raw_fmts[i]) &&
            (CAM_FORMAT_BAYER_MIPI_RAW_12BPP_BGGR >= cam_cap->supported_raw_fmts[i])) ||
            (cam_cap->supported_raw_fmts[i] == CAM_FORMAT_META_RAW_8BIT) ||
            (cam_cap->supported_raw_fmts[i] == CAM_FORMAT_JPEG_RAW_8BIT) ||
            (cam_cap->supported_raw_fmts[i] == CAM_FORMAT_BAYER_MIPI_RAW_14BPP_BGGR))
        {
            fmt = cam_cap->supported_raw_fmts[i];
            LOGE(" fmt=%d\n",  fmt);
        }
    }

    if (CAM_FORMAT_MAX == fmt) {
        LOGE(" rdi format not supported\n");
        return NULL;
    }

    /* Advertise the single RAW stream's size/type to the backend. */
    abc.num_streams = 1;
    abc.postprocess_mask[0] = 0;
    abc.stream_sizes[0].width = cam_cap->raw_dim[0].width;
    abc.stream_sizes[0].height = cam_cap->raw_dim[0].height;
    abc.type[0] = CAM_STREAM_TYPE_RAW;
    abc.buffer_info.min_buffers = num_bufs;
    abc.buffer_info.max_buffers = num_bufs;
    abc.is_type[0] = IS_TYPE_NONE;

    rc = setmetainfoCommand(test_obj, &abc);
    if (rc != MM_CAMERA_OK) {
       LOGE(" meta info command failed\n");
    }

    stream = mm_app_add_stream(test_obj, channel);
    if (NULL == stream) {
        LOGE(" add stream failed\n");
        return NULL;
    }

    /* Wire up buffer-management callbacks; user_data is the stream itself. */
    stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
    stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
    stream->s_config.mem_vtbl.clean_invalidate_buf =
      mm_app_stream_clean_invalidate_buf;
    stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
    stream->s_config.mem_vtbl.user_data = (void *)stream;
    stream->s_config.stream_cb = stream_cb;
    stream->s_config.stream_cb_sync = NULL;
    stream->s_config.userdata = userdata;
    stream->num_of_bufs = num_bufs;

    stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
    memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
    stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_RAW;
    if (num_burst == 0) {
        stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
    } else {
        stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_BURST;
        stream->s_config.stream_info->num_of_burst = num_burst;
    }
    /* NOTE(review): DEFAULT_RAW_FORMAT is used here even though 'fmt' was
     * selected above — confirm whether 'fmt' was meant to be applied. */
    stream->s_config.stream_info->fmt = DEFAULT_RAW_FORMAT;
    LOGD(" RAW: w: %d, h: %d ",
       cam_cap->raw_dim[0].width, cam_cap->raw_dim[0].height);

    stream->s_config.stream_info->dim.width = cam_cap->raw_dim[0].width;
    stream->s_config.stream_info->dim.height = cam_cap->raw_dim[0].height;
    stream->s_config.padding_info = cam_cap->padding_info;

    rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
    if (MM_CAMERA_OK != rc) {
        LOGE("config rdi stream err=%d\n",  rc);
        return NULL;
    }

    /* Remember the per-plane length for the frame dump helper. */
    buf_planes = &stream->s_config.stream_info->buf_planes;
    rdi_len = buf_planes->plane_info.mp[0].len;
    LOGD(" plane_info %dx%d len:%d frame_len:%d\n",
        buf_planes->plane_info.mp[0].stride, buf_planes->plane_info.mp[0].scanline,
        buf_planes->plane_info.mp[0].len, buf_planes->plane_info.frame_len);

    return stream;
}
+
/*
 * Add and configure a SNAPSHOT stream on 'channel' at the default
 * snapshot size/format (despite the "rdi" name, the stream type set
 * below is CAM_STREAM_TYPE_SNAPSHOT).
 *
 * @num_burst : 0 = continuous streaming, otherwise burst of that count.
 *
 * Returns the configured stream, or NULL on failure.
 */
mm_camera_stream_t * mm_app_add_rdi_snapshot_stream(mm_camera_test_obj_t *test_obj,
                                                mm_camera_channel_t *channel,
                                                mm_camera_buf_notify_t stream_cb,
                                                void *userdata,
                                                uint8_t num_bufs,
                                                uint8_t num_burst)
{
    int rc = MM_CAMERA_OK;
    mm_camera_stream_t *stream = NULL;
    cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);

    stream = mm_app_add_stream(test_obj, channel);
    if (NULL == stream) {
        LOGE(" add stream failed\n");
        return NULL;
    }

    /* Standard buffer-management callbacks; user_data is the stream itself. */
    stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
    stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
    stream->s_config.mem_vtbl.clean_invalidate_buf =
      mm_app_stream_clean_invalidate_buf;
    stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
    stream->s_config.mem_vtbl.user_data = (void *)stream;
    stream->s_config.stream_cb = stream_cb;
    stream->s_config.stream_cb_sync = NULL;
    stream->s_config.userdata = userdata;
    stream->num_of_bufs = num_bufs;

    stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
    memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
    stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_SNAPSHOT;
    if (num_burst == 0) {
        stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
    } else {
        stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_BURST;
        stream->s_config.stream_info->num_of_burst = num_burst;
    }
    stream->s_config.stream_info->fmt = DEFAULT_SNAPSHOT_FORMAT;
    stream->s_config.stream_info->dim.width = DEFAULT_SNAPSHOT_WIDTH;
    stream->s_config.stream_info->dim.height = DEFAULT_SNAPSHOT_HEIGHT;
    stream->s_config.padding_info = cam_cap->padding_info;

    rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
    if (MM_CAMERA_OK != rc) {
        LOGE("config rdi stream err=%d\n",  rc);
        return NULL;
    }

    return stream;
}
+
+mm_camera_channel_t * mm_app_add_rdi_channel(mm_camera_test_obj_t *test_obj, uint8_t num_burst)
+{
+    mm_camera_channel_t *channel = NULL;
+    mm_camera_stream_t *stream = NULL;
+
+    channel = mm_app_add_channel(test_obj,
+                                 MM_CHANNEL_TYPE_RDI,
+                                 NULL,
+                                 NULL,
+                                 NULL);
+    if (NULL == channel) {
+        LOGE(" add channel failed");
+        return NULL;
+    }
+
+    stream = mm_app_add_rdi_stream(test_obj,
+                                       channel,
+                                       mm_app_rdi_notify_cb,
+                                       (void *)test_obj,
+                                       RDI_BUF_NUM,
+                                       num_burst);
+    if (NULL == stream) {
+        LOGE(" add stream failed\n");
+        mm_app_del_channel(test_obj, channel);
+        return NULL;
+    }
+
+    LOGD(" channel=%d stream=%d\n",  channel->ch_id, stream->s_id);
+    return channel;
+}
+
/*
 * Stop the RDI channel, delete each of its streams, clear the meta
 * stream-size info in the backend, then delete the channel itself.
 * Failures at each step are logged but teardown continues; the last
 * step's rc is returned.
 */
int mm_app_stop_and_del_rdi_channel(mm_camera_test_obj_t *test_obj,
                                mm_camera_channel_t *channel)
{
    int rc = MM_CAMERA_OK;
    mm_camera_stream_t *stream = NULL;
    uint8_t i;
    cam_stream_size_info_t abc ;
    memset (&abc , 0, sizeof (cam_stream_size_info_t));

    rc = mm_app_stop_channel(test_obj, channel);
    if (MM_CAMERA_OK != rc) {
        LOGE("Stop RDI failed rc=%d\n",  rc);
    }

    /* Guard against a corrupted stream count before iterating. */
    if (channel->num_streams <= MAX_STREAM_NUM_IN_BUNDLE) {
        for (i = 0; i < channel->num_streams; i++) {
            stream = &channel->streams[i];
            rc = mm_app_del_stream(test_obj, channel, stream);
            if (MM_CAMERA_OK != rc) {
                LOGE("del stream(%d) failed rc=%d\n",  i, rc);
            }
        }
    } else {
        LOGE(" num_streams = %d. Should not be more than %d\n",
             channel->num_streams, MAX_STREAM_NUM_IN_BUNDLE);
    }
    /* Zeroed info tells the backend no streams remain. */
    rc = setmetainfoCommand(test_obj, &abc);
    if (rc != MM_CAMERA_OK) {
       LOGE(" meta info command failed\n");
    }
    rc = mm_app_del_channel(test_obj, channel);
    if (MM_CAMERA_OK != rc) {
        LOGE("delete channel failed rc=%d\n",  rc);
    }

    return rc;
}
+
+int mm_app_start_rdi(mm_camera_test_obj_t *test_obj, uint8_t num_burst)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_channel_t *channel = NULL;
+
+    channel = mm_app_add_rdi_channel(test_obj, num_burst);
+    if (NULL == channel) {
+        LOGE(" add channel failed");
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    rc = mm_app_start_channel(test_obj, channel);
+    if (MM_CAMERA_OK != rc) {
+        LOGE("start rdi failed rc=%d\n",  rc);
+        mm_app_del_channel(test_obj, channel);
+        return rc;
+    }
+
+    return rc;
+}
+
+int mm_app_stop_rdi(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+
+    mm_camera_channel_t *channel =
+        mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_RDI);
+
+    rc = mm_app_stop_and_del_rdi_channel(test_obj, channel);
+    if (MM_CAMERA_OK != rc) {
+        LOGE("Stop RDI failed rc=%d\n",  rc);
+    }
+
+    return rc;
+}
+
diff --git a/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_reprocess.c b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_reprocess.c
new file mode 100644
index 0000000..4ed4c5d
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_reprocess.c
@@ -0,0 +1,349 @@
+/*
+Copyright (c) 2012-2014, 2016, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of The Linux Foundation nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+// Camera dependencies
+#include "mm_qcamera_app.h"
+#include "mm_qcamera_dbg.h"
+
+static void mm_app_reprocess_notify_cb(mm_camera_super_buf_t *bufs,
+                                   void *user_data)
+{
+    mm_camera_buf_def_t *frame = bufs->bufs[0];
+    mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+    mm_camera_channel_t *channel = NULL;
+    mm_camera_stream_t *m_stream = NULL;
+    mm_camera_buf_def_t *m_frame = NULL;
+    mm_camera_super_buf_t *src_frame;
+    int i = 0;
+    int rc = 0;
+
+    LOGE(" BEGIN - length=%zu, frame idx = %d\n",
+          frame->frame_len, frame->frame_idx);
+
+    /* find channel */
+    for (i = 0; i < MM_CHANNEL_TYPE_MAX; i++) {
+        if (pme->channels[i].ch_id == bufs->ch_id) {
+            channel = &pme->channels[i];
+            break;
+        }
+    }
+    if (NULL == channel) {
+        LOGE(" Wrong channel id (%d)",  bufs->ch_id);
+        return;
+    }
+
+    // We have only one stream and buffer
+    // in the reprocess channel.
+    m_stream = &channel->streams[0];
+    m_frame = bufs->bufs[0];
+
+    if ( pme->encodeJpeg ) {
+        pme->jpeg_buf.buf.buffer = (uint8_t *)malloc(m_frame->frame_len);
+        if ( NULL == pme->jpeg_buf.buf.buffer ) {
+            LOGE(" error allocating jpeg output buffer");
+            goto exit;
+        }
+
+        pme->jpeg_buf.buf.frame_len = m_frame->frame_len;
+        /* create a new jpeg encoding session */
+        rc = createEncodingSession(pme, m_stream, m_frame);
+        if (0 != rc) {
+            LOGE(" error creating jpeg session");
+            free(pme->jpeg_buf.buf.buffer);
+            goto exit;
+        }
+
+        /* start jpeg encoding job */
+        LOGE("Encoding reprocessed frame!!");
+        rc = encodeData(pme, bufs, m_stream);
+        pme->encodeJpeg = 0;
+    } else {
+        if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+                                                bufs->ch_id,
+                                                frame)) {
+            LOGE(" Failed in Reprocess Qbuf\n");
+        }
+        mm_app_cache_ops((mm_camera_app_meminfo_t *)frame->mem_info,
+                         ION_IOC_INV_CACHES);
+    }
+
+exit:
+
+// Release source frame
+    src_frame = ( mm_camera_super_buf_t * ) mm_qcamera_queue_dequeue(&pme->pp_frames, 1);
+    if ( NULL != src_frame ) {
+        mm_app_release_ppinput((void *) src_frame, (void *) pme);
+    }
+
+    LOGE(" END\n");
+}
+
/*
 * Add an offline-reprocess stream on 'channel' that consumes frames from
 * 'source'. The new stream inherits the source stream's format and
 * dimensions and is linked to it via the reprocess_config online fields.
 *
 * @pp_config : post-processing features to apply during reprocess.
 * @num_bufs  : buffers to allocate for the reprocess stream.
 *
 * Returns the configured stream, or NULL on failure.
 */
mm_camera_stream_t * mm_app_add_reprocess_stream_from_source(mm_camera_test_obj_t *test_obj,
                                                             mm_camera_channel_t *channel,
                                                             mm_camera_stream_t *source,
                                                             mm_camera_buf_notify_t stream_cb,
                                                             cam_pp_feature_config_t pp_config,
                                                             void *userdata,
                                                             uint8_t num_bufs)
{
    int rc = MM_CAMERA_OK;
    mm_camera_stream_t *stream = NULL;
    cam_capability_t *cam_cap = NULL;
    cam_stream_info_t *source_stream_info;

    if ( ( NULL == test_obj ) ||
         ( NULL == channel ) ||
         ( NULL == source ) ) {
        LOGE(" Invalid input\n");
        return NULL;
    }

    cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);

    stream = mm_app_add_stream(test_obj, channel);
    if (NULL == stream) {
        LOGE(" add stream failed\n");
        return NULL;
    }

    /* Standard buffer-management callbacks; user_data is the stream itself. */
    stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
    stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
    stream->s_config.mem_vtbl.clean_invalidate_buf =
      mm_app_stream_clean_invalidate_buf;
    stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
    stream->s_config.mem_vtbl.user_data = (void *)stream;
    stream->s_config.stream_cb = stream_cb;
    stream->s_config.stream_cb_sync = NULL;
    stream->s_config.userdata = userdata;
    stream->num_of_bufs = num_bufs;

    stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
    source_stream_info = (cam_stream_info_t *) source->s_info_buf.buf.buffer;
    memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
    stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_OFFLINE_PROC;
    stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
    /* Mirror the source stream's format/size: reprocess is 1:1. */
    stream->s_config.stream_info->fmt = source_stream_info->fmt;
    stream->s_config.stream_info->dim = source_stream_info->dim;
    stream->s_config.padding_info = cam_cap->padding_info;


    /* Link this stream to its input stream for online reprocessing. */
    stream->s_config.stream_info->reprocess_config.pp_type = CAM_ONLINE_REPROCESS_TYPE;
    stream->s_config.stream_info->reprocess_config.online.input_stream_id = source->s_config.stream_info->stream_svr_id;
    stream->s_config.stream_info->reprocess_config.online.input_stream_type = source->s_config.stream_info->stream_type;
    stream->s_config.stream_info->reprocess_config.pp_feature_config = pp_config;

    rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
    if (MM_CAMERA_OK != rc) {
        LOGE("config preview stream err=%d\n",  rc);
        return NULL;
    }

    return stream;
}
+
/*
 * Create a reprocess channel fed by 'source_stream'.
 *
 * Builds a pp feature config from the test object's settings (sharpness
 * when the HW supports it, wavelet denoise, CAC), adds a reprocess
 * stream from the source, and records it in test_obj->reproc_stream.
 *
 * Returns the channel, or NULL on failure (channel cleaned up).
 */
mm_camera_channel_t * mm_app_add_reprocess_channel(mm_camera_test_obj_t *test_obj,
                                                   mm_camera_stream_t *source_stream)
{
    mm_camera_channel_t *channel = NULL;
    mm_camera_stream_t *stream = NULL;

    if ( NULL == source_stream ) {
        LOGE(" add reprocess stream failed\n");
        return NULL;
    }

    channel = mm_app_add_channel(test_obj,
                                 MM_CHANNEL_TYPE_REPROCESS,
                                 NULL,
                                 NULL,
                                 NULL);
    if (NULL == channel) {
        LOGE(" add channel failed");
        return NULL;
    }

    // pp feature config
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    /* Enable sharpness only if the capability mask says the HW has it. */
    cam_capability_t *caps = ( cam_capability_t * ) ( test_obj->cap_buf.buf.buffer );
    if (caps->qcom_supported_feature_mask & CAM_QCOM_FEATURE_SHARPNESS) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
        pp_config.sharpness = test_obj->reproc_sharpness;
    }

    if (test_obj->reproc_wnr.denoise_enable) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
        pp_config.denoise2d = test_obj->reproc_wnr;
    }

    if (test_obj->enable_CAC) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_CAC;
    }

    /* Reprocess needs as many buffers as the source stream provides. */
    uint8_t minStreamBufNum = source_stream->num_of_bufs;
    stream = mm_app_add_reprocess_stream_from_source(test_obj,
                                     channel,
                                     source_stream,
                                     mm_app_reprocess_notify_cb,
                                     pp_config,
                                     (void *)test_obj,
                                     minStreamBufNum);
    if (NULL == stream) {
        LOGE(" add reprocess stream failed\n");
        mm_app_del_channel(test_obj, channel);
        return NULL;
    }
    /* Remembered for mm_app_do_reprocess / mm_app_stop_reprocess. */
    test_obj->reproc_stream = stream;

    return channel;
}
+
+int mm_app_start_reprocess(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_channel_t *r_ch = NULL;
+
+    mm_camera_queue_init(&test_obj->pp_frames,
+                         mm_app_release_ppinput,
+                         ( void * ) test_obj);
+
+    r_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_REPROCESS);
+    if (MM_CAMERA_OK != rc) {
+        LOGE(" No initialized reprocess channel d rc=%d\n", rc);
+        return rc;
+    }
+
+    rc = mm_app_start_channel(test_obj, r_ch);
+    if (MM_CAMERA_OK != rc) {
+        LOGE("start reprocess failed rc=%d\n",  rc);
+        mm_app_del_channel(test_obj, r_ch);
+        return rc;
+    }
+
+    return rc;
+}
+
+int mm_app_stop_reprocess(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_channel_t *r_ch = NULL;
+
+    r_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_REPROCESS);
+    if (MM_CAMERA_OK != rc) {
+        LOGE(" No initialized reprocess channel d rc=%d\n", rc);
+        return rc;
+    }
+
+    rc = mm_app_stop_and_del_channel(test_obj, r_ch);
+    if (MM_CAMERA_OK != rc) {
+        LOGE("Stop Preview failed rc=%d\n",  rc);
+    }
+
+    mm_qcamera_queue_release(&test_obj->pp_frames);
+    test_obj->reproc_stream = NULL;
+
+    return rc;
+}
+
+int mm_app_do_reprocess(mm_camera_test_obj_t *test_obj,
+                        mm_camera_buf_def_t *frame,
+                        uint32_t meta_idx,
+                        mm_camera_super_buf_t *super_buf,
+                        mm_camera_stream_t *src_meta)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_channel_t *r_ch = NULL;
+    mm_camera_super_buf_t *src_buf = NULL;
+
+    if ( ( NULL == test_obj ) ||
+         ( NULL == frame ) ||
+         ( NULL == super_buf )) {
+        LOGE(" Invalid input rc=%d\n", rc);
+        return rc;
+    }
+
+    if ( NULL == test_obj->reproc_stream ) {
+        LOGE(" No reprocess stream rc=%d\n", rc);
+        return rc;
+    }
+
+    r_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_REPROCESS);
+    if (MM_CAMERA_OK != rc) {
+        LOGE(" No reprocess channel rc=%d\n", rc);
+        return rc;
+    }
+
+    src_buf = ( mm_camera_super_buf_t * ) malloc(sizeof(mm_camera_super_buf_t));
+    if ( NULL == src_buf ) {
+        LOGE(" No resources for src frame rc=%d\n", rc);
+        return -1;
+    }
+    memcpy(src_buf, super_buf, sizeof(mm_camera_super_buf_t));
+    mm_qcamera_queue_enqueue(&test_obj->pp_frames, src_buf);
+
+    cam_stream_parm_buffer_t param;
+    memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+    param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
+    param.reprocess.buf_index = frame->buf_idx;
+    param.reprocess.frame_idx = frame->frame_idx;
+    if (src_meta != NULL) {
+        param.reprocess.meta_present = 1;
+        param.reprocess.meta_stream_handle = src_meta->s_config.stream_info->stream_svr_id;
+        param.reprocess.meta_buf_index = meta_idx;
+    } else {
+        LOGE(" No metadata source stream rc=%d\n", rc);
+    }
+
+    test_obj->reproc_stream->s_config.stream_info->parm_buf = param;
+    rc = test_obj->cam->ops->set_stream_parms(test_obj->cam->camera_handle,
+                                              r_ch->ch_id,
+                                              test_obj->reproc_stream->s_id,
+                                              &test_obj->reproc_stream->s_config.stream_info->parm_buf);
+
+    return rc;
+}
+
/*
 * Release callback for pp_frames entries: requeue every buffer of the
 * source superbuf back to the camera and invalidate its cache lines.
 *
 * @data      : mm_camera_super_buf_t* queued by mm_app_do_reprocess.
 * @user_data : mm_camera_test_obj_t* registered at queue init.
 * Note: the superbuf struct itself is freed by the queue, not here.
 */
void mm_app_release_ppinput(void *data, void *user_data)
{
    uint32_t i = 0;
    mm_camera_super_buf_t *recvd_frame  = ( mm_camera_super_buf_t * ) data;
    mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;

    for ( i = 0 ; i < recvd_frame->num_bufs ; i++) {
        if (MM_CAMERA_OK != pme->cam->ops->qbuf(pme->cam->camera_handle,
                                                recvd_frame->ch_id,
                                                recvd_frame->bufs[i])) {
            LOGE(" Failed in Qbuf\n");
        }
        /* Invalidate cache so future DMA writes are not shadowed by stale lines. */
        mm_app_cache_ops((mm_camera_app_meminfo_t *) recvd_frame->bufs[i]->mem_info,
                         ION_IOC_INV_CACHES);
    }
}
+
diff --git a/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_snapshot.c b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_snapshot.c
new file mode 100644
index 0000000..b56e6b4
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_snapshot.c
@@ -0,0 +1,711 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// Camera dependencies
+#include "mm_qcamera_app.h"
+#include "mm_qcamera_dbg.h"
+
+/* This callback is received once the complete JPEG encoding is done.
+ *
+ * On completion it dumps the JPEG to disk (on success), requeues every
+ * buffer of the saved super-frame back to the camera, frees the JPEG
+ * output buffer and the frame-list copy made by encodeData(), and wakes
+ * the app thread waiting in mm_camera_app_wait(). */
+static void jpeg_encode_cb(jpeg_job_status_t status,
+                           uint32_t client_hdl,
+                           uint32_t jobId,
+                           mm_jpeg_output_t *p_buf,
+                           void *userData)
+{
+    uint32_t i = 0;
+    mm_camera_test_obj_t *pme = NULL;
+    LOGD(" BEGIN\n");
+
+    pme = (mm_camera_test_obj_t *)userData;
+    /* ignore callbacks that do not match the job we submitted */
+    if (pme->jpeg_hdl != client_hdl ||
+        jobId != pme->current_job_id ||
+        !pme->current_job_frames) {
+        LOGE(" NULL current job frames or not matching job ID (%d, %d)",
+                    jobId, pme->current_job_id);
+        return;
+    }
+
+    /* dump jpeg img */
+    LOGE(" job %d, status=%d",  jobId, status);
+    if (status == JPEG_JOB_STATUS_DONE && p_buf != NULL) {
+        mm_app_dump_jpeg_frame(p_buf->buf_vaddr, p_buf->buf_filled_len, "jpeg", "jpg", jobId);
+    }
+
+    /* buf done current encoding frames */
+    pme->current_job_id = 0;
+    for (i = 0; i < pme->current_job_frames->num_bufs; i++) {
+        if (MM_CAMERA_OK != pme->cam->ops->qbuf(pme->current_job_frames->camera_handle,
+                                                pme->current_job_frames->ch_id,
+                                                pme->current_job_frames->bufs[i])) {
+            LOGE(" Failed in Qbuf\n");
+        }
+        mm_app_cache_ops((mm_camera_app_meminfo_t *) pme->current_job_frames->bufs[i]->mem_info,
+                         ION_IOC_INV_CACHES);
+    }
+
+    /* both allocations were made on the snapshot path; ownership ends here */
+    free(pme->jpeg_buf.buf.buffer);
+    free(pme->current_job_frames);
+    pme->current_job_frames = NULL;
+
+    /* signal snapshot is done */
+    mm_camera_app_done();
+}
+
+/* Submit a JPEG encode job for the received snapshot super-buffer.
+ *
+ * Keeps a heap copy of recvd_frame in test_obj->current_job_frames so
+ * jpeg_encode_cb() can requeue the buffers once encoding completes; the
+ * copy is freed there, or here if submission fails.
+ * Returns 0 on success, a negative MM_CAMERA error otherwise. */
+int encodeData(mm_camera_test_obj_t *test_obj, mm_camera_super_buf_t* recvd_frame,
+               mm_camera_stream_t *m_stream)
+{
+    cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+
+    int rc = -MM_CAMERA_E_GENERAL;
+    mm_jpeg_job_t job;
+
+    /* remember current frames being encoded */
+    test_obj->current_job_frames =
+        (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+    if (!test_obj->current_job_frames) {
+        LOGE(" No memory for current_job_frames");
+        return rc;
+    }
+    *(test_obj->current_job_frames) = *recvd_frame;
+
+    memset(&job, 0, sizeof(job));
+    job.job_type = JPEG_JOB_TYPE_ENCODE;
+    job.encode_job.session_id = test_obj->current_jpeg_sess_id;
+
+    // TODO: Rotation should be set according to
+    //       sensor&device orientation
+    job.encode_job.rotation = 0;
+    if (cam_cap->position == CAM_POSITION_BACK) {
+        job.encode_job.rotation = 270;
+    }
+
+    /* fill in main src img encode param */
+    job.encode_job.main_dim.src_dim = m_stream->s_config.stream_info->dim;
+    job.encode_job.main_dim.dst_dim = m_stream->s_config.stream_info->dim;
+    job.encode_job.src_index = 0;
+
+    /* thumbnail is scaled down to the default preview size */
+    job.encode_job.thumb_dim.src_dim = m_stream->s_config.stream_info->dim;
+    job.encode_job.thumb_dim.dst_dim.width = DEFAULT_PREVIEW_WIDTH;
+    job.encode_job.thumb_dim.dst_dim.height = DEFAULT_PREVIEW_HEIGHT;
+
+    /* fill in sink img param */
+    job.encode_job.dst_index = 0;
+
+    /* metadata cached by the metadata stream callback, if available */
+    if (test_obj->metadata != NULL) {
+        job.encode_job.p_metadata = test_obj->metadata;
+    } else {
+        LOGE(" Metadata null, not set for jpeg encoding");
+    }
+
+    rc = test_obj->jpeg_ops.start_job(&job, &test_obj->current_job_id);
+    if ( 0 != rc ) {
+        /* submission failed: drop the frame copy; caller requeues the buffers */
+        free(test_obj->current_job_frames);
+        test_obj->current_job_frames = NULL;
+    }
+
+    return rc;
+}
+
+/* Create a JPEG encoding session for one main snapshot frame.
+ * Source and destination dimensions both match the snapshot stream; the
+ * new session id is written to test_obj->current_jpeg_sess_id. */
+int createEncodingSession(mm_camera_test_obj_t *test_obj,
+                          mm_camera_stream_t *m_stream,
+                          mm_camera_buf_def_t *m_frame)
+{
+    mm_jpeg_encode_params_t params;
+
+    memset(&params, 0, sizeof(params));
+
+    /* completion callback and generic encode settings */
+    params.jpeg_cb = jpeg_encode_cb;
+    params.userdata = (void*)test_obj;
+    params.encode_thumbnail = 0;
+    params.quality = 85;
+    params.color_format = MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
+    params.thumb_color_format = MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
+
+    /* one source buffer: the captured YUV frame */
+    params.num_src_bufs = 1;
+    params.src_main_buf[0].index = 0;
+    params.src_main_buf[0].buf_size = m_frame->frame_len;
+    params.src_main_buf[0].buf_vaddr = (uint8_t *)m_frame->buffer;
+    params.src_main_buf[0].fd = m_frame->fd;
+    params.src_main_buf[0].format = MM_JPEG_FMT_YUV;
+    params.src_main_buf[0].offset = m_stream->offset;
+
+    /* one destination buffer: the preallocated jpeg output buffer */
+    params.num_dst_bufs = 1;
+    params.dest_buf[0].index = 0;
+    params.dest_buf[0].buf_size = test_obj->jpeg_buf.buf.frame_len;
+    params.dest_buf[0].buf_vaddr = (uint8_t *)test_obj->jpeg_buf.buf.buffer;
+    params.dest_buf[0].fd = test_obj->jpeg_buf.buf.fd;
+    params.dest_buf[0].format = MM_JPEG_FMT_YUV;
+
+    /* encode at the stream's native dimensions (no rescale) */
+    params.main_dim.src_dim = m_stream->s_config.stream_info->dim;
+    params.main_dim.dst_dim = m_stream->s_config.stream_info->dim;
+
+    return test_obj->jpeg_ops.create_session(test_obj->jpeg_hdl,
+                                             &params,
+                                             &test_obj->current_jpeg_sess_id);
+}
+
+/** mm_app_snapshot_metadata_notify_cb
+ *  @bufs: Pointer to super buffer carrying a metadata frame
+ *  @user_data: Pointer to the mm_camera_test_obj_t of this camera
+ *
+ *  Caches the newest metadata into pme->metadata (allocated lazily; the
+ *  app frees it), wakes the waiting app thread when an autofocus verdict
+ *  arrives, then requeues the metadata buffer.
+ **/
+__unused
+static void mm_app_snapshot_metadata_notify_cb(mm_camera_super_buf_t *bufs,
+  void *user_data)
+{
+  uint32_t i = 0;
+  mm_camera_channel_t *channel = NULL;
+  mm_camera_stream_t *p_stream = NULL;
+  mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+  mm_camera_buf_def_t *frame;
+  metadata_buffer_t *pMetadata;
+
+  if (NULL == bufs || NULL == user_data) {
+    LOGE(" bufs or user_data are not valid ");
+    return;
+  }
+  /* fall back to the first buffer; replaced below if the metadata
+   * stream's own buffer is found in the super-buffer */
+  frame = bufs->bufs[0];
+
+  /* find channel */
+  for (i = 0; i < MM_CHANNEL_TYPE_MAX; i++) {
+    if (pme->channels[i].ch_id == bufs->ch_id) {
+      channel = &pme->channels[i];
+      break;
+    }
+  }
+
+  if (NULL == channel) {
+    LOGE(" Channel object is null");
+    return;
+  }
+
+  /* find meta stream */
+  for (i = 0; i < channel->num_streams; i++) {
+    if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_METADATA) {
+      p_stream = &channel->streams[i];
+      break;
+    }
+  }
+
+  if (NULL == p_stream) {
+    LOGE(" cannot find metadata stream");
+    return;
+  }
+
+  /* find meta frame */
+  for (i = 0; i < bufs->num_bufs; i++) {
+    if (bufs->bufs[i]->stream_id == p_stream->s_id) {
+      frame = bufs->bufs[i];
+      break;
+    }
+  }
+
+  if (!pme->metadata) {
+    /* The app will free the metadata, we don't need to bother here */
+    pme->metadata = malloc(sizeof(metadata_buffer_t));
+    if (NULL == pme->metadata) {
+        LOGE(" malloc failed");
+        return;
+    }
+  }
+
+  memcpy(pme->metadata , frame->buffer, sizeof(metadata_buffer_t));
+
+  pMetadata = (metadata_buffer_t *)frame->buffer;
+
+  /* wake the app when autofocus locks (either successfully or not) */
+  IF_META_AVAILABLE(cam_auto_focus_data_t, focus_data,
+        CAM_INTF_META_AUTOFOCUS_DATA, pMetadata) {
+    if (focus_data->focus_state == CAM_AF_STATE_FOCUSED_LOCKED) {
+      LOGE(" AutoFocus Done Call Back Received\n");
+      mm_camera_app_done();
+    } else if (focus_data->focus_state == CAM_AF_STATE_NOT_FOCUSED_LOCKED) {
+      LOGE(" AutoFocus failed\n");
+      mm_camera_app_done();
+    }
+  }
+
+  if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+                                          bufs->ch_id,
+                                          frame)) {
+    LOGE(" Failed in Preview Qbuf\n");
+  }
+  mm_app_cache_ops((mm_camera_app_meminfo_t *)frame->mem_info,
+                   ION_IOC_INV_CACHES);
+}
+
+/* Channel callback for RAW capture.
+ *
+ * Dumps the RAW frame when it can be located, then unconditionally
+ * requeues every buffer of the super-buffer and wakes the app thread. */
+static void mm_app_snapshot_notify_cb_raw(mm_camera_super_buf_t *bufs,
+                                          void *user_data)
+{
+
+    int rc;
+    uint32_t i = 0;
+    mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+    mm_camera_channel_t *channel = NULL;
+    mm_camera_stream_t *m_stream = NULL;
+    mm_camera_buf_def_t *m_frame = NULL;
+
+    LOGD(" BEGIN\n");
+
+    /* find channel */
+    for (i = 0; i < MM_CHANNEL_TYPE_MAX; i++) {
+        if (pme->channels[i].ch_id == bufs->ch_id) {
+            channel = &pme->channels[i];
+            break;
+        }
+    }
+    if (NULL == channel) {
+        LOGE(" Wrong channel id (%d)",  bufs->ch_id);
+        rc = -1;   /* NOTE(review): rc is never read after the jump */
+        goto EXIT;
+    }
+
+    /* find snapshot stream */
+    for (i = 0; i < channel->num_streams; i++) {
+        if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_RAW) {
+            m_stream = &channel->streams[i];
+            break;
+        }
+    }
+    if (NULL == m_stream) {
+        LOGE(" cannot find snapshot stream");
+        rc = -1;
+        goto EXIT;
+    }
+
+    /* find snapshot frame */
+    for (i = 0; i < bufs->num_bufs; i++) {
+        if (bufs->bufs[i]->stream_id == m_stream->s_id) {
+            m_frame = bufs->bufs[i];
+            break;
+        }
+    }
+    if (NULL == m_frame) {
+        LOGE(" main frame is NULL");
+        rc = -1;
+        goto EXIT;
+    }
+
+    mm_app_dump_frame(m_frame, "main", "raw", m_frame->frame_idx);
+
+EXIT:
+    /* buffers are always returned, whether or not the dump happened */
+    for (i=0; i<bufs->num_bufs; i++) {
+        if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+                                                bufs->ch_id,
+                                                bufs->bufs[i])) {
+            LOGE(" Failed in Qbuf\n");
+        }
+    }
+
+    mm_camera_app_done();
+
+    LOGD(" END\n");
+}
+
+/* Channel callback for YUV snapshot capture.
+ *
+ * Locates the snapshot (and optional postview) frame in the super-buffer,
+ * dumps them to disk, then allocates the JPEG output buffer, creates an
+ * encoding session and submits the encode job.  On success the buffers
+ * are requeued later by jpeg_encode_cb(); on any failure they are all
+ * requeued here. */
+static void mm_app_snapshot_notify_cb(mm_camera_super_buf_t *bufs,
+                                      void *user_data)
+{
+
+    int rc = 0;
+    uint32_t i = 0;
+    mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+    mm_camera_channel_t *channel = NULL;
+    mm_camera_stream_t *p_stream = NULL;
+    mm_camera_stream_t *m_stream = NULL;
+    mm_camera_buf_def_t *p_frame = NULL;
+    mm_camera_buf_def_t *m_frame = NULL;
+
+    /* find channel */
+    for (i = 0; i < MM_CHANNEL_TYPE_MAX; i++) {
+        if (pme->channels[i].ch_id == bufs->ch_id) {
+            channel = &pme->channels[i];
+            break;
+        }
+    }
+    if (NULL == channel) {
+        LOGE(" Wrong channel id (%d)",  bufs->ch_id);
+        rc = -1;
+        goto error;
+    }
+
+    /* find snapshot stream */
+    for (i = 0; i < channel->num_streams; i++) {
+        if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_SNAPSHOT) {
+            m_stream = &channel->streams[i];
+            break;
+        }
+    }
+    if (NULL == m_stream) {
+        LOGE(" cannot find snapshot stream");
+        rc = -1;
+        goto error;
+    }
+
+    /* find snapshot frame */
+    for (i = 0; i < bufs->num_bufs; i++) {
+        if (bufs->bufs[i]->stream_id == m_stream->s_id) {
+            m_frame = bufs->bufs[i];
+            break;
+        }
+    }
+    if (NULL == m_frame) {
+        LOGE(" main frame is NULL");
+        rc = -1;
+        goto error;
+    }
+
+    mm_app_dump_frame(m_frame, "main", "yuv", m_frame->frame_idx);
+
+    /* find postview stream (optional) */
+    for (i = 0; i < channel->num_streams; i++) {
+        if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_POSTVIEW) {
+            p_stream = &channel->streams[i];
+            break;
+        }
+    }
+    if (NULL != p_stream) {
+        /* find preview frame */
+        for (i = 0; i < bufs->num_bufs; i++) {
+            if (bufs->bufs[i]->stream_id == p_stream->s_id) {
+                p_frame = bufs->bufs[i];
+                break;
+            }
+        }
+        if (NULL != p_frame) {
+            mm_app_dump_frame(p_frame, "postview", "yuv", p_frame->frame_idx);
+        }
+    }
+
+    /* flush CPU writes before handing the frame to the jpeg encoder */
+    mm_app_cache_ops((mm_camera_app_meminfo_t *)m_frame->mem_info,
+                     ION_IOC_CLEAN_INV_CACHES);
+
+    pme->jpeg_buf.buf.buffer = (uint8_t *)malloc(m_frame->frame_len);
+    if ( NULL == pme->jpeg_buf.buf.buffer ) {
+        LOGE(" error allocating jpeg output buffer");
+        /* BUGFIX: rc was left 0 here, so the error path below skipped
+         * requeuing the received buffers and they leaked */
+        rc = -1;
+        goto error;
+    }
+
+    pme->jpeg_buf.buf.frame_len = m_frame->frame_len;
+    /* create a new jpeg encoding session */
+    rc = createEncodingSession(pme, m_stream, m_frame);
+    if (0 != rc) {
+        LOGE(" error creating jpeg session");
+        free(pme->jpeg_buf.buf.buffer);
+        goto error;
+    }
+
+    /* start jpeg encoding job */
+    rc = encodeData(pme, bufs, m_stream);
+    if (0 != rc) {
+        /* BUGFIX: message previously duplicated the session-creation error */
+        LOGE(" error starting jpeg encode job");
+        free(pme->jpeg_buf.buf.buffer);
+        goto error;
+    }
+
+error:
+    /* buf done rcvd frames in error case */
+    if ( 0 != rc ) {
+        for (i=0; i<bufs->num_bufs; i++) {
+            if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+                                                    bufs->ch_id,
+                                                    bufs->bufs[i])) {
+                LOGE(" Failed in Qbuf\n");
+            }
+            mm_app_cache_ops((mm_camera_app_meminfo_t *)bufs->bufs[i]->mem_info,
+                             ION_IOC_INV_CACHES);
+        }
+    }
+
+    LOGD(" END\n");
+}
+
+/* Create a snapshot channel with a single snapshot stream attached.
+ * Returns the channel, or NULL on failure (channel is torn down). */
+mm_camera_channel_t * mm_app_add_snapshot_channel(mm_camera_test_obj_t *test_obj)
+{
+    mm_camera_channel_t *snap_ch;
+    mm_camera_stream_t *snap_stream;
+
+    snap_ch = mm_app_add_channel(test_obj,
+                                 MM_CHANNEL_TYPE_SNAPSHOT,
+                                 NULL,
+                                 NULL,
+                                 NULL);
+    if (NULL == snap_ch) {
+        LOGE(" add channel failed");
+        return NULL;
+    }
+
+    /* one buffer, one-frame burst */
+    snap_stream = mm_app_add_snapshot_stream(test_obj,
+                                             snap_ch,
+                                             mm_app_snapshot_notify_cb,
+                                             (void *)test_obj,
+                                             1,
+                                             1);
+    if (NULL == snap_stream) {
+        LOGE(" add snapshot stream failed\n");
+        mm_app_del_channel(test_obj, snap_ch);
+        return NULL;
+    }
+
+    return snap_ch;
+}
+
+/* Add and configure a postview stream on the given channel.
+ *
+ * @stream_cb/@userdata: frame callback and its cookie (may be NULL)
+ * @num_bufs: number of stream buffers to allocate
+ * @num_burst: 0 for continuous streaming, otherwise burst count
+ * Returns the configured stream, or NULL on failure. */
+mm_camera_stream_t * mm_app_add_postview_stream(mm_camera_test_obj_t *test_obj,
+                                                mm_camera_channel_t *channel,
+                                                mm_camera_buf_notify_t stream_cb,
+                                                void *userdata,
+                                                uint8_t num_bufs,
+                                                uint8_t num_burst)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_stream_t *stream = NULL;
+    cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+
+    stream = mm_app_add_stream(test_obj, channel);
+    if (NULL == stream) {
+        LOGE(" add stream failed\n");
+        return NULL;
+    }
+
+    /* buffer-management hooks and callback wiring */
+    stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
+    stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
+    stream->s_config.mem_vtbl.clean_invalidate_buf =
+      mm_app_stream_clean_invalidate_buf;
+    stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
+    stream->s_config.mem_vtbl.user_data = (void *)stream;
+    stream->s_config.stream_cb = stream_cb;
+    stream->s_config.stream_cb_sync = NULL;
+    stream->s_config.userdata = userdata;
+    stream->num_of_bufs = num_bufs;
+
+    /* stream info lives in the shared s_info_buf mapped to the backend */
+    stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
+    memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
+    stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_POSTVIEW;
+    if (num_burst == 0) {
+        stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+    } else {
+        stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_BURST;
+        stream->s_config.stream_info->num_of_burst = num_burst;
+    }
+    stream->s_config.stream_info->fmt = DEFAULT_PREVIEW_FORMAT;
+    stream->s_config.stream_info->dim.width = DEFAULT_PREVIEW_WIDTH;
+    stream->s_config.stream_info->dim.height = DEFAULT_PREVIEW_HEIGHT;
+    stream->s_config.padding_info = cam_cap->padding_info;
+
+    rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
+    if (MM_CAMERA_OK != rc) {
+        LOGE("config postview stream err=%d\n",  rc);
+        /* BUGFIX: release the half-initialized stream instead of leaking it */
+        mm_app_del_stream(test_obj, channel, stream);
+        return NULL;
+    }
+
+    return stream;
+}
+
+/* Start a RAW burst capture: create a capture channel with a RAW stream
+ * and start streaming.  num_snapshots sets both the buffer count and the
+ * burst length.  Returns MM_CAMERA_OK on success, negative error code
+ * otherwise. */
+int mm_app_start_capture_raw(mm_camera_test_obj_t *test_obj, uint8_t num_snapshots)
+{
+    int32_t rc = MM_CAMERA_OK;
+    mm_camera_channel_t *channel = NULL;
+    mm_camera_stream_t *s_main = NULL;
+    mm_camera_channel_attr_t attr;
+
+    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
+    attr.max_unmatched_frames = 3;
+    channel = mm_app_add_channel(test_obj,
+                                 MM_CHANNEL_TYPE_CAPTURE,
+                                 &attr,
+                                 mm_app_snapshot_notify_cb_raw,
+                                 test_obj);
+    if (NULL == channel) {
+        LOGE(" add channel failed");
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    test_obj->buffer_format = DEFAULT_RAW_FORMAT;
+    s_main = mm_app_add_raw_stream(test_obj,
+                                   channel,
+                                   mm_app_snapshot_notify_cb_raw,
+                                   test_obj,
+                                   num_snapshots,
+                                   num_snapshots);
+    if (NULL == s_main) {
+        LOGE(" add main snapshot stream failed\n");
+        mm_app_del_channel(test_obj, channel);
+        /* BUGFIX: rc was still MM_CAMERA_OK here, reporting success to
+         * the caller on a failed start */
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    rc = mm_app_start_channel(test_obj, channel);
+    if (MM_CAMERA_OK != rc) {
+        LOGE("start zsl failed rc=%d\n",  rc);
+        mm_app_del_stream(test_obj, channel, s_main);
+        mm_app_del_channel(test_obj, channel);
+        return rc;
+    }
+
+    return rc;
+}
+
+/* Stop RAW capture: halt the capture channel, delete its streams, push
+ * an empty stream-size table to the backend, and delete the channel.
+ * Returns MM_CAMERA_OK on success. */
+int mm_app_stop_capture_raw(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_channel_t *ch = NULL;
+    int i;
+    cam_stream_size_info_t abc ;
+    memset (&abc , 0, sizeof (cam_stream_size_info_t));
+
+    ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_CAPTURE);
+    /* BUGFIX: the lookup can fail; ch was dereferenced unconditionally */
+    if (NULL == ch) {
+        LOGE(" no capture channel found\n");
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    rc = mm_app_stop_channel(test_obj, ch);
+    if (MM_CAMERA_OK != rc) {
+        LOGE("stop recording failed rc=%d\n",  rc);
+    }
+
+    for ( i = 0 ; i < ch->num_streams ; i++ ) {
+        mm_app_del_stream(test_obj, ch, &ch->streams[i]);
+    }
+    /* clear the per-stream size info so the backend forgets our streams */
+    rc = setmetainfoCommand(test_obj, &abc);
+    if (rc != MM_CAMERA_OK) {
+       LOGE(" meta info command failed\n");
+    }
+    mm_app_del_channel(test_obj, ch);
+
+    return rc;
+}
+
+/* Start a YUV snapshot capture: capture channel + snapshot stream +
+ * postview stream, then start streaming.  num_snapshots frames will be
+ * captured in burst.  Returns MM_CAMERA_OK on success, negative error
+ * code otherwise. */
+int mm_app_start_capture(mm_camera_test_obj_t *test_obj,
+                         uint8_t num_snapshots)
+{
+    int32_t rc = MM_CAMERA_OK;
+    mm_camera_channel_t *channel = NULL;
+    mm_camera_stream_t *s_main = NULL;
+    mm_camera_stream_t *s_post = NULL;
+    mm_camera_channel_attr_t attr;
+    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
+    attr.max_unmatched_frames = 3;
+    channel = mm_app_add_channel(test_obj,
+                                 MM_CHANNEL_TYPE_CAPTURE,
+                                 &attr,
+                                 mm_app_snapshot_notify_cb,
+                                 test_obj);
+    if (NULL == channel) {
+        LOGE(" add channel failed");
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    s_main = mm_app_add_snapshot_stream(test_obj,
+                                        channel,
+                                        mm_app_snapshot_notify_cb,
+                                        (void *)test_obj,
+                                        CAPTURE_BUF_NUM,
+                                        num_snapshots);
+    if (NULL == s_main) {
+        LOGE(" add main snapshot stream failed\n");
+        mm_app_del_channel(test_obj, channel);
+        /* BUGFIX: rc was still MM_CAMERA_OK, reporting success on failure */
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    s_post = mm_app_add_postview_stream(test_obj,
+                                        channel,
+                                        NULL,
+                                        NULL,
+                                        CAPTURE_BUF_NUM,
+                                        num_snapshots);
+    if (NULL == s_post) {
+        /* BUGFIX: this branch re-tested s_main (copy-paste), so a
+         * postview stream failure went completely undetected */
+        LOGE(" add postview stream failed\n");
+        mm_app_del_channel(test_obj, channel);
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    rc = mm_app_start_channel(test_obj, channel);
+    if (MM_CAMERA_OK != rc) {
+        LOGE("start zsl failed rc=%d\n",  rc);
+        mm_app_del_stream(test_obj, channel, s_main);
+        mm_app_del_channel(test_obj, channel);
+        return rc;
+    }
+
+    return rc;
+}
+
+/* Stop and tear down the capture channel created by mm_app_start_capture(). */
+int mm_app_stop_capture(mm_camera_test_obj_t *test_obj)
+{
+    mm_camera_channel_t *capture_ch =
+        mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_CAPTURE);
+    int rc = mm_app_stop_and_del_channel(test_obj, capture_ch);
+
+    if (MM_CAMERA_OK != rc) {
+        LOGE("stop capture channel failed rc=%d\n",  rc);
+    }
+
+    return rc;
+}
+
+/* Take num_snapshot pictures (6 in burst mode, 1 otherwise): stop
+ * preview, run the capture channel, wait for each snapshot to finish
+ * encoding, then stop capture and restart preview.
+ * Returns MM_CAMERA_OK on success. */
+int mm_app_take_picture(mm_camera_test_obj_t *test_obj, uint8_t is_burst_mode)
+{
+    int rc = MM_CAMERA_OK;
+    uint8_t num_snapshot = 1;
+    int num_rcvd_snapshot = 0;
+
+    /* BUGFIX: the "%s" had no matching argument (undefined behavior) */
+    LOGH("\nEnter %s!!\n", __func__);
+
+    if (is_burst_mode)
+       num_snapshot = 6;
+
+    //stop preview before starting capture.
+    rc = mm_app_stop_preview(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE(" stop preview failed before capture!!, err=%d\n", rc);
+        return rc;
+    }
+
+    rc = mm_app_start_capture(test_obj, num_snapshot);
+    if (rc != MM_CAMERA_OK) {
+        LOGE(" mm_app_start_capture(), err=%d\n", rc);
+        return rc;
+    }
+    /* each mm_camera_app_done() from jpeg_encode_cb releases one wait */
+    while (num_rcvd_snapshot < num_snapshot) {
+        LOGH("\nWaiting mm_camera_app_wait !!\n");
+        mm_camera_app_wait();
+        num_rcvd_snapshot++;
+    }
+    rc = mm_app_stop_capture(test_obj);
+    if (rc != MM_CAMERA_OK) {
+       LOGE(" mm_app_stop_capture(), err=%d\n", rc);
+       return rc;
+    }
+    //start preview after capture.
+    rc = mm_app_start_preview(test_obj);
+    if (rc != MM_CAMERA_OK) {
+        LOGE(" start preview failed after capture!!, err=%d\n",rc);
+    }
+    return rc;
+}
diff --git a/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_socket.c b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_socket.c
new file mode 100644
index 0000000..81c377f
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_socket.c
@@ -0,0 +1,881 @@
+/* Copyright (c) 2012-2014, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <errno.h>
+#include <sys/socket.h>
+#include <fcntl.h>
+
+// Camera dependencies
+#include "mm_qcamera_socket.h"
+#include "mm_qcamera_commands.h"
+#include "mm_qcamera_dbg.h"
+
+#define IP_ADDR                  "127.0.0.1"
+#define TUNING_CHROMATIX_PORT     55555
+#define TUNING_PREVIEW_PORT       55556
+
+#define CURRENT_COMMAND_ACK_SUCCESS 1
+#define CURRENT_COMMAND_ACK_FAILURE 2
+
+pthread_t eztune_thread_id;
+
+/* Send a command response to the tuning client and release send_buf.
+ *
+ * Always consumes (frees) a non-NULL send_buf, success or failure.
+ * Returns 0 on success, -1 on invalid arguments or send() failure. */
+static ssize_t tuneserver_send_command_rsp(tuningserver_t *tsctrl,
+  char *send_buf, uint32_t send_len)
+{
+  ssize_t rc;
+
+  if (send_buf == NULL) {
+    LOGE("Invalid send buf \n");
+    return -1;
+  }
+  /* BUGFIX: send_len is unsigned so "<= 0" only ever matched 0; also
+   * free send_buf on this path so the caller's allocation is not leaked */
+  if (send_len == 0) {
+    LOGE("Invalid send len \n");
+    free(send_buf);
+    return -1;
+  }
+
+  rc = send(tsctrl->clientsocket_id, send_buf, send_len, 0);
+  if (rc < 0) {
+    LOGE("RSP send returns error %s\n",  strerror(errno));
+  } else {
+    rc = 0;
+  }
+
+  /* ownership of send_buf ends here (NULL was rejected above) */
+  free(send_buf);
+  return rc;
+}
+
+/* Free a linked list of preview-command responses.
+ * BUGFIX: rewritten iteratively -- the recursive version consumed one
+ * stack frame per node and could overflow on long response lists. */
+static void release_eztune_prevcmd_rsp(eztune_prevcmd_rsp *pHead)
+{
+  while (pHead != NULL) {
+    eztune_prevcmd_rsp *next = (eztune_prevcmd_rsp *)pHead->next;
+    free(pHead);
+    pHead = next;
+  }
+}
+
+/* Send a 6-byte ack (2-byte field a + 4-byte field b) to the client.
+ * NOTE(review): relies on host byte order for the memcpy'd integers --
+ * fine for the localhost tuning client, not portable across hosts.
+ * Returns 0 on success (including short sends), negative on error. */
+static ssize_t tuneserver_ack(uint16_t a, uint32_t b, tuningserver_t *tsctrl)
+{
+  ssize_t rc;
+  char ack_1[6];
+  /*Ack the command here*/
+  memcpy(ack_1, &a, 2);
+  memcpy(ack_1+2, &b, 4);
+  /* send echo back to client upon accept */
+  rc = send(tsctrl->clientsocket_id, &ack_1, sizeof(ack_1), 0);
+  if (rc < 0) {
+    LOGE(" eztune_server_run: send returns error %s\n",
+      strerror(errno));
+    return rc;
+  } else if (rc < (int32_t)sizeof(ack_1)) {
+    /*Shouldn't hit this for packets <1K; need to re-send if we do*/
+  }
+  return 0;
+}
+
+/* Send the single-byte command ACK/NAK to the connected tuning client.
+ * Returns 0 on success, the negative send() result on failure. */
+static ssize_t tuneserver_send_command_ack( uint8_t ack,
+    tuningserver_t *tsctrl)
+{
+  ssize_t sent = send(tsctrl->clientsocket_id, &ack, sizeof(ack), 0);
+
+  if (sent < 0) {
+    LOGE("ACK send returns error %s\n",  strerror(errno));
+    return sent;
+  }
+  return 0;
+}
+
+/** tuneserver_process_command
+ *    @tsctrl: the server control object
+ *    @send_buf: response buffer; ownership passes to
+ *               tuneserver_send_command_rsp(), which frees it
+ *    @send_len: response buffer length in bytes
+ *
+ *  Acks the currently latched command, dispatches it to the matching
+ *  get-list / get-params / set-params / misc handler, then sends the
+ *  filled response buffer back to the client.
+ *
+ *  Return: >=0 on success, -1 on failure.
+ **/
+static int32_t tuneserver_process_command(tuningserver_t *tsctrl,
+  char *send_buf, uint32_t send_len)
+{
+  tuneserver_protocol_t *p = tsctrl->proto;
+  int result = 0;
+
+  LOGD(" Current command is %d\n",  p->current_cmd);
+  switch (p->current_cmd) {
+  case TUNESERVER_GET_LIST:
+    if(tuneserver_send_command_ack(CURRENT_COMMAND_ACK_SUCCESS, tsctrl)) {
+      LOGE(" Ack Failed for cmd %d\n",  p->current_cmd);
+      return -1;
+    }
+    result = tuneserver_process_get_list_cmd(tsctrl, p->recv_buf,
+      send_buf, send_len);
+    if (result < 0) {
+      LOGE(" RSP processing Failed for cmd %d\n",  p->current_cmd);
+      return -1;
+    }
+    if(tuneserver_send_command_rsp(tsctrl, send_buf, send_len)) {
+      LOGE(" RSP Failed for cmd %d\n",  p->current_cmd);
+      return -1;
+    }
+    break;
+
+  case TUNESERVER_GET_PARMS:
+    if(tuneserver_send_command_ack(CURRENT_COMMAND_ACK_SUCCESS, tsctrl)) {
+      LOGE(" Ack Failed for cmd %d\n",  p->current_cmd);
+      return -1;
+    }
+    result = tuneserver_process_get_params_cmd(tsctrl, p->recv_buf,
+      send_buf, send_len);
+    if (result < 0) {
+      LOGE(" RSP processing Failed for cmd %d\n",  p->current_cmd);
+      return -1;
+    }
+    if(tuneserver_send_command_rsp(tsctrl, send_buf, send_len)) {
+      LOGE(" RSP Failed for cmd %d\n",  p->current_cmd);
+      return -1;
+    }
+    break;
+
+  case TUNESERVER_SET_PARMS:
+    if(tuneserver_send_command_ack(CURRENT_COMMAND_ACK_SUCCESS, tsctrl)) {
+      LOGE(" Ack Failed for cmd %d\n",  p->current_cmd);
+      return -1;
+    }
+    result = tuneserver_process_set_params_cmd(tsctrl, p->recv_buf,
+      send_buf, send_len);
+    if (result < 0) {
+      LOGE(" RSP processing Failed for cmd %d\n",  p->current_cmd);
+      return -1;
+    }
+    if(tuneserver_send_command_rsp(tsctrl, send_buf, send_len)) {
+      LOGE(" RSP Failed for cmd %d\n",  p->current_cmd);
+      return -1;
+    }
+    break;
+
+  case TUNESERVER_MISC_CMDS: {
+    if(tuneserver_send_command_ack(CURRENT_COMMAND_ACK_SUCCESS, tsctrl)) {
+      LOGE(" Ack Failed for cmd %d\n",  p->current_cmd);
+      return -1;
+    }
+    result = tuneserver_process_misc_cmd(tsctrl, p->recv_buf,
+      send_buf, send_len);
+    if (result < 0) {
+      LOGE(" RSP processing Failed for cmd %d\n",  p->current_cmd);
+      return -1;
+    }
+    if(tuneserver_send_command_rsp(tsctrl, send_buf, send_len)) {
+      LOGE(" RSP Failed for cmd %d\n",  p->current_cmd);
+      return -1;
+    }
+    break;
+  }
+
+  default:
+    /* NOTE(review): unknown commands are ACKed as SUCCESS before failing
+     * -- confirm that is intentional in the tuning protocol */
+    if(tuneserver_send_command_ack(CURRENT_COMMAND_ACK_SUCCESS, tsctrl)) {
+      LOGE(" Ack Failed for cmd %d\n",  p->current_cmd);
+      return -1;
+    }
+    LOGE(" p->current_cmd: default\n");
+    result = -1;
+    break;
+  }
+
+  return result;
+}
+
+/** tuneserver_process_client_message
+ *    @recv_buffer: received message from the client
+ *    @tsctrl: the server control object
+ *
+ *  One step of the receive state machine: command id -> payload size ->
+ *  payload -> expected response size, after which the latched command is
+ *  executed.  Updates next_recv_code/next_recv_len for the next read.
+ *
+ *  Return: >=0 on success, -1 on failure.
+ **/
+static int32_t tuneserver_process_client_message(void *recv_buffer,
+  tuningserver_t *tsctrl)
+{
+  int rc = 0;
+  tuneserver_protocol_t *p = tsctrl->proto;
+
+  switch (tsctrl->proto->next_recv_code) {
+  case TUNESERVER_RECV_COMMAND:
+    /* NOTE(review): assumes recv_buffer is suitably aligned for these
+     * raw integer loads -- confirm the receive buffer's alignment */
+    p->current_cmd = *(uint16_t *)recv_buffer;
+    p->next_recv_code = TUNESERVER_RECV_PAYLOAD_SIZE;
+    p->next_recv_len = sizeof(uint32_t);
+    break;
+
+  case TUNESERVER_RECV_PAYLOAD_SIZE:
+    p->next_recv_code = TUNESERVER_RECV_PAYLOAD;
+    p->next_recv_len = *(uint32_t *)recv_buffer;
+    p->recv_len = p->next_recv_len;
+    /* reject oversized payloads before allocating anything */
+    if (p->next_recv_len > TUNESERVER_MAX_RECV)
+      return -1;
+    /* empty payload: skip straight to the response-size state */
+    if (p->next_recv_len == 0) {
+      p->next_recv_code = TUNESERVER_RECV_RESPONSE;
+      p->next_recv_len = sizeof(uint32_t);
+    }
+    break;
+
+  case TUNESERVER_RECV_PAYLOAD:
+    p->recv_buf = malloc(p->next_recv_len);
+    if (!p->recv_buf) {
+      LOGE("Error allocating memory for recv_buf %s\n",
+        strerror(errno));
+      return -1;
+    }
+    memcpy(p->recv_buf, recv_buffer, p->next_recv_len);
+    p->next_recv_code = TUNESERVER_RECV_RESPONSE;
+    p->next_recv_len = sizeof(uint32_t);
+    /*Process current command at this point*/
+    break;
+
+  case TUNESERVER_RECV_RESPONSE:
+    p->next_recv_code = TUNESERVER_RECV_COMMAND;
+    p->next_recv_len = 2;
+    p->send_len = *(uint32_t *)recv_buffer;
+    /* NOTE(review): element size sizeof(char *) over-allocates ~8x for a
+     * char buffer; likely meant sizeof(char) -- confirm before changing.
+     * send_buf is freed by tuneserver_send_command_rsp(). */
+    p->send_buf =  (char *)calloc(p->send_len, sizeof(char *));
+    if (!p->send_buf) {
+      LOGE("Error allocating memory for send_buf %s\n",
+        strerror(errno));
+      return -1;
+    }
+    rc = tuneserver_process_command(tsctrl, p->send_buf, p->send_len);
+    free(p->recv_buf);
+    p->recv_buf = NULL;
+    p->recv_len = 0;
+    break;
+
+  default:
+    LOGE(" p->next_recv_code: default\n");
+    rc = -1;
+    break;
+  }
+
+  return rc;
+}
+
+/** tuneserver_ack_onaccept_initprotocol
+ *    @tsctrl: the server control object
+ *
+ *  Acks a connection from the client and sets up the
+ *  protocol object to start receiving commands.
+ *
+ *  Return: >=0 on success, -1 on failure.
+ **/
+static ssize_t tuneserver_ack_onaccept_initprotocol(tuningserver_t *tsctrl)
+{
+  ssize_t rc = 0;
+  uint32_t ack_status;
+
+  LOGE("starts\n");
+  /* Always report status 1; the old camera_running based status
+   * selection was intentionally disabled. */
+  ack_status = 1;
+
+  rc = tuneserver_ack(1, ack_status, tsctrl);
+
+  tsctrl->proto = malloc(sizeof(tuneserver_protocol_t));
+  if (!tsctrl->proto) {
+    LOGE(" malloc returns NULL with error %s\n",  strerror(errno));
+    return -1;
+  }
+
+  tsctrl->proto->current_cmd    = 0xFFFF;
+  tsctrl->proto->next_recv_code = TUNESERVER_RECV_COMMAND;
+  tsctrl->proto->next_recv_len  = 2;
+  tsctrl->proto->recv_buf       = NULL;
+  tsctrl->proto->send_buf       = NULL;
+  /* Fix: initialize the length fields too. Callers read proto->send_len
+   * immediately after this returns (tuneserver_initialize_tuningp),
+   * which was an uninitialized read before. */
+  tsctrl->proto->recv_len       = 0;
+  tsctrl->proto->send_len       = 0;
+
+  LOGD("X\n");
+
+  return rc;
+}
+
+/** tuneserver_check_status
+ *    @tsctrl: the server control object
+ *
+ *  Checks if camera is running and stops it.
+ *
+ *  Return: >=0 on success, -1 on failure.
+ **/
+/* NOTE(review): compiled out with #if 0; every call site in eztune_proc
+ * is also commented out. Kept for reference only. */
+#if 0
+static void tuneserver_check_status(tuningserver_t *tsctrl)
+{
+  if (tsctrl->camera_running == 1) {
+    /*TODO: Stop camera here*/
+    tuneserver_stop_cam(&tsctrl->lib_handle);
+  }
+  tsctrl->camera_running = 0;
+
+  tuneserver_close_cam(&tsctrl->lib_handle);
+}
+#endif
+
+/** prevserver_send_command_rsp
+ *    @tsctrl: the server control object
+ *    @send_buf: response payload; ownership is taken — it is freed
+ *               before returning on both success and send failure
+ *    @send_len: number of bytes to send; must be non-zero
+ *
+ *  Sends a response on the preview client socket.
+ *
+ *  Return: 0 on success, -1 on failure.
+ **/
+static ssize_t prevserver_send_command_rsp(tuningserver_t *tsctrl,
+  char *send_buf, uint32_t send_len)
+{
+  ssize_t rc;
+
+  /* send ack back to client upon req */
+  /* Fix: send_len is unsigned, so the old "<= 0" could only ever mean
+   * "== 0"; say so explicitly. */
+  if (send_len == 0) {
+    LOGE("Invalid send len \n");
+    return -1;
+  }
+  if (send_buf == NULL) {
+    LOGE("Invalid send buf \n");
+    return -1;
+  }
+
+  /* NOTE(review): a short send() is treated as success; partial writes
+   * are not retried here — confirm the protocol tolerates this. */
+  rc = send(tsctrl->pr_clientsocket_id, send_buf, send_len, 0);
+  if (rc < 0) {
+    LOGE("RSP send returns error %s\n",  strerror(errno));
+  } else {
+    rc = 0;
+  }
+  /* send_buf is non-NULL here; the old "if (send_buf != NULL)" guard
+   * was redundant (and free(NULL) is a no-op anyway). */
+  free(send_buf);
+  return rc;
+}
+
+/** prevserver_init_protocol
+ *    @tsctrl: the server control object
+ *
+ *  Allocates and primes the preview protocol state so the next
+ *  receive expects a 2-byte command code.
+ **/
+static void prevserver_init_protocol(tuningserver_t *tsctrl)
+{
+  prserver_protocol_t *proto = malloc(sizeof(prserver_protocol_t));
+
+  if (proto == NULL) {
+    LOGE(" malloc returns NULL with error %s\n",
+      strerror(errno));
+    tsctrl->pr_proto = NULL;
+    return;
+  }
+
+  proto->current_cmd    = 0xFFFF;
+  proto->next_recv_code = TUNE_PREV_RECV_COMMAND;
+  proto->next_recv_len  = 2;
+  tsctrl->pr_proto = proto;
+}
+
+/** prevserver_process_command
+ *    @tsctrl: the server control object
+ *    @send_buf: filled by the per-command handler with the response
+ *    @send_len: filled with the response length
+ *
+ *  Dispatches the current preview-protocol command and sends the
+ *  resulting response buffer(s) over the preview socket.
+ *
+ *  Return: >=0 on success, -1 on failure.
+ **/
+static int32_t prevserver_process_command(
+  tuningserver_t *tsctrl, char **send_buf, uint32_t *send_len)
+{
+  prserver_protocol_t *p = tsctrl->pr_proto;
+  int result = 0;
+  eztune_prevcmd_rsp *rsp_ptr=NULL, *rspn_ptr=NULL, *head_ptr=NULL;
+
+  LOGD(" Current command is %d\n",  p->current_cmd);
+  switch (p->current_cmd) {
+  case TUNE_PREV_GET_INFO:
+    /* GET_INFO produces a two-node response list: version, then caps. */
+    result = tuneserver_preview_getinfo(tsctrl, send_buf, send_len);
+    if (result < 0) {
+      LOGE(" RSP processing Failed for cmd %d\n",
+        p->current_cmd);
+      return -1;
+    }
+    rsp_ptr = (eztune_prevcmd_rsp *)*send_buf;
+    /* NOTE(review): the rsp node list appears to be leaked on the
+     * error returns below — confirm ownership before changing. */
+    if ((!rsp_ptr) || (!rsp_ptr->send_buf)) {
+      LOGE(" RSP ptr is NULL %d\n",  p->current_cmd);
+      return -1;
+    }
+    if (prevserver_send_command_rsp(tsctrl,
+      rsp_ptr->send_buf, rsp_ptr->send_len)) {
+      LOGE(" RSP Failed for TUNE_PREV_GET_INFO ver cmd %d\n",
+        p->current_cmd);
+      return -1;
+    }
+    rspn_ptr = (eztune_prevcmd_rsp *)rsp_ptr->next;
+    if ((!rspn_ptr) || (!rspn_ptr->send_buf)) {
+      LOGE(" RSP1 ptr is NULL %d\n",  p->current_cmd);
+      return -1;
+    }
+    if (prevserver_send_command_rsp(tsctrl,
+        rspn_ptr->send_buf, rspn_ptr->send_len)) {
+      LOGE(" RSP Failed for TUNE_PREV_GET_INFO caps cmd %d\n",
+        p->current_cmd);
+      return -1;
+    }
+    /* send_buf inside each node was freed by prevserver_send_command_rsp;
+     * only the node structs remain to free here. */
+    free(rspn_ptr);
+    free(rsp_ptr);
+    break;
+
+  case TUNE_PREV_CH_CNK_SIZE:
+    result = tuneserver_preview_getchunksize(tsctrl, send_buf, send_len);
+    if (result < 0) {
+      LOGE(" RSP processing Failed for cmd %d\n",  p->current_cmd);
+      return -1;
+    }
+    if (prevserver_send_command_rsp(tsctrl, *send_buf, *send_len)) {
+      LOGE(" RSP Failed for TUNE_PREV_CH_CNK_SIZE cmd %d\n",
+        p->current_cmd);
+      return -1;
+    }
+    break;
+
+  case TUNE_PREV_GET_PREV_FRAME:
+    result = tuneserver_preview_getframe(tsctrl, send_buf, send_len);
+    if (result < 0) {
+      LOGE(" RSP processing Failed for cmd %d\n",  p->current_cmd);
+      return -1;
+    }
+    rsp_ptr = (eztune_prevcmd_rsp *)*send_buf;
+    if ((!rsp_ptr) || (!rsp_ptr->send_buf)) {
+      LOGE(" RSP ptr is NULL %d\n",  p->current_cmd);
+      return -1;
+    }
+    head_ptr = rsp_ptr;
+
+    /* Walk the response list, sending each node's payload in order. */
+    while (rsp_ptr != NULL) {
+      /* rsp_ptr is non-NULL here (loop condition); this check only
+       * matters for a node carrying a NULL send_buf. */
+      if ((!rsp_ptr) || (!rsp_ptr->send_buf)) {
+        LOGE(" RSP ptr is NULL %d\n",  p->current_cmd);
+        return -1;
+      }
+      if (prevserver_send_command_rsp(tsctrl,
+        rsp_ptr->send_buf, rsp_ptr->send_len)) {
+        LOGE(" RSP Failed for TUNE_PREV_GET_INFO ver cmd %d\n",
+          p->current_cmd);
+        return -1;
+      }
+      rsp_ptr = (eztune_prevcmd_rsp *)rsp_ptr->next;
+    }
+    release_eztune_prevcmd_rsp(head_ptr);
+    break;
+
+  case TUNE_PREV_GET_JPG_SNAP:
+  case TUNE_PREV_GET_RAW_SNAP:
+  case TUNE_PREV_GET_RAW_PREV:
+    /* These commands are not implemented; reply with the canned
+     * "unsupported" response. */
+    result = tuneserver_preview_unsupported(tsctrl, send_buf, send_len);
+    if (result < 0) {
+       LOGE("RSP processing Failed for cmd %d\n",  p->current_cmd);
+      return -1;
+    }
+    if (prevserver_send_command_rsp(tsctrl, *send_buf, *send_len)) {
+      LOGE("RSP Failed for UNSUPPORTED cmd %d\n",  p->current_cmd);
+      return -1;
+    }
+    break;
+
+  default:
+    LOGE(" p->current_cmd: default\n");
+    result = -1;
+    break;
+  }
+
+  return result;
+}
+
+/** prevserver_process_client_message
+ *    @recv_buffer: received message from the client
+ *    @tsctrl: the server control object
+ *
+ *  Processes the message from client and prepares for next
+ *  message.
+ *
+ *  Return: >=0 on success, -1 on failure.
+ **/
+static int32_t prevserver_process_client_message(void *recv_buffer,
+  tuningserver_t *tsctrl)
+{
+  int rc = 0;
+  prserver_protocol_t *p = tsctrl->pr_proto;
+
+  LOGD("command = %d", p->next_recv_code);
+
+  switch (p->next_recv_code) {
+  case TUNE_PREV_RECV_COMMAND:
+    p->current_cmd = *(uint16_t *)recv_buffer;
+    /* Every command except CH_CNK_SIZE is complete at this point;
+     * CH_CNK_SIZE carries a 4-byte chunk-size payload read next. */
+    if(p->current_cmd != TUNE_PREV_CH_CNK_SIZE) {
+      /* NOTE(review): casting &p->send_len to uint32_t * assumes
+       * send_len is a 32-bit field — confirm prserver_protocol_t. */
+      rc = prevserver_process_command(tsctrl,
+        &p->send_buf, (uint32_t *)&p->send_len);
+      break;
+    }
+    p->next_recv_code = TUNE_PREV_RECV_NEWCNKSIZE;
+    p->next_recv_len = sizeof(uint32_t);
+    LOGD("TUNE_PREV_COMMAND X\n");
+    break;
+  case TUNE_PREV_RECV_NEWCNKSIZE:
+    p->new_cnk_size = *(uint32_t *)recv_buffer;
+    p->next_recv_code = TUNE_PREV_RECV_COMMAND;
+    p->next_recv_len  = 2;
+    rc = prevserver_process_command(tsctrl,
+      &p->send_buf, (uint32_t *)&p->send_len);
+    break;
+  default:
+    LOGE("prev_proc->next_recv_code: default\n");
+    rc = -1;
+    break;
+  }
+
+  return rc;
+}
+
+/** tunning_server_socket_listen
+ *    @ip_addr: the ip addr to listen
+ *    @port: the port to listen
+ *
+ *  Setup a non-blocking listen socket for eztune.
+ *
+ *  Return: >0 (the socket fd) on success, <=0 on failure.
+ **/
+int tunning_server_socket_listen(const char* ip_addr, uint16_t port)
+{
+  int sock_fd = -1;
+  mm_qcamera_sock_addr_t server_addr;
+  int result;
+  int option;
+  int socket_flag;
+
+  memset(&server_addr, 0, sizeof(server_addr));
+  server_addr.addr_in.sin_family = AF_INET;
+  server_addr.addr_in.sin_port = (__be16) htons(port);
+  server_addr.addr_in.sin_addr.s_addr = inet_addr(ip_addr);
+
+  if (server_addr.addr_in.sin_addr.s_addr == INADDR_NONE) {
+    LOGE(" invalid address.\n");
+    return -1;
+  }
+
+  /* Create an AF_INET stream socket to receive incoming connection ON */
+  sock_fd = socket(AF_INET, SOCK_STREAM, 0);
+  if (sock_fd < 0) {
+    LOGE(" socket failed\n");
+    return sock_fd;
+  }
+
+  /* Non-blocking so accept()/recv() in the select() loop can't hang.
+   * Fix: the fcntl() results used to be ignored, silently proceeding
+   * with a blocking socket on failure. */
+  socket_flag = fcntl(sock_fd, F_GETFL, 0);
+  if (socket_flag < 0 ||
+      fcntl(sock_fd, F_SETFL, socket_flag | O_NONBLOCK) < 0) {
+    LOGE("eztune fcntl O_NONBLOCK failed");
+    close(sock_fd);
+    return -1;
+  }
+
+  /* reuse in case it is in timeout */
+  option = 1;
+  result = setsockopt(sock_fd, SOL_SOCKET, SO_REUSEADDR,
+    &option, sizeof(option));
+
+  if (result < 0) {
+    LOGE("eztune setsockopt failed");
+    close(sock_fd);
+    sock_fd = -1;
+    return sock_fd;
+  }
+
+  result = bind(sock_fd, &server_addr.addr, sizeof(server_addr.addr_in));
+  if (result < 0) {
+    LOGE("eztune socket bind failed");
+    close(sock_fd);
+    sock_fd = -1;
+    return sock_fd;
+  }
+
+  result = listen(sock_fd, 1);
+  if (result < 0) {
+    LOGE("eztune socket listen failed");
+    close(sock_fd);
+    sock_fd = -1;
+    return sock_fd;
+  }
+
+  LOGH("sock_fd: %d, listen at port: %d\n",  sock_fd, port);
+
+  return sock_fd;
+}
+
+/** eztune_proc
+ *    @data: mm_camera_lib_handle pointer for the camera under tuning
+ *
+ *  Eztune server thread: listens on the chromatix and preview ports,
+ *  then services both client connections from a single select() loop.
+ *
+ *  Return: EXIT_SUCCESS cast to void* (the loop below never exits).
+ **/
+void *eztune_proc(void *data)
+{
+  int server_socket = -1, client_socket = -1;
+  int prev_server_socket = -1, prev_client_socket = -1;
+
+  mm_qcamera_sock_addr_t addr_client_inet;
+  socklen_t addr_client_len = sizeof(addr_client_inet.addr_in);
+  int result;
+  fd_set tsfds;
+  int num_fds = 0;
+  ssize_t recv_bytes;
+  char buf[TUNESERVER_MAX_RECV];
+
+  mm_camera_lib_handle *lib_handle = (mm_camera_lib_handle *)data;
+
+  LOGE(">>> Starting tune server <<< \n");
+
+  // for eztune chromatix params
+  server_socket = tunning_server_socket_listen(IP_ADDR, TUNING_CHROMATIX_PORT);
+  if (server_socket <= 0) {
+    LOGE("[ERR] fail to setup listen socket for eztune chromatix parms...");
+    return NULL;
+  }
+  prev_server_socket = tunning_server_socket_listen(IP_ADDR, TUNING_PREVIEW_PORT);
+  if (prev_server_socket <= 0) {
+    LOGE("[ERR] fail to setup listen socket for eztune preview...\n");
+    return NULL;
+  }
+  num_fds = TUNESERVER_MAX(server_socket, prev_server_socket);
+  LOGH("num_fds = %d\n", num_fds);
+
+  do {
+    /* Rebuild the fd set each pass: both listen sockets always, plus
+     * whichever client connections are currently open. */
+    FD_ZERO(&tsfds);
+    FD_SET(server_socket, &tsfds);
+    FD_SET(prev_server_socket, &tsfds);
+    if (client_socket > 0) {
+      FD_SET(client_socket, &tsfds);
+    }
+    if (prev_client_socket > 0) {
+      FD_SET( prev_client_socket, &tsfds);
+    }
+
+    /* no timeout */
+    result = select(num_fds + 1, &tsfds, NULL, NULL, NULL);
+    if (result < 0) {
+      LOGE("select failed: %s\n", strerror(errno));
+      continue;
+    }
+
+    /*
+     ** (1) CHROMATIX SERVER
+     */
+    if (FD_ISSET(server_socket, &tsfds)) {
+      LOGD("Receiving New client connection\n");
+
+      client_socket = accept(server_socket,
+        &addr_client_inet.addr, &addr_client_len);
+      if (client_socket == -1) {
+        LOGE("accept failed %s", strerror(errno));
+        continue;
+      }
+
+      /* select() cannot track fds >= FD_SETSIZE; refuse those. */
+      if (client_socket >= FD_SETSIZE) {
+        LOGE("client_socket is out of range. client_socket=%d",client_socket);
+        continue;
+      }
+
+      LOGE("accept a new connect on 55555, sd(%d)\n", client_socket);
+      num_fds = TUNESERVER_MAX(num_fds, client_socket);
+
+      // open camera and get handle - this is needed to
+      // be able to set parameters without starting
+      // preview stream
+      /*if (!tsctrl.camera_running) {
+        result = tuneserver_open_cam(&tsctrl.lib_handle, &tsctrl);
+        if(result) {
+          printf("\n Camera Open Fail !!! \n");
+          close(server_socket);
+          return EXIT_FAILURE;
+        }
+      }*/
+      result = tuneserver_open_cam(lib_handle);
+      if(result) {
+        LOGE("\n Tuning Library open failed!!!\n");
+        close(server_socket);
+        return NULL;
+      }
+      lib_handle->tsctrl.clientsocket_id = client_socket;
+      if (tuneserver_ack_onaccept_initprotocol(&lib_handle->tsctrl) < 0) {
+        LOGE(" Error while acking\n");
+        close(client_socket);
+        continue;
+      }
+      tuneserver_initialize_tuningp(lib_handle, client_socket,
+        lib_handle->tsctrl.proto->send_buf, lib_handle->tsctrl.proto->send_len);
+    }
+
+    /* NOTE(review): client_socket can still be -1 before any accept();
+     * FD_ISSET(-1, ...) is undefined per POSIX even though the bit was
+     * never set — confirm and consider guarding with client_socket > 0. */
+    if ((client_socket < FD_SETSIZE) && (FD_ISSET(client_socket, &tsfds))) {
+      if (lib_handle->tsctrl.proto == NULL) {
+        LOGE(" Cannot receive msg without connect\n");
+        continue;
+      }
+
+      /*Receive message and process it*/
+      recv_bytes = recv(client_socket, (void *)buf,
+        lib_handle->tsctrl.proto->next_recv_len, 0);
+      LOGD("Receive %lld bytes \n", (long long int) recv_bytes);
+
+      if (recv_bytes == -1) {
+        LOGE(" Receive failed with error %s\n",  strerror(errno));
+        //tuneserver_check_status(&tsctrl);
+        continue;
+      } else if (recv_bytes == 0) {
+        /* Orderly shutdown by the peer: tear down protocol state and
+         * go back to waiting for a new connection. */
+        LOGE("connection has been terminated\n");
+
+        tuneserver_deinitialize_tuningp(&lib_handle->tsctrl, client_socket,
+          lib_handle->tsctrl.proto->send_buf,
+          lib_handle->tsctrl.proto->send_len);
+        free(lib_handle->tsctrl.proto);
+        lib_handle->tsctrl.proto = NULL;
+
+        close(client_socket);
+        client_socket = -1;
+        //tuneserver_check_status(&tsctrl);
+      } else {
+        LOGD(" Processing socket command\n");
+
+        result = tuneserver_process_client_message(buf, &lib_handle->tsctrl);
+
+        if (result < 0) {
+          LOGE("Protocol violated\n");
+
+          free(lib_handle->tsctrl.proto);
+          lib_handle->tsctrl.proto = NULL;
+
+          close(client_socket);
+          client_socket = -1;
+          //tuneserver_check_status(&tsctrl);
+          continue;
+        }
+      }
+    }
+
+    /*
+     ** (2) PREVIEW SERVER
+     */
+    if (FD_ISSET(prev_server_socket, &tsfds)) {
+      LOGD("Receiving New Preview client connection\n");
+
+      prev_client_socket = accept(prev_server_socket,
+        &addr_client_inet.addr, &addr_client_len);
+      if (prev_client_socket == -1) {
+        LOGE("accept failed %s", strerror(errno));
+        continue;
+      }
+      if (prev_client_socket >= FD_SETSIZE) {
+        LOGE("prev_client_socket is out of range. prev_client_socket=%d",prev_client_socket);
+        continue;
+      }
+
+      lib_handle->tsctrl.pr_clientsocket_id = prev_client_socket;
+
+      LOGD("Accepted a new connection, fd(%d)\n", prev_client_socket);
+      num_fds = TUNESERVER_MAX(num_fds, prev_client_socket);
+
+      // start camera
+      /*if (!tsctrl.camera_running) {
+        result = 0;
+        result = tuneserver_open_cam(&tsctrl.lib_handle, &tsctrl);
+        if(result) {
+          printf("\n Camera Open Fail !!! \n");
+          return EXIT_FAILURE;
+        }
+      }*/
+      cam_dimension_t dim;
+      //dim.width = lib_handle->test_obj.buffer_width;
+      //dim.height = lib_handle->test_obj.buffer_height;
+      dim.width = DEFAULT_PREVIEW_WIDTH;
+      dim.height = DEFAULT_PREVIEW_HEIGHT;
+
+      LOGD("preview dimension info: w(%d), h(%d)\n", dim.width, dim.height);
+      // we have to make sure that camera is running, before init connection,
+      // because we need to know the frame size for allocating the memory.
+      prevserver_init_protocol(&lib_handle->tsctrl);
+
+      result = tuneserver_initialize_prevtuningp(lib_handle, prev_client_socket,
+        dim, (char **)&lib_handle->tsctrl.proto->send_buf,
+        &lib_handle->tsctrl.proto->send_len);
+      if (result < 0) {
+        LOGE("tuneserver_initialize_prevtuningp error!");
+        close(prev_client_socket);
+        prev_client_socket = -1;
+      }
+    }
+
+    /* NOTE(review): same -1 concern as the chromatix client above. */
+    if ((prev_client_socket < FD_SETSIZE) && (FD_ISSET(prev_client_socket, &tsfds))) {
+      recv_bytes = recv(prev_client_socket, (void *)buf,
+        lib_handle->tsctrl.pr_proto->next_recv_len, 0);
+
+      LOGD("prev_client_socket=%d\n",  prev_client_socket);
+      LOGD("next_recv_len=%d\n",  buf[0]+buf[1]*256);
+
+      if (recv_bytes <= 0) {
+        if (recv_bytes == 0) {
+          LOGE("client close the connection.\n");
+        } else {
+          LOGE("receive error: %s\n", strerror(errno));
+        }
+
+        //tuneserver_check_status(&tsctrl);
+        // if recv error, we should close the connection, free the proto data,
+        // AND wait for a new connecton..
+        // close_connection();
+        // stop_camera()
+        // cleanup_proto_data();
+        tuneserver_deinitialize_prevtuningp(&lib_handle->tsctrl,
+          (char **)&lib_handle->tsctrl.proto->send_buf,
+          &lib_handle->tsctrl.proto->send_len);
+        close(prev_client_socket);
+        prev_client_socket = -1;
+      } else {
+        result = prevserver_process_client_message((void *)buf,
+          &lib_handle->tsctrl);
+        if (result < 0) {
+          LOGE("Protocol violated\n");
+
+          //free(tsctrl->preivew_proto);
+          //free(tsctrl);
+          //max_fd = ezt_parms_listen_sd + 1;
+          tuneserver_deinitialize_prevtuningp(&lib_handle->tsctrl,
+            (char **)&lib_handle->tsctrl.proto->send_buf,
+            &lib_handle->tsctrl.proto->send_len);
+          close(prev_client_socket);
+          prev_client_socket = -1;
+          //tuneserver_check_status(&tsctrl);
+        }
+        //sleep(1);
+      }
+    }
+  } while (1);
+
+  /* NOTE(review): the loop above has no exit path, so this cleanup and
+   * the EXIT_SUCCESS return are unreachable. */
+  if (server_socket >= 0) {
+    close(server_socket);
+  }
+  if (client_socket >= 0) {
+    close(client_socket);
+  }
+  if (prev_server_socket >= 0) {
+    close(prev_server_socket);
+  }
+  if (prev_client_socket >= 0) {
+    close(prev_client_socket);
+  }
+
+  return EXIT_SUCCESS;
+}
+
+/** eztune_server_start
+ *    @lib_handle: camera library handle passed through to the server thread
+ *
+ *  Spawns the eztune server thread running eztune_proc.
+ *
+ *  Return: 0 on success, pthread_create error code otherwise.
+ **/
+int eztune_server_start (void *lib_handle)
+{
+  int status;
+
+  status = pthread_create(&eztune_thread_id, NULL, eztune_proc, lib_handle);
+  return status;
+}
+
diff --git a/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_unit_test.c b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_unit_test.c
new file mode 100644
index 0000000..c6eaab7
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_unit_test.c
@@ -0,0 +1,695 @@
+/* Copyright (c) 2013, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// Camera dependencies
+#include "mm_qcamera_app.h"
+#include "mm_qcamera_dbg.h"
+
+#define MM_QCAMERA_APP_UTEST_MAX_MAIN_LOOP 1
+#define MM_QCAMERA_APP_UTEST_OUTER_LOOP 1
+#define MM_QCAMERA_APP_UTEST_INNER_LOOP 1
+#define MM_QCAM_APP_TEST_NUM 128
+
+static mm_app_tc_t mm_app_tc[MM_QCAM_APP_TEST_NUM];
+
+/** mm_app_tc_open_close
+ *    @cam_app: camera application context
+ *
+ *  Test case: open then close every detected camera once.
+ *
+ *  Return: MM_CAMERA_OK on success, first error code otherwise.
+ **/
+int mm_app_tc_open_close(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK;
+    int cam_idx;
+    mm_camera_test_obj_t test_obj;
+
+    printf("\n Verifying open/close cameras...\n");
+    for (cam_idx = 0; cam_idx < cam_app->num_cameras; cam_idx++) {
+        memset(&test_obj, 0, sizeof(test_obj));
+        rc = mm_app_open(cam_app, cam_idx, &test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_open() cam_idx=%d, err=%d\n",
+                        cam_idx, rc);
+            break;
+        }
+        /* Give the camera a moment to settle before closing. */
+        sleep(1);
+        rc = mm_app_close(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_close() cam_idx=%d, err=%d\n",
+                        cam_idx, rc);
+            break;
+        }
+    }
+    printf(rc == MM_CAMERA_OK ? "\nPassed\n" : "\nFailed\n");
+    LOGD("END, rc = %d\n",  rc);
+    return rc;
+}
+
+/** mm_app_tc_start_stop_preview
+ *    @cam_app: camera application context
+ *
+ *  Test case: for every camera, repeatedly start and stop the
+ *  preview stream, then close the camera.
+ *
+ *  Return: MM_CAMERA_OK on success, first error code otherwise.
+ **/
+int mm_app_tc_start_stop_preview(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK;
+    int cam_idx, iter;
+    mm_camera_test_obj_t test_obj;
+
+    printf("\n Verifying start/stop preview...\n");
+    for (cam_idx = 0; cam_idx < cam_app->num_cameras; cam_idx++) {
+        memset(&test_obj, 0, sizeof(test_obj));
+        rc = mm_app_open(cam_app, cam_idx, &test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_open() cam_idx=%d, err=%d\n",
+                        cam_idx, rc);
+            break;
+        }
+
+        for (iter = 0; iter < MM_QCAMERA_APP_UTEST_INNER_LOOP; iter++) {
+            rc = mm_app_start_preview(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                LOGE(" mm_app_start_preview() cam_idx=%d, err=%d\n",
+                            cam_idx, rc);
+                break;
+            }
+            sleep(1);
+            rc = mm_app_stop_preview(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                LOGE(" mm_app_stop_preview() cam_idx=%d, err=%d\n",
+                            cam_idx, rc);
+                break;
+            }
+        }
+
+        /* Close unconditionally; OR the result so an earlier failure
+         * is not masked by a successful close. */
+        rc |= mm_app_close(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_close() cam_idx=%d, err=%d\n",
+                        cam_idx, rc);
+            break;
+        }
+    }
+    printf(rc == MM_CAMERA_OK ? "\nPassed\n" : "\nFailed\n");
+    LOGD("END, rc = %d\n",  rc);
+    return rc;
+}
+
+/** mm_app_tc_start_stop_zsl
+ *    @cam_app: camera application context
+ *
+ *  Test case: for every camera, start and stop ZSL preview, then
+ *  close the camera.
+ *
+ *  Return: MM_CAMERA_OK on success, first error code otherwise.
+ **/
+int mm_app_tc_start_stop_zsl(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK;
+    int i, j;
+    mm_camera_test_obj_t test_obj;
+
+    /* Fix: banner used to say "preview" (copy/paste from the preview
+     * test), which made test logs ambiguous. */
+    printf("\n Verifying start/stop zsl...\n");
+    for (i = 0; i < cam_app->num_cameras; i++) {
+        memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+        rc = mm_app_open(cam_app, i, &test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_open() cam_idx=%d, err=%d\n",
+                        i, rc);
+            break;
+        }
+
+        /* Use the shared inner-loop macro like the sibling tests,
+         * not a hard-coded bound of 1 (same value today). */
+        for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+            rc = mm_app_start_preview_zsl(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                LOGE(" mm_app_start_preview_zsl() cam_idx=%d, err=%d\n",
+                            i, rc);
+                break;
+            }
+            sleep(1);
+            rc = mm_app_stop_preview_zsl(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                LOGE(" mm_app_stop_preview_zsl() cam_idx=%d, err=%d\n",
+                            i, rc);
+                break;
+            }
+        }
+
+        /* Fix: use |= so a start/stop failure above is not masked by a
+         * successful close (plain '=' overwrote rc and reported Passed). */
+        rc |= mm_app_close(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_close() cam_idx=%d, err=%d\n",
+                        i, rc);
+            break;
+        }
+    }
+    if (rc == MM_CAMERA_OK) {
+        printf("\nPassed\n");
+    } else {
+        printf("\nFailed\n");
+    }
+    LOGD("END, rc = %d\n",  rc);
+    return rc;
+}
+
+/** mm_app_tc_start_stop_video_preview
+ *    @cam_app: camera application context
+ *
+ *  Test case: for every camera, repeatedly start and stop the
+ *  record-preview stream, then close the camera.
+ *
+ *  Return: MM_CAMERA_OK on success, first error code otherwise.
+ **/
+int mm_app_tc_start_stop_video_preview(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK;
+    int i, j;
+    mm_camera_test_obj_t test_obj;
+
+    printf("\n Verifying start/stop video preview...\n");
+    for (i = 0; i < cam_app->num_cameras; i++) {
+        memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+        rc = mm_app_open(cam_app, i, &test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_open() cam_idx=%d, err=%d\n",
+                        i, rc);
+            break;
+        }
+
+        for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+            rc = mm_app_start_record_preview(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                LOGE("mm_app_start_record_preview() cam_idx=%d, err=%d\n",
+                            i, rc);
+                break;
+            }
+            sleep(1);
+            rc = mm_app_stop_record_preview(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                LOGE("mm_app_stop_record_preview() cam_idx=%d, err=%d\n",
+                            i, rc);
+                break;
+            }
+        }
+
+        /* Fix: use |= so a start/stop failure above is not masked by a
+         * successful close (plain '=' overwrote rc and reported Passed). */
+        rc |= mm_app_close(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_close() cam_idx=%d, err=%d\n",
+                        i, rc);
+            break;
+        }
+    }
+    if (rc == MM_CAMERA_OK) {
+        printf("\nPassed\n");
+    } else {
+        printf("\nFailed\n");
+    }
+    LOGD("END, rc = %d\n",  rc);
+    return rc;
+}
+
+/** mm_app_tc_start_stop_video_record
+ *    @cam_app: camera application context
+ *
+ *  Test case: for every camera, start record-preview, loop
+ *  start/stop of recording, then tear down preview and close.
+ *
+ *  Return: MM_CAMERA_OK on success, first error code otherwise.
+ **/
+int mm_app_tc_start_stop_video_record(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK;
+    int i, j;
+    mm_camera_test_obj_t test_obj;
+
+    printf("\n Verifying start/stop recording...\n");
+    for (i = 0; i < cam_app->num_cameras; i++) {
+        memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+        rc = mm_app_open(cam_app, i, &test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_open() cam_idx=%d, err=%d\n",
+                        i, rc);
+            break;
+        }
+
+        /* Recording requires the record-preview stream to be up first. */
+        rc = mm_app_start_record_preview(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_start_record_preview() cam_idx=%d, err=%d\n",
+                        i, rc);
+            mm_app_close(&test_obj);
+            break;
+        }
+
+        sleep(1);
+
+        for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+            rc = mm_app_start_record(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                LOGE("mm_app_start_record() cam_idx=%d, err=%d\n",
+                            i, rc);
+                break;
+            }
+
+            sleep(1);
+
+            rc = mm_app_stop_record(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                LOGE("mm_app_stop_record() cam_idx=%d, err=%d\n",
+                            i, rc);
+                break;
+            }
+        }
+        /* Inner-loop failure: tear down in reverse order before bailing. */
+        if (rc != MM_CAMERA_OK) {
+            LOGE("start/stop record cam_idx=%d, err=%d\n",
+                        i, rc);
+            mm_app_stop_record_preview(&test_obj);
+            mm_app_close(&test_obj);
+            break;
+        }
+
+        rc = mm_app_stop_record_preview(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_stop_record_preview() cam_idx=%d, err=%d\n",
+                        i, rc);
+            mm_app_close(&test_obj);
+            break;
+        }
+
+        rc = mm_app_close(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_close() cam_idx=%d, err=%d\n",
+                        i, rc);
+            break;
+        }
+    }
+    if (rc == MM_CAMERA_OK) {
+        printf("\nPassed\n");
+    } else {
+        printf("\nFailed\n");
+    }
+    LOGD("END, rc = %d\n",  rc);
+    return rc;
+}
+
+/** mm_app_tc_start_stop_live_snapshot
+ *    @cam_app: camera application context
+ *
+ *  Test case: for every camera, bring up record-preview and recording,
+ *  loop start/stop of live snapshot (waiting for each JPEG), then tear
+ *  everything down in reverse order and close.
+ *
+ *  Return: MM_CAMERA_OK on success, first error code otherwise.
+ **/
+int mm_app_tc_start_stop_live_snapshot(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK;
+    int i, j;
+    mm_camera_test_obj_t test_obj;
+
+    printf("\n Verifying start/stop live snapshot...\n");
+    for (i = 0; i < cam_app->num_cameras; i++) {
+        memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+        rc = mm_app_open(cam_app, i, &test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_open() cam_idx=%d, err=%d\n",
+                        i, rc);
+            break;
+        }
+
+        /* Live snapshot requires record-preview + active recording. */
+        rc = mm_app_start_record_preview(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_start_record_preview() cam_idx=%d, err=%d\n",
+                        i, rc);
+            mm_app_close(&test_obj);
+            break;
+        }
+
+        sleep(1);
+
+        rc = mm_app_start_record(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_start_record() cam_idx=%d, err=%d\n",
+                        i, rc);
+            mm_app_stop_record_preview(&test_obj);
+            mm_app_close(&test_obj);
+            break;
+        }
+
+        sleep(1);
+
+        for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+            rc = mm_app_start_live_snapshot(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                LOGE("mm_app_start_live_snapshot() cam_idx=%d, err=%d\n",
+                            i, rc);
+                break;
+            }
+
+            /* wait for jpeg is done */
+            mm_camera_app_wait();
+
+            rc = mm_app_stop_live_snapshot(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                LOGE("mm_app_stop_live_snapshot() cam_idx=%d, err=%d\n",
+                            i, rc);
+                break;
+            }
+        }
+        /* Inner-loop failure: tear down in reverse order before bailing. */
+        if (rc != MM_CAMERA_OK) {
+            LOGE("start/stop live snapshot cam_idx=%d, err=%d\n",
+                        i, rc);
+            mm_app_stop_record(&test_obj);
+            mm_app_stop_record_preview(&test_obj);
+            mm_app_close(&test_obj);
+            break;
+        }
+
+        rc = mm_app_stop_record(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_stop_record() cam_idx=%d, err=%d\n",
+                        i, rc);
+            mm_app_stop_record_preview(&test_obj);
+            mm_app_close(&test_obj);
+            break;
+        }
+
+        sleep(1);
+
+        rc = mm_app_stop_record_preview(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_stop_record_preview() cam_idx=%d, err=%d\n",
+                        i, rc);
+            mm_app_close(&test_obj);
+            break;
+        }
+
+        rc = mm_app_close(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_close() cam_idx=%d, err=%d\n",
+                        i, rc);
+            break;
+        }
+    }
+    if (rc == MM_CAMERA_OK) {
+        printf("\nPassed\n");
+    } else {
+        printf("\nFailed\n");
+    }
+    LOGD("END, rc = %d\n",  rc);
+    return rc;
+}
+
+/** mm_app_tc_capture_raw
+ *    @cam_app: camera application context
+ *
+ *  Test case: for every camera, capture num_snapshot raw frames per
+ *  iteration, waiting for each frame callback, then close.
+ *
+ *  Return: MM_CAMERA_OK on success, first error code otherwise.
+ **/
+int mm_app_tc_capture_raw(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK;
+    int i, j;
+    mm_camera_test_obj_t test_obj;
+    uint8_t num_snapshot = 1;
+    uint8_t num_rcvd_snapshot = 0;
+
+    printf("\n Verifying raw capture...\n");
+    for (i = 0; i < cam_app->num_cameras; i++) {
+        memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+        rc = mm_app_open(cam_app, i, &test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_open() cam_idx=%d, err=%d\n",
+                        i, rc);
+            break;
+        }
+
+        for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+            rc = mm_app_start_capture_raw(&test_obj, num_snapshot);
+            if (rc != MM_CAMERA_OK) {
+                LOGE(" mm_app_start_capture() cam_idx=%d, err=%d\n",
+                            i, rc);
+                break;
+            }
+            /* Fix: reset the received-frame counter for every capture
+             * run; it was initialized only once, so every run after the
+             * first saw num_rcvd_snapshot == num_snapshot and skipped
+             * the wait entirely. */
+            num_rcvd_snapshot = 0;
+            while (num_rcvd_snapshot < num_snapshot) {
+                mm_camera_app_wait();
+                num_rcvd_snapshot++;
+            }
+            rc = mm_app_stop_capture_raw(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                LOGE(" mm_app_stop_capture() cam_idx=%d, err=%d\n",
+                            i, rc);
+                break;
+            }
+        }
+
+        /* OR the close result so a capture failure above is preserved. */
+        rc |= mm_app_close(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_close() cam_idx=%d, err=%d\n",
+                        i, rc);
+            break;
+        }
+    }
+    if (rc == MM_CAMERA_OK) {
+        printf("\nPassed\n");
+    } else {
+        printf("\nFailed\n");
+    }
+    LOGD("END, rc = %d\n",  rc);
+    return rc;
+}
+
+/* Unit test: open each camera, run regular (YUV) capture start/wait/stop
+ * for MM_QCAMERA_APP_UTEST_INNER_LOOP iterations, then close the camera.
+ * Returns MM_CAMERA_OK on success, the first error code otherwise. */
+int mm_app_tc_capture_regular(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK, rc2 = MM_CAMERA_OK;
+    int i, j;
+    mm_camera_test_obj_t test_obj;
+    uint8_t num_snapshot = 1;
+    uint8_t num_rcvd_snapshot = 0;
+
+    printf("\n Verifying capture...\n");
+    for (i = 0; i < cam_app->num_cameras; i++) {
+        memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+        rc = mm_app_open(cam_app, i, &test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_open() cam_idx=%d, err=%d\n",
+                        i, rc);
+            break;
+        }
+
+        for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+            rc = mm_app_start_capture(&test_obj, num_snapshot);
+            if (rc != MM_CAMERA_OK) {
+                LOGE(" mm_app_start_capture() cam_idx=%d, err=%d\n",
+                            i, rc);
+                break;
+            }
+            /* Reset the receive counter every iteration; without this
+             * only the first pass of the inner loop ever waited. */
+            num_rcvd_snapshot = 0;
+            while (num_rcvd_snapshot < num_snapshot) {
+                mm_camera_app_wait();
+                num_rcvd_snapshot++;
+            }
+            rc = mm_app_stop_capture(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                LOGE(" mm_app_stop_capture() cam_idx=%d, err=%d\n",
+                            i, rc);
+                break;
+            }
+        }
+
+        /* Close unconditionally but do not let a successful close mask
+         * an earlier start/stop failure (rc was overwritten before). */
+        rc2 = mm_app_close(&test_obj);
+        if (rc2 != MM_CAMERA_OK) {
+            LOGE("mm_app_close() cam_idx=%d, err=%d\n",
+                        i, rc2);
+            if (rc == MM_CAMERA_OK) {
+                rc = rc2;
+            }
+        }
+        if (rc != MM_CAMERA_OK) {
+            break;
+        }
+    }
+    if (rc == MM_CAMERA_OK) {
+        printf("\nPassed\n");
+    } else {
+        printf("\nFailed\n");
+    }
+    LOGD("END, rc = %d\n",  rc);
+    return rc;
+}
+
+/* Unit test: open each camera, run a 3-frame burst capture
+ * start/wait/stop for MM_QCAMERA_APP_UTEST_INNER_LOOP iterations, then
+ * close. Returns MM_CAMERA_OK on success, first error code otherwise. */
+int mm_app_tc_capture_burst(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK, rc2 = MM_CAMERA_OK;
+    int i, j;
+    mm_camera_test_obj_t test_obj;
+    uint8_t num_snapshot = 3;
+    uint8_t num_rcvd_snapshot = 0;
+
+    printf("\n Verifying burst capture...\n");
+    for (i = 0; i < cam_app->num_cameras; i++) {
+        memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+        rc = mm_app_open(cam_app, i, &test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_open() cam_idx=%d, err=%d\n",
+                        i, rc);
+            break;
+        }
+
+        for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+            rc = mm_app_start_capture(&test_obj, num_snapshot);
+            if (rc != MM_CAMERA_OK) {
+                LOGE(" mm_app_start_capture() cam_idx=%d, err=%d\n",
+                            i, rc);
+                break;
+            }
+            /* Reset the receive counter every iteration; without this
+             * only the first burst was actually waited for. */
+            num_rcvd_snapshot = 0;
+            while (num_rcvd_snapshot < num_snapshot) {
+                mm_camera_app_wait();
+                num_rcvd_snapshot++;
+            }
+            rc = mm_app_stop_capture(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                LOGE(" mm_app_stop_capture() cam_idx=%d, err=%d\n",
+                            i, rc);
+                break;
+            }
+        }
+
+        /* Close unconditionally but preserve the first error so a
+         * successful close cannot turn a failed run into "Passed". */
+        rc2 = mm_app_close(&test_obj);
+        if (rc2 != MM_CAMERA_OK) {
+            LOGE("mm_app_close() cam_idx=%d, err=%d\n",
+                        i, rc2);
+            if (rc == MM_CAMERA_OK) {
+                rc = rc2;
+            }
+        }
+        if (rc != MM_CAMERA_OK) {
+            break;
+        }
+    }
+    if (rc == MM_CAMERA_OK) {
+        printf("\nPassed\n");
+    } else {
+        printf("\nFailed\n");
+    }
+    LOGD("END, rc = %d\n",  rc);
+    return rc;
+}
+
+/* Unit test: open each camera, start/stop RDI streaming in burst mode
+ * (3 frames) for MM_QCAMERA_APP_UTEST_INNER_LOOP iterations, close.
+ * Returns MM_CAMERA_OK on success, the first error code otherwise. */
+int mm_app_tc_rdi_burst(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK, rc2 = MM_CAMERA_OK;
+    int i, j;
+    mm_camera_test_obj_t test_obj;
+
+    printf("\n Verifying rdi burst (3) capture...\n");
+    for (i = 0; i < cam_app->num_cameras; i++) {
+        memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+        rc = mm_app_open(cam_app, i, &test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_open() cam_idx=%d, err=%d\n",
+                        i, rc);
+            break;
+        }
+
+        for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+            rc = mm_app_start_rdi(&test_obj, 3);
+            if (rc != MM_CAMERA_OK) {
+                /* Fixed copy-paste log text (said mm_app_start_preview). */
+                LOGE(" mm_app_start_rdi() cam_idx=%d, err=%d\n",
+                            i, rc);
+                break;
+            }
+            sleep(1);
+            rc = mm_app_stop_rdi(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                /* Fixed copy-paste log text (said mm_app_stop_preview). */
+                LOGE(" mm_app_stop_rdi() cam_idx=%d, err=%d\n",
+                            i, rc);
+                break;
+            }
+        }
+
+        rc2 = mm_app_close(&test_obj);
+        if (rc2 != MM_CAMERA_OK) {
+            LOGE("mm_app_close() cam_idx=%d, err=%d\n",
+                        i, rc2);
+            if (rc == MM_CAMERA_OK) {
+                rc = rc2;
+            }
+            break;
+        }
+    }
+    if (rc == MM_CAMERA_OK) {
+        printf("\nPassed\n");
+    } else {
+        printf("\nFailed\n");
+    }
+    LOGD("END, rc = %d\n",  rc);
+    return rc;
+}
+
+/* Unit test: open each camera, start/stop continuous RDI streaming for
+ * MM_QCAMERA_APP_UTEST_INNER_LOOP iterations, then close the camera.
+ * Returns MM_CAMERA_OK on success, the first error code otherwise. */
+int mm_app_tc_rdi_cont(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK, rc2 = MM_CAMERA_OK;
+    int i, j;
+    mm_camera_test_obj_t test_obj;
+
+    printf("\n Verifying rdi continuous capture...\n");
+    for (i = 0; i < cam_app->num_cameras; i++) {
+        memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+        rc = mm_app_open(cam_app, i, &test_obj);
+        if (rc != MM_CAMERA_OK) {
+            LOGE("mm_app_open() cam_idx=%d, err=%d\n",
+                        i, rc);
+            break;
+        }
+
+        for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+            rc = mm_app_start_rdi(&test_obj, 0);
+            if (rc != MM_CAMERA_OK) {
+                /* Fixed copy-paste log text (said mm_app_start_preview). */
+                LOGE(" mm_app_start_rdi() cam_idx=%d, err=%d\n",
+                            i, rc);
+                break;
+            }
+            sleep(1);
+            rc = mm_app_stop_rdi(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                /* Fixed copy-paste log text (said mm_app_stop_preview). */
+                LOGE(" mm_app_stop_rdi() cam_idx=%d, err=%d\n",
+                            i, rc);
+                break;
+            }
+        }
+
+        rc2 = mm_app_close(&test_obj);
+        if (rc2 != MM_CAMERA_OK) {
+            LOGE("mm_app_close() cam_idx=%d, err=%d\n",
+                        i, rc2);
+            if (rc == MM_CAMERA_OK) {
+                rc = rc2;
+            }
+            break;
+        }
+    }
+    if (rc == MM_CAMERA_OK) {
+        printf("\nPassed\n");
+    } else {
+        printf("\nFailed\n");
+    }
+    LOGD("END, rc = %d\n",  rc);
+    return rc;
+}
+
+/* Populate the global mm_app_tc[] table with the unit-test functions to
+ * run. Only open/close and preview start/stop are currently enabled; the
+ * remaining cases are intentionally disabled (commented out).
+ * Returns the number of test cases registered. */
+int mm_app_gen_test_cases()
+{
+    int tc = 0;
+    memset(mm_app_tc, 0, sizeof(mm_app_tc));
+    if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_open_close;
+    if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_start_stop_preview;
+    //if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_start_stop_zsl;
+    //if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_start_stop_video_preview;
+    //if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_start_stop_video_record;
+    //if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_start_stop_live_snapshot;
+    //if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_capture_regular;
+    //if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_capture_burst;
+    //if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_rdi_cont;
+    //if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_rdi_burst;
+
+    return tc;
+}
+
+/* Entry point of the unit-test engine: runs every registered test case
+ * MM_QCAMERA_APP_UTEST_OUTER_LOOP times and aborts on the first failure.
+ * Returns MM_CAMERA_OK if every case passed, the failing case's error
+ * code otherwise. */
+int mm_app_unit_test_entry(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK;
+    int i, j, tc = 0;
+
+    tc = mm_app_gen_test_cases();
+    LOGD("Running %d test cases\n",tc);
+    for (i = 0; i < tc; i++) {
+        for (j = 0; j < MM_QCAMERA_APP_UTEST_OUTER_LOOP; j++) {
+            mm_app_tc[i].r = mm_app_tc[i].f(cam_app);
+            if (mm_app_tc[i].r != MM_CAMERA_OK) {
+                printf(" test case %d (iteration %d) error = %d, abort unit testing engine!!!!\n",
+                        i, j, mm_app_tc[i].r);
+                rc = mm_app_tc[i].r;
+                goto end;
+            }
+        }
+    }
+end:
+    /* Fixed summary line: missing '\n' escape and "TSET" typo. */
+    printf("\nTOTAL_TEST_CASE = %d, NUM_TEST_RAN = %d, rc=%d\n", tc, i, rc);
+    return rc;
+}
+
+
+
+
diff --git a/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_video.c b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_video.c
new file mode 100644
index 0000000..ba0a57f
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-camera-test/src/mm_qcamera_video.c
@@ -0,0 +1,258 @@
+/*
+Copyright (c) 2012-2014, 2016, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of The Linux Foundation nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+// Camera dependencies
+#include "mm_qcamera_app.h"
+#include "mm_qcamera_dbg.h"
+
+/* Frame callback for the video stream: dumps each received frame to a
+ * "V_C<camera_handle>" .yuv file, requeues the buffer back to the
+ * channel, and invalidates the buffer cache so the next hardware write
+ * is visible to the CPU. */
+static void mm_app_video_notify_cb(mm_camera_super_buf_t *bufs,
+                                   void *user_data)
+{
+    char file_name[64];
+    mm_camera_buf_def_t *frame = bufs->bufs[0];
+    mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+
+    LOGD("BEGIN - length=%zu, frame idx = %d\n",
+          frame->frame_len, frame->frame_idx);
+    snprintf(file_name, sizeof(file_name), "V_C%d", pme->cam->camera_handle);
+    mm_app_dump_frame(frame, file_name, "yuv", frame->frame_idx);
+
+    /* Return the buffer to the stream so it can be refilled. */
+    if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+                                            bufs->ch_id,
+                                            frame)) {
+        LOGE("Failed in Preview Qbuf\n");
+    }
+    mm_app_cache_ops((mm_camera_app_meminfo_t *)frame->mem_info,
+                     ION_IOC_INV_CACHES);
+
+    LOGD("END\n");
+}
+
+/* Add a continuous VIDEO stream to the given channel and configure it
+ * with the default video format/dimensions and the supplied callback.
+ * Returns the stream on success, NULL on failure.
+ * NOTE(review): on config failure the freshly added stream is not
+ * deleted before returning NULL — confirm the caller cleans it up via
+ * channel deletion. */
+mm_camera_stream_t * mm_app_add_video_stream(mm_camera_test_obj_t *test_obj,
+                                             mm_camera_channel_t *channel,
+                                             mm_camera_buf_notify_t stream_cb,
+                                             void *userdata,
+                                             uint8_t num_bufs)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_stream_t *stream = NULL;
+    cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+
+    stream = mm_app_add_stream(test_obj, channel);
+    if (NULL == stream) {
+        LOGE("add stream failed\n");
+        return NULL;
+    }
+
+    stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
+    stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
+    stream->s_config.mem_vtbl.clean_invalidate_buf =
+      mm_app_stream_clean_invalidate_buf;
+    stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
+    stream->s_config.mem_vtbl.user_data = (void *)stream;
+    stream->s_config.stream_cb = stream_cb;
+    stream->s_config.stream_cb_sync = NULL;
+    stream->s_config.userdata = userdata;
+    stream->num_of_bufs = num_bufs;
+
+    stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
+    memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
+    stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_VIDEO;
+    stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+    stream->s_config.stream_info->fmt = DEFAULT_VIDEO_FORMAT;
+    stream->s_config.stream_info->dim.width = DEFAULT_VIDEO_WIDTH;
+    stream->s_config.stream_info->dim.height = DEFAULT_VIDEO_HEIGHT;
+    stream->s_config.padding_info = cam_cap->padding_info;
+
+    rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
+    if (MM_CAMERA_OK != rc) {
+        /* Fixed copy-paste log text (said "preview" for a video stream). */
+        LOGE("config video stream err=%d\n",  rc);
+        return NULL;
+    }
+
+    return stream;
+}
+
+/* Create a VIDEO channel and attach a single-buffer video stream using
+ * mm_app_video_notify_cb as the frame callback.
+ * Returns the channel on success, NULL on failure (the channel is
+ * deleted again if the stream could not be added). */
+mm_camera_channel_t * mm_app_add_video_channel(mm_camera_test_obj_t *test_obj)
+{
+    mm_camera_channel_t *channel = NULL;
+    mm_camera_stream_t *stream = NULL;
+
+    channel = mm_app_add_channel(test_obj,
+                                 MM_CHANNEL_TYPE_VIDEO,
+                                 NULL,
+                                 NULL,
+                                 NULL);
+    if (NULL == channel) {
+        LOGE("add channel failed");
+        return NULL;
+    }
+
+    stream = mm_app_add_video_stream(test_obj,
+                                     channel,
+                                     mm_app_video_notify_cb,
+                                     (void *)test_obj,
+                                     1);
+    if (NULL == stream) {
+        LOGE("add video stream failed\n");
+        mm_app_del_channel(test_obj, channel);
+        return NULL;
+    }
+
+    return channel;
+}
+
+/* Set up the recording use case: add preview, video and snapshot
+ * channels, then start only the preview channel. The video and snapshot
+ * channels are started later via mm_app_start_record() and
+ * mm_app_start_live_snapshot(). On any failure every channel added so
+ * far is deleted. Returns MM_CAMERA_OK or an error code. */
+int mm_app_start_record_preview(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_channel_t *p_ch = NULL;
+    mm_camera_channel_t *v_ch = NULL;
+    mm_camera_channel_t *s_ch = NULL;
+
+    p_ch = mm_app_add_preview_channel(test_obj);
+    if (NULL == p_ch) {
+        LOGE("add preview channel failed");
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    v_ch = mm_app_add_video_channel(test_obj);
+    if (NULL == v_ch) {
+        LOGE("add video channel failed");
+        mm_app_del_channel(test_obj, p_ch);
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    s_ch = mm_app_add_snapshot_channel(test_obj);
+    if (NULL == s_ch) {
+        LOGE("add snapshot channel failed");
+        mm_app_del_channel(test_obj, p_ch);
+        mm_app_del_channel(test_obj, v_ch);
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    rc = mm_app_start_channel(test_obj, p_ch);
+    if (MM_CAMERA_OK != rc) {
+        LOGE("start preview failed rc=%d\n", rc);
+        mm_app_del_channel(test_obj, p_ch);
+        mm_app_del_channel(test_obj, v_ch);
+        mm_app_del_channel(test_obj, s_ch);
+        return rc;
+    }
+
+    return rc;
+}
+
+/* Tear down the recording use case: stop and delete the preview, video
+ * and snapshot channels. All three are attempted even if one fails;
+ * the first error code encountered is returned (previously a later
+ * successful stop overwrote an earlier failure). */
+int mm_app_stop_record_preview(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+    int final_rc = MM_CAMERA_OK;
+    mm_camera_channel_t *p_ch = NULL;
+    mm_camera_channel_t *v_ch = NULL;
+    mm_camera_channel_t *s_ch = NULL;
+
+    p_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_PREVIEW);
+    v_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_VIDEO);
+    s_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_SNAPSHOT);
+
+    rc = mm_app_stop_and_del_channel(test_obj, p_ch);
+    if (MM_CAMERA_OK != rc) {
+        LOGE("Stop Preview failed rc=%d\n", rc);
+        final_rc = rc;
+    }
+
+    rc = mm_app_stop_and_del_channel(test_obj, v_ch);
+    if (MM_CAMERA_OK != rc) {
+        /* Fixed copy-paste log text (said "Stop Preview"). */
+        LOGE("Stop Video failed rc=%d\n", rc);
+        if (MM_CAMERA_OK == final_rc) {
+            final_rc = rc;
+        }
+    }
+
+    rc = mm_app_stop_and_del_channel(test_obj, s_ch);
+    if (MM_CAMERA_OK != rc) {
+        /* Fixed copy-paste log text (said "Stop Preview"). */
+        LOGE("Stop Snapshot failed rc=%d\n", rc);
+        if (MM_CAMERA_OK == final_rc) {
+            final_rc = rc;
+        }
+    }
+
+    return final_rc;
+}
+
+/* Start the (already added) video channel to begin recording.
+ * Returns MM_CAMERA_OK on success, an error code otherwise. */
+int mm_app_start_record(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_channel_t *v_ch = NULL;
+
+    v_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_VIDEO);
+
+    rc = mm_app_start_channel(test_obj, v_ch);
+    if (MM_CAMERA_OK != rc) {
+        LOGE("start recording failed rc=%d\n", rc);
+    }
+
+    return rc;
+}
+
+/* Stop the video channel to end recording (channel is kept, not
+ * deleted). Returns MM_CAMERA_OK on success, an error code otherwise. */
+int mm_app_stop_record(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_channel_t *v_ch = NULL;
+
+    v_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_VIDEO);
+
+    rc = mm_app_stop_channel(test_obj, v_ch);
+    if (MM_CAMERA_OK != rc) {
+        LOGE("stop recording failed rc=%d\n", rc);
+    }
+
+    return rc;
+}
+
+/* Start the (already added) snapshot channel to take a live snapshot
+ * while recording. Returns MM_CAMERA_OK or an error code. */
+int mm_app_start_live_snapshot(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_channel_t *s_ch = NULL;
+
+    s_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_SNAPSHOT);
+
+    rc = mm_app_start_channel(test_obj, s_ch);
+    if (MM_CAMERA_OK != rc) {
+        /* Fixed copy-paste log text (said "start recording"). */
+        LOGE("start live snapshot failed rc=%d\n", rc);
+    }
+
+    return rc;
+}
+
+/* Stop the snapshot channel after a live snapshot completes.
+ * Returns MM_CAMERA_OK or an error code. */
+int mm_app_stop_live_snapshot(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_channel_t *s_ch = NULL;
+
+    s_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_SNAPSHOT);
+
+    rc = mm_app_stop_channel(test_obj, s_ch);
+    if (MM_CAMERA_OK != rc) {
+        /* Fixed copy-paste log text (said "stop recording"). */
+        LOGE("stop live snapshot failed rc=%d\n", rc);
+    }
+
+    return rc;
+}
diff --git a/msmcobalt/QCamera2/stack/mm-jpeg-interface/Android.mk b/msmcobalt/QCamera2/stack/mm-jpeg-interface/Android.mk
new file mode 100644
index 0000000..175796b
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-jpeg-interface/Android.mk
@@ -0,0 +1,82 @@
+# Build libmmjpeg_interface: the JPEG encode/decode interface shared
+# library for the QCamera2 HAL stack.
+OLD_LOCAL_PATH := $(LOCAL_PATH)
+LOCAL_PATH := $(call my-dir)
+
+include $(LOCAL_PATH)/../../../common.mk
+include $(CLEAR_VARS)
+
+LOCAL_32_BIT_ONLY := $(BOARD_QTI_CAMERA_32BIT_ONLY)
+LOCAL_CFLAGS+= -D_ANDROID_ -DQCAMERA_REDEFINE_LOG
+
+LOCAL_CFLAGS += -Wall -Wextra -Werror -Wno-unused-parameter
+
+LOCAL_C_INCLUDES+= $(kernel_includes)
+LOCAL_ADDITIONAL_DEPENDENCIES := $(common_deps)
+
+# lib2d-based in-stream rotation is disabled by default.
+LIB2D_ROTATION=false
+
+LOCAL_C_INCLUDES += \
+    frameworks/native/include/media/openmax \
+    $(LOCAL_PATH)/inc \
+    $(LOCAL_PATH)/../common \
+    $(LOCAL_PATH)/../mm-camera-interface/inc \
+    $(LOCAL_PATH)/../../.. \
+    $(LOCAL_PATH)/../../../mm-image-codec/qexif \
+    $(LOCAL_PATH)/../../../mm-image-codec/qomx_core
+
+ifeq ($(strip $(LIB2D_ROTATION)),true)
+    LOCAL_C_INCLUDES += $(LOCAL_PATH)/../mm-lib2d-interface/inc
+    LOCAL_CFLAGS += -DLIB2D_ROTATION_ENABLE
+endif
+
+
+ifeq ($(strip $(TARGET_USES_ION)),true)
+    LOCAL_CFLAGS += -DUSE_ION
+endif
+
+ifneq (,$(filter  msm8610,$(TARGET_BOARD_PLATFORM)))
+    LOCAL_CFLAGS+= -DLOAD_ADSP_RPC_LIB
+endif
+
+# Targets with two JPEG cores support two concurrent encode sessions.
+DUAL_JPEG_TARGET_LIST := msm8974
+DUAL_JPEG_TARGET_LIST += msm8994
+
+ifneq (,$(filter  $(DUAL_JPEG_TARGET_LIST),$(TARGET_BOARD_PLATFORM)))
+    LOCAL_CFLAGS+= -DMM_JPEG_CONCURRENT_SESSIONS_COUNT=2
+else
+    LOCAL_CFLAGS+= -DMM_JPEG_CONCURRENT_SESSIONS_COUNT=1
+endif
+
+JPEG_PIPELINE_TARGET_LIST := msm8994
+JPEG_PIPELINE_TARGET_LIST += msm8992
+JPEG_PIPELINE_TARGET_LIST += msm8996
+JPEG_PIPELINE_TARGET_LIST += msmcobalt
+
+ifneq (,$(filter  $(JPEG_PIPELINE_TARGET_LIST),$(TARGET_BOARD_PLATFORM)))
+    LOCAL_CFLAGS+= -DMM_JPEG_USE_PIPELINE
+endif
+
+# System header file path prefix
+LOCAL_CFLAGS += -DSYSTEM_HEADER_PREFIX=sys
+
+LOCAL_SRC_FILES := \
+    src/mm_jpeg_queue.c \
+    src/mm_jpeg_exif.c \
+    src/mm_jpeg.c \
+    src/mm_jpeg_interface.c \
+    src/mm_jpeg_ionbuf.c \
+    src/mm_jpegdec_interface.c \
+    src/mm_jpegdec.c \
+    src/mm_jpeg_mpo_composer.c
+
+LOCAL_MODULE           := libmmjpeg_interface
+LOCAL_PRELINK_MODULE   := false
+LOCAL_SHARED_LIBRARIES := libdl libcutils liblog libqomx_core libmmcamera_interface
+ifeq ($(strip $(LIB2D_ROTATION)),true)
+    LOCAL_SHARED_LIBRARIES += libmmlib2d_interface
+endif
+LOCAL_MODULE_TAGS := optional
+
+# NOTE(review): LOCAL_32_BIT_ONLY was already assigned above; this
+# duplicate assignment is harmless but redundant.
+LOCAL_32_BIT_ONLY := $(BOARD_QTI_CAMERA_32BIT_ONLY)
+include $(BUILD_SHARED_LIBRARY)
+
+LOCAL_PATH := $(OLD_LOCAL_PATH)
diff --git a/msmcobalt/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg.h b/msmcobalt/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg.h
new file mode 100644
index 0000000..65c07d6
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg.h
@@ -0,0 +1,536 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef MM_JPEG_H_
+#define MM_JPEG_H_
+
+// OpenMAX dependencies
+#include "OMX_Types.h"
+#include "OMX_Index.h"
+#include "OMX_Core.h"
+#include "OMX_Component.h"
+#include "QOMX_JpegExtensions.h"
+
+// JPEG dependencies
+#include "mm_jpeg_interface.h"
+#include "mm_jpeg_ionbuf.h"
+
+// Camera dependencies
+#include "cam_list.h"
+#include "cam_semaphore.h"
+
+#define MM_JPEG_MAX_THREADS 30
+#define MM_JPEG_CIRQ_SIZE 30
+#define MM_JPEG_MAX_SESSION 10
+#define MAX_EXIF_TABLE_ENTRIES 50
+#define MAX_JPEG_SIZE 20000000
+#define MAX_OMX_HANDLES (5)
+// Thumbnail src and dest aspect ratio difference tolerance
+#define ASPECT_TOLERANCE 0.001
+
+
+/** mm_jpeg_abort_state_t:
+ *  @MM_JPEG_ABORT_NONE: Abort is not issued
+ *  @MM_JPEG_ABORT_INIT: Abort is issued from the client
+ *  @MM_JPEG_ABORT_DONE: Abort is completed
+ *
+ *  State representing the abort state
+ **/
+typedef enum {
+  MM_JPEG_ABORT_NONE,
+  MM_JPEG_ABORT_INIT,
+  MM_JPEG_ABORT_DONE,
+} mm_jpeg_abort_state_t;
+
+
+/* define max num of supported concurrent jpeg jobs by OMX engine.
+ * Currently, only one at a time */
+#define NUM_MAX_JPEG_CNCURRENT_JOBS 2
+
+#define JOB_ID_MAGICVAL 0x1
+#define JOB_HIST_MAX 10000
+
+/** DUMP_TO_FILE:
+ *  @filename: file name
+ *  @p_addr: address of the buffer
+ *  @len: buffer length
+ *
+ *  dump the image to the file
+ **/
+#define DUMP_TO_FILE(filename, p_addr, len) ({ \
+  size_t rc = 0; \
+  FILE *fp = fopen(filename, "w+"); \
+  if (fp) { \
+    rc = fwrite(p_addr, 1, len, fp); \
+    LOGE("written size %zu", len); \
+    fclose(fp); \
+  } else { \
+    LOGE("open %s failed", filename); \
+  } \
+})
+
+/** DUMP_TO_FILE2:
+ *  @filename: file name
+ *  @p_addr1: address of the first buffer
+ *  @len1: first buffer length
+ *  @p_addr2: address of the second buffer
+ *  @len2: second buffer length
+ *
+ *  dump the image to the file if the memory is non-contiguous
+ **/
+#define DUMP_TO_FILE2(filename, p_addr1, len1, p_addr2, len2) ({ \
+  size_t rc = 0; \
+  FILE *fp = fopen(filename, "w+"); \
+  if (fp) { \
+    rc = fwrite(p_addr1, 1, len1, fp); \
+    rc = fwrite(p_addr2, 1, len2, fp); \
+    LOGE("written %zu %zu", len1, len2); \
+    fclose(fp); \
+  } else { \
+    LOGE("open %s failed", filename); \
+  } \
+})
+
+/** MM_JPEG_CHK_ABORT:
+ *  @p: client pointer
+ *  @ret: return value
+ *  @label: label to jump to
+ *
+ *  check the abort failure
+ **/
+#define MM_JPEG_CHK_ABORT(p, ret, label) ({ \
+  if (MM_JPEG_ABORT_INIT == p->abort_state) { \
+    LOGE("jpeg abort"); \
+    ret = OMX_ErrorNone; \
+    goto label; \
+  } \
+})
+
+#define GET_CLIENT_IDX(x) ((x) & 0xff)
+#define GET_SESSION_IDX(x) (((x) >> 8) & 0xff)
+#define GET_JOB_IDX(x) (((x) >> 16) & 0xff)
+
+typedef struct {
+  union {
+    int i_data[MM_JPEG_CIRQ_SIZE];
+    void *p_data[MM_JPEG_CIRQ_SIZE];
+  };
+  int front;
+  int rear;
+  int count;
+  pthread_mutex_t lock;
+} mm_jpeg_cirq_t;
+
+/** cirq_reset:
+ *
+ *  Arguments:
+ *    @q: circular queue
+ *
+ *  Return:
+ *       none
+ *
+ *  Description:
+ *       Resets the circular queue
+ *
+ **/
+static inline void cirq_reset(mm_jpeg_cirq_t *q)
+{
+  q->front = 0;
+  q->rear = 0;
+  q->count = 0;
+  /* NOTE(review): the mutex is (re)initialized on every reset; calling
+   * pthread_mutex_init() on a mutex still in use is undefined — confirm
+   * cirq_reset is only invoked before the queue is shared. */
+  pthread_mutex_init(&q->lock, NULL);
+}
+
+/** cirq_empty:
+ *
+ *  Arguments:
+ *    @q: circular queue
+ *
+ *  Return:
+ *       none
+ *
+ *  Description:
+ *       check if the circular queue is empty
+ *
+ **/
+#define cirq_empty(q) (q->count == 0)
+
+/** cirq_full:
+ *
+ *  Arguments:
+ *    @q: circular queue
+ *
+ *  Return:
+ *       none
+ *
+ *  Description:
+ *       check if the circular queue is full
+ *
+ **/
+#define cirq_full(q) (q->count == MM_JPEG_CIRQ_SIZE)
+
+/** cirq_enqueue:
+ *
+ *  Arguments:
+ *    @q: circular queue
+ *    @data: data to be inserted
+ *
+ *  Return:
+ *       true/false
+ *
+ *  Description:
+ *       enqueue an element into circular queue
+ *
+ **/
+#define cirq_enqueue(q, type, data) ({ \
+  int rc = 0; \
+  pthread_mutex_lock(&q->lock); \
+  if (cirq_full(q)) { \
+    rc = -1; \
+  } else { \
+    q->type[q->rear] = data; \
+    q->rear = (q->rear + 1) % MM_JPEG_CIRQ_SIZE; \
+    q->count++; \
+  } \
+  pthread_mutex_unlock(&q->lock); \
+  rc; \
+})
+
+/** cirq_dequeue:
+ *
+ *  Arguments:
+ *    @q: circular queue
+ *    @data: data to be popped
+ *
+ *  Return:
+ *       0 on success, -1 if the queue is empty
+ *
+ *  Description:
+ *       dequeue an element from the circular queue
+ *
+ **/
+#define cirq_dequeue(q, type, data) ({ \
+  int rc = 0; \
+  pthread_mutex_lock(&q->lock); \
+  if (cirq_empty(q)) { \
+    rc = -1; \
+  } else { \
+    data = q->type[q->front]; \
+    q->front = (q->front + 1) % MM_JPEG_CIRQ_SIZE; \
+    q->count--; \
+  } \
+  pthread_mutex_unlock(&q->lock); \
+  rc; \
+})
+
+
+/* Payload stored in a generic jpeg queue node: either an id or a ptr. */
+typedef union {
+  uint32_t u32;
+  void* p;
+} mm_jpeg_q_data_t;
+
+/* Fixed stray indentation on this typedef. */
+typedef struct {
+  struct cam_list list;
+  mm_jpeg_q_data_t data;
+} mm_jpeg_q_node_t;
+
+/* Mutex-protected linked-list queue used for job bookkeeping. */
+typedef struct {
+  mm_jpeg_q_node_t head; /* dummy head */
+  uint32_t size;
+  pthread_mutex_t lock;
+} mm_jpeg_queue_t;
+
+typedef enum {
+  MM_JPEG_CMD_TYPE_JOB,          /* job cmd */
+  MM_JPEG_CMD_TYPE_EXIT,         /* EXIT cmd for exiting jobMgr thread */
+  MM_JPEG_CMD_TYPE_DECODE_JOB,
+  MM_JPEG_CMD_TYPE_MAX
+} mm_jpeg_cmd_type_t;
+
+typedef struct mm_jpeg_job_session {
+  uint32_t client_hdl;           /* client handler */
+  uint32_t jobId;                /* job ID */
+  uint32_t sessionId;            /* session ID */
+  mm_jpeg_encode_params_t params; /* encode params */
+  mm_jpeg_decode_params_t dec_params; /* encode params */
+  mm_jpeg_encode_job_t encode_job;             /* job description */
+  mm_jpeg_decode_job_t decode_job;
+  pthread_t encode_pid;          /* encode thread handler*/
+
+  void *jpeg_obj;                /* ptr to mm_jpeg_obj */
+  jpeg_job_status_t job_status;  /* job status */
+
+  int state_change_pending;      /* flag to indicate if state change is pending */
+  OMX_ERRORTYPE error_flag;      /* variable to indicate error during encoding */
+  mm_jpeg_abort_state_t abort_state; /* variable to indicate abort during encoding */
+
+  /* OMX related */
+  OMX_HANDLETYPE omx_handle;                      /* handle to omx engine */
+  OMX_CALLBACKTYPE omx_callbacks;                 /* callbacks to omx engine */
+
+  /* buffer headers */
+  OMX_BUFFERHEADERTYPE *p_in_omx_buf[MM_JPEG_MAX_BUF];
+  OMX_BUFFERHEADERTYPE *p_in_omx_thumb_buf[MM_JPEG_MAX_BUF];
+  OMX_BUFFERHEADERTYPE *p_out_omx_buf[MM_JPEG_MAX_BUF];
+  OMX_BUFFERHEADERTYPE *p_in_rot_omx_buf[MM_JPEG_MAX_BUF];
+  OMX_BUFFERHEADERTYPE *p_in_rot_omx_thumb_buf[MM_JPEG_MAX_BUF];
+
+  OMX_PARAM_PORTDEFINITIONTYPE inputPort;
+  OMX_PARAM_PORTDEFINITIONTYPE outputPort;
+  OMX_PARAM_PORTDEFINITIONTYPE inputTmbPort;
+
+  /* event locks */
+  pthread_mutex_t lock;
+  pthread_cond_t cond;
+
+  QEXIF_INFO_DATA exif_info_local[MAX_EXIF_TABLE_ENTRIES];  //all exif tags for JPEG encoder
+  int exif_count_local;
+
+  mm_jpeg_cirq_t cb_q;
+  int32_t ebd_count;
+  int32_t fbd_count;
+
+  /* this flag represents whether the job is active */
+  OMX_BOOL active;
+
+  /* this flag indicates if the configuration is complete */
+  OMX_BOOL config;
+
+  /* job history count to generate unique id */
+  unsigned int job_hist;
+
+  OMX_BOOL encoding;
+
+  buffer_t work_buffer;
+  /* src rotate ion bufs */
+  buffer_t src_rot_ion_buffer[MM_JPEG_MAX_BUF];
+
+  OMX_EVENTTYPE omxEvent;
+  int event_pending;
+
+  uint8_t *meta_enc_key;
+  size_t meta_enc_keylen;
+
+  struct mm_jpeg_job_session *next_session;
+
+  uint32_t curr_out_buf_idx;
+
+  uint32_t num_omx_sessions;
+  OMX_BOOL auto_out_buf;
+
+  mm_jpeg_queue_t *session_handle_q;
+  mm_jpeg_queue_t *out_buf_q;
+
+  int thumb_from_main;
+  uint32_t job_index;
+
+  /* lib2d rotation flag*/
+  uint32_t lib2d_rotation_flag;
+
+  /* num of buf for input src rotation */
+  uint32_t num_src_rot_bufs;
+
+  /* src rotate img bufs */
+  mm_jpeg_buf_t src_rot_main_buf[MM_JPEG_MAX_BUF];
+} mm_jpeg_job_session_t;
+
+typedef struct {
+  mm_jpeg_encode_job_t encode_job;
+  uint32_t job_id;
+  uint32_t client_handle;
+} mm_jpeg_encode_job_info_t;
+
+typedef struct {
+  mm_jpeg_decode_job_t decode_job;
+  uint32_t job_id;
+  uint32_t client_handle;
+} mm_jpeg_decode_job_info_t;
+
+typedef struct {
+  mm_jpeg_cmd_type_t type;
+  union {
+    mm_jpeg_encode_job_info_t enc_info;
+    mm_jpeg_decode_job_info_t dec_info;
+  };
+} mm_jpeg_job_q_node_t;
+
+/* per-client bookkeeping: the client's sessions and their guarding lock */
+typedef struct {
+  uint8_t is_used;                /* flag: if is a valid client */
+  uint32_t client_handle;         /* client handle */
+  mm_jpeg_job_session_t session[MM_JPEG_MAX_SESSION];
+  pthread_mutex_t lock;           /* job lock */
+} mm_jpeg_client_t;
+
+/* state of the job-manager worker thread */
+typedef struct {
+  pthread_t pid;                  /* job cmd thread ID */
+  cam_semaphore_t job_sem;        /* semaphore for job cmd thread */
+  mm_jpeg_queue_t job_queue;      /* queue for job to do */
+} mm_jpeg_job_cmd_thread_t;
+
+#define MAX_JPEG_CLIENT_NUM 8
+/* top-level state of the JPEG interface: client table, job manager
+ * thread, ongoing-job queue and shared work buffers */
+typedef struct mm_jpeg_obj_t {
+  /* ClientMgr */
+  int num_clients;                                /* num of clients */
+  mm_jpeg_client_t clnt_mgr[MAX_JPEG_CLIENT_NUM]; /* client manager */
+
+  /* JobMgr */
+  pthread_mutex_t job_lock;                       /* job lock */
+  mm_jpeg_job_cmd_thread_t job_mgr;               /* job mgr thread including todo_q*/
+  mm_jpeg_queue_t ongoing_job_q;                  /* queue for ongoing jobs */
+  buffer_t ionBuffer[MM_JPEG_CONCURRENT_SESSIONS_COUNT]; /* shared ION work buffers */
+
+
+  /* Max pic dimension for work buf calc*/
+  uint32_t max_pic_w;
+  uint32_t max_pic_h;
+#ifdef LOAD_ADSP_RPC_LIB
+  void *adsprpc_lib_handle;  /* dlopen handle for the ADSP RPC library */
+#endif
+
+  uint32_t work_buf_cnt;  /* number of work buffers in use */
+
+  uint32_t num_sessions;  /* total live sessions across all clients */
+  uint32_t reuse_reproc_buffer;
+
+  cam_jpeg_metadata_t *jpeg_metadata;
+
+  /* Pointer to the session in progress*/
+  mm_jpeg_job_session_t *p_session_inprogress;
+
+  // dummy OMX handle
+  OMX_HANDLETYPE dummy_handle;
+} mm_jpeg_obj;
+
+/** mm_jpeg_transition_func_t:
+ *
+ * Intermediate function invoked during an OMX state transition
+ * (receives the job session as its void* argument)
+ **/
+typedef OMX_ERRORTYPE (*mm_jpeg_transition_func_t)(void *);
+
+extern int32_t mm_jpeg_init(mm_jpeg_obj *my_obj);
+extern int32_t mm_jpeg_deinit(mm_jpeg_obj *my_obj);
+extern uint32_t mm_jpeg_new_client(mm_jpeg_obj *my_obj);
+extern int32_t mm_jpeg_start_job(mm_jpeg_obj *my_obj,
+  mm_jpeg_job_t* job,
+  uint32_t* jobId);
+extern int32_t mm_jpeg_abort_job(mm_jpeg_obj *my_obj,
+  uint32_t jobId);
+extern int32_t mm_jpeg_close(mm_jpeg_obj *my_obj,
+  uint32_t client_hdl);
+extern int32_t mm_jpeg_create_session(mm_jpeg_obj *my_obj,
+  uint32_t client_hdl,
+  mm_jpeg_encode_params_t *p_params,
+  uint32_t* p_session_id);
+extern int32_t mm_jpeg_destroy_session_by_id(mm_jpeg_obj *my_obj,
+  uint32_t session_id);
+
+extern int32_t mm_jpegdec_init(mm_jpeg_obj *my_obj);
+extern int32_t mm_jpegdec_deinit(mm_jpeg_obj *my_obj);
+extern int32_t mm_jpeg_jobmgr_thread_release(mm_jpeg_obj * my_obj);
+extern int32_t mm_jpeg_jobmgr_thread_launch(mm_jpeg_obj *my_obj);
+extern int32_t mm_jpegdec_start_decode_job(mm_jpeg_obj *my_obj,
+  mm_jpeg_job_t* job,
+  uint32_t* jobId);
+
+extern int32_t mm_jpegdec_create_session(mm_jpeg_obj *my_obj,
+  uint32_t client_hdl,
+  mm_jpeg_decode_params_t *p_params,
+  uint32_t* p_session_id);
+
+extern int32_t mm_jpegdec_destroy_session_by_id(mm_jpeg_obj *my_obj,
+  uint32_t session_id);
+
+extern int32_t mm_jpegdec_abort_job(mm_jpeg_obj *my_obj,
+  uint32_t jobId);
+
+int32_t mm_jpegdec_process_decoding_job(mm_jpeg_obj *my_obj,
+    mm_jpeg_job_q_node_t* job_node);
+
+/* utility functions declared in mm-camera-interface2.c
+ * that are needed by mm-camera and the layers below it */
+uint32_t mm_jpeg_util_generate_handler(uint8_t index);
+uint8_t mm_jpeg_util_get_index_by_handler(uint32_t handler);
+
+/* basic queue functions */
+extern int32_t mm_jpeg_queue_init(mm_jpeg_queue_t* queue);
+extern int32_t mm_jpeg_queue_enq(mm_jpeg_queue_t* queue,
+    mm_jpeg_q_data_t data);
+extern int32_t mm_jpeg_queue_enq_head(mm_jpeg_queue_t* queue,
+    mm_jpeg_q_data_t data);
+extern mm_jpeg_q_data_t mm_jpeg_queue_deq(mm_jpeg_queue_t* queue);
+extern int32_t mm_jpeg_queue_deinit(mm_jpeg_queue_t* queue);
+extern int32_t mm_jpeg_queue_flush(mm_jpeg_queue_t* queue);
+extern uint32_t mm_jpeg_queue_get_size(mm_jpeg_queue_t* queue);
+extern mm_jpeg_q_data_t mm_jpeg_queue_peek(mm_jpeg_queue_t* queue);
+extern int32_t addExifEntry(QOMX_EXIF_INFO *p_exif_info, exif_tag_id_t tagid,
+  exif_tag_type_t type, uint32_t count, void *data);
+extern int32_t releaseExifEntry(QEXIF_INFO_DATA *p_exif_data);
+extern int process_meta_data(metadata_buffer_t *p_meta,
+  QOMX_EXIF_INFO *exif_info, mm_jpeg_exif_params_t *p_cam3a_params,
+  cam_hal_version_t hal_version);
+
+OMX_ERRORTYPE mm_jpeg_session_change_state(mm_jpeg_job_session_t* p_session,
+  OMX_STATETYPE new_state,
+  mm_jpeg_transition_func_t p_exec);
+
+int map_jpeg_format(mm_jpeg_color_format color_fmt);
+
+OMX_BOOL mm_jpeg_session_abort(mm_jpeg_job_session_t *p_session);
+/**
+ *
+ * special queue functions for job queue
+ **/
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_client_id(
+  mm_jpeg_queue_t* queue, uint32_t client_hdl);
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_job_id(
+  mm_jpeg_queue_t* queue, uint32_t job_id);
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_session_id(
+  mm_jpeg_queue_t* queue, uint32_t session_id);
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_unlk(
+  mm_jpeg_queue_t* queue, uint32_t job_id);
+
+
+/** mm_jpeg_queue_func_t:
+ *
+ * Intermediate function for queue operation
+ **/
+typedef void (*mm_jpeg_queue_func_t)(void *);
+
+/** mm_jpeg_exif_flash_mode:
+ *
+ * Exif flash mode values
+ * (internal encoding — note values start at 0x1, not 0)
+ **/
+typedef enum {
+  MM_JPEG_EXIF_FLASH_MODE_ON   = 0x1,
+  MM_JPEG_EXIF_FLASH_MODE_OFF  = 0x2,
+  MM_JPEG_EXIF_FLASH_MODE_AUTO = 0x3,
+  MM_JPEG_EXIF_FLASH_MODE_MAX
+} mm_jpeg_exif_flash_mode;
+
+#endif /* MM_JPEG_H_ */
+
+
diff --git a/msmcobalt/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_dbg.h b/msmcobalt/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_dbg.h
new file mode 100644
index 0000000..2269537
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_dbg.h
@@ -0,0 +1,55 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_JPEG_DBG_H__
+#define __MM_JPEG_DBG_H__
+
+#ifdef QCAMERA_REDEFINE_LOG
+#define CAM_MODULE CAM_JPEG_MODULE
+#include "mm_camera_dbg.h"
+#endif
+
+extern volatile uint32_t gKpiDebugLevel;
+
+#ifndef KPI_DEBUG
+#define KPI_DEBUG
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+#include <cutils/trace.h>
+
+#define KPI_APT 1
+#define KPI_DBG 2
+
+#define KPI_ATRACE_INT(name,val) ({\
+if (gKpiDebugLevel >= KPI_APT) { \
+     atrace_int(ATRACE_TAG, name, val); \
+}\
+})
+
+#endif
+#endif /* __MM_JPEG_DBG_H__ */
diff --git a/msmcobalt/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_inlines.h b/msmcobalt/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_inlines.h
new file mode 100644
index 0000000..d2ca63d
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_inlines.h
@@ -0,0 +1,127 @@
+/* Copyright (c) 2013, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef MM_JPEG_INLINES_H_
+#define MM_JPEG_INLINES_H_
+
+// JPEG dependencies
+#include "mm_jpeg.h"
+
+/** mm_jpeg_get_session:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @job_id: job id (packs the client and session indices)
+ *
+ *  Return:
+ *       pointer to the job session, or NULL if the indices
+ *       decoded from job_id are out of range
+ *
+ *  Description:
+ *       Look up the job session referenced by a job id
+ *
+ **/
+static inline mm_jpeg_job_session_t *mm_jpeg_get_session(mm_jpeg_obj *my_obj, uint32_t job_id)
+{
+  mm_jpeg_job_session_t *p_session = NULL;
+  int client_idx =  GET_CLIENT_IDX(job_id);
+  int session_idx= GET_SESSION_IDX(job_id);
+
+  LOGD("client_idx %d session_idx %d",
+    client_idx, session_idx);
+  /* reject ids whose decoded indices exceed the table bounds */
+  if ((session_idx >= MM_JPEG_MAX_SESSION) ||
+    (client_idx >= MAX_JPEG_CLIENT_NUM)) {
+    LOGE("invalid job id %x",
+      job_id);
+    return NULL;
+  }
+  /* the client lock only guards the table lookup itself */
+  pthread_mutex_lock(&my_obj->clnt_mgr[client_idx].lock);
+  p_session = &my_obj->clnt_mgr[client_idx].session[session_idx];
+  pthread_mutex_unlock(&my_obj->clnt_mgr[client_idx].lock);
+  return p_session;
+}
+
+/** mm_jpeg_get_new_session_idx:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @client_idx: client index
+ *    @pp_session: output; set to the reserved session slot on success
+ *
+ *  Return:
+ *       index of the reserved session slot, or -1 if all slots
+ *       of this client are active
+ *
+ *  Description:
+ *       Find a free session slot for the client and mark it active
+ *
+ **/
+static inline int mm_jpeg_get_new_session_idx(mm_jpeg_obj *my_obj, int client_idx,
+  mm_jpeg_job_session_t **pp_session)
+{
+  int i = 0;
+  int index = -1;
+  for (i = 0; i < MM_JPEG_MAX_SESSION; i++) {
+    /* lock is taken per iteration; the active flag is flipped under
+     * the lock so concurrent callers cannot claim the same slot */
+    pthread_mutex_lock(&my_obj->clnt_mgr[client_idx].lock);
+    if (!my_obj->clnt_mgr[client_idx].session[i].active) {
+      *pp_session = &my_obj->clnt_mgr[client_idx].session[i];
+      my_obj->clnt_mgr[client_idx].session[i].active = OMX_TRUE;
+      index = i;
+      pthread_mutex_unlock(&my_obj->clnt_mgr[client_idx].lock);
+      break;
+    }
+    pthread_mutex_unlock(&my_obj->clnt_mgr[client_idx].lock);
+  }
+  return index;
+}
+
+/** mm_jpeg_remove_session_idx:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @job_id: job id (packs the client and session indices)
+ *
+ *  Return:
+ *       none
+ *
+ *  Description:
+ *       Mark the session referenced by a job id as inactive
+ *
+ **/
+static inline void mm_jpeg_remove_session_idx(mm_jpeg_obj *my_obj, uint32_t job_id)
+{
+  int client_idx =  GET_CLIENT_IDX(job_id);
+  int session_idx= GET_SESSION_IDX(job_id);
+  LOGD("client_idx %d session_idx %d",
+    client_idx, session_idx);
+  /* validate the decoded indices exactly like mm_jpeg_get_session();
+   * an out-of-range job id would otherwise index past the tables */
+  if ((session_idx >= MM_JPEG_MAX_SESSION) ||
+    (client_idx >= MAX_JPEG_CLIENT_NUM)) {
+    LOGE("invalid job id %x",
+      job_id);
+    return;
+  }
+  pthread_mutex_lock(&my_obj->clnt_mgr[client_idx].lock);
+  my_obj->clnt_mgr[client_idx].session[session_idx].active = OMX_FALSE;
+  pthread_mutex_unlock(&my_obj->clnt_mgr[client_idx].lock);
+}
+
+
+
+#endif /* MM_JPEG_INLINES_H_ */
diff --git a/msmcobalt/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_ionbuf.h b/msmcobalt/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_ionbuf.h
new file mode 100644
index 0000000..96b70d9
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_ionbuf.h
@@ -0,0 +1,105 @@
+/* Copyright (c) 2013-2014, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_JPEG_IONBUF_H__
+#define __MM_JPEG_IONBUF_H__
+
+// System dependencies
+#include <linux/msm_ion.h>
+
+// JPEG dependencies
+#include "mm_jpeg_dbg.h"
+
+/* ION-backed buffer descriptor used by the JPEG interface */
+typedef struct  {
+  struct ion_fd_data ion_info_fd;   /* ion fd data for the allocation */
+  struct ion_allocation_data alloc; /* ion allocation request/result */
+  int p_pmem_fd;                    /* buffer fd — presumably the shared ion fd; confirm in mm_jpeg_ionbuf.c */
+  size_t size;                      /* buffer size in bytes */
+  int ion_fd;                       /* fd of the opened ion device */
+  uint8_t *addr;                    /* mapped virtual address */
+} buffer_t;
+
+/** buffer_allocate:
+ *
+ *  Arguments:
+ *     @p_buffer: ION buffer
+ *
+ *  Return:
+ *     buffer address
+ *
+ *  Description:
+ *      allocates ION buffer
+ *
+ **/
+void* buffer_allocate(buffer_t *p_buffer, int cached);
+
+/** buffer_deallocate:
+ *
+ *  Arguments:
+ *     @p_buffer: ION buffer
+ *
+ *  Return:
+ *     error val
+ *
+ *  Description:
+ *      deallocates ION buffer
+ *
+ **/
+int buffer_deallocate(buffer_t *p_buffer);
+
+/** buffer_invalidate:
+ *
+ *  Arguments:
+ *     @p_buffer: ION buffer
+ *
+ *  Return:
+ *     error val
+ *
+ *  Description:
+ *      Invalidates the cached buffer
+ *
+ **/
+int buffer_invalidate(buffer_t *p_buffer);
+
+/** buffer_clean:
+ *
+ *  Arguments:
+ *     @p_buffer: ION buffer
+ *
+ *  Return:
+ *     error val
+ *
+ *  Description:
+ *      clean the cached buffer
+ *
+ **/
+int buffer_clean(buffer_t *p_buffer);
+
+#endif
+
diff --git a/msmcobalt/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_mpo.h b/msmcobalt/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_mpo.h
new file mode 100644
index 0000000..6e8424c
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_mpo.h
@@ -0,0 +1,45 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef MM_JPEG_MPO_H_
+#define MM_JPEG_MPO_H_
+
+// JPEG dependencies
+#include "mm_jpeg_interface.h"
+#include "qmpo.h"
+
+#define TRUE 1
+#define FALSE 0
+
+extern int mm_jpeg_mpo_compose(mm_jpeg_mpo_info_t *mpo_info);
+
+extern int get_mpo_size(mm_jpeg_output_t jpeg_buffer[MM_JPEG_MAX_MPO_IMAGES],
+    int num_of_images);
+
+#endif
diff --git a/msmcobalt/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg.c b/msmcobalt/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg.c
new file mode 100644
index 0000000..d697d5b
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg.c
@@ -0,0 +1,3743 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <pthread.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <math.h>
+#define PRCTL_H <SYSTEM_HEADER_PREFIX/prctl.h>
+#include PRCTL_H
+
+#ifdef LOAD_ADSP_RPC_LIB
+#include <dlfcn.h>
+#include <stdlib.h>
+#endif
+
+// JPEG dependencies
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg_interface.h"
+#include "mm_jpeg.h"
+#include "mm_jpeg_inlines.h"
+#ifdef LIB2D_ROTATION_ENABLE
+#include "mm_lib2d.h"
+#endif
+
+#define ENCODING_MODE_PARALLEL 1
+
+#define META_KEYFILE QCAMERA_DUMP_FRM_LOCATION"metadata.key"
+
+/**
+ * minimal resolution needed for normal mode of ops
+ */
+#define MM_JPEG_MIN_NOM_RESOLUTION 7680000 /*8MP*/
+
+#ifdef MM_JPEG_USE_PIPELINE
+#undef MM_JPEG_CONCURRENT_SESSIONS_COUNT
+#define MM_JPEG_CONCURRENT_SESSIONS_COUNT 1
+#endif
+
+OMX_ERRORTYPE mm_jpeg_ebd(OMX_HANDLETYPE hComponent,
+    OMX_PTR pAppData,
+    OMX_BUFFERHEADERTYPE* pBuffer);
+OMX_ERRORTYPE mm_jpeg_fbd(OMX_HANDLETYPE hComponent,
+    OMX_PTR pAppData,
+    OMX_BUFFERHEADERTYPE* pBuffer);
+OMX_ERRORTYPE mm_jpeg_event_handler(OMX_HANDLETYPE hComponent,
+    OMX_PTR pAppData,
+    OMX_EVENTTYPE eEvent,
+    OMX_U32 nData1,
+    OMX_U32 nData2,
+    OMX_PTR pEventData);
+
+static int32_t mm_jpegenc_destroy_job(mm_jpeg_job_session_t *p_session);
+static void mm_jpegenc_job_done(mm_jpeg_job_session_t *p_session);
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_dst_ptr(
+  mm_jpeg_queue_t* queue, void * dst_ptr);
+static OMX_ERRORTYPE mm_jpeg_session_configure(mm_jpeg_job_session_t *p_session);
+
+/** mm_jpeg_get_comp_name:
+ *
+ *  Arguments:
+ *       None
+ *
+ *  Return:
+ *       Encoder component name (a string literal — callers must not
+ *       modify or free it)
+ *
+ *  Description:
+ *       Get the name of omx component to be used for jpeg encoding.
+ *       Selects the pipeline encoder when MM_JPEG_USE_PIPELINE is set.
+ *
+ *  NOTE(review): plain 'inline' without 'static' relies on GNU89
+ *       inline semantics to emit a definition — confirm the build
+ *       does not compile this file with C99 inline rules.
+ **/
+inline char* mm_jpeg_get_comp_name()
+{
+#ifdef MM_JPEG_USE_PIPELINE
+  return "OMX.qcom.image.jpeg.encoder_pipeline";
+#else
+  return "OMX.qcom.image.jpeg.encoder";
+#endif
+}
+
+/** mm_jpeg_session_send_buffers:
+ *
+ *  Arguments:
+ *    @data: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Register the session's buffers with the OMX component via
+ *       OMX_UseBuffer: main image input on port 0, thumbnail input on
+ *       port 2, output on port 1. When lib2d rotation is enabled the
+ *       rotated copies are registered instead of the caller's source
+ *       buffers. Returns on the first failure, leaving any buffers
+ *       registered so far to be reclaimed by
+ *       mm_jpeg_session_free_buffers.
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_send_buffers(void *data)
+{
+  uint32_t i = 0;
+  mm_jpeg_job_session_t* p_session = (mm_jpeg_job_session_t *)data;
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  QOMX_BUFFER_INFO lbuffer_info;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+
+  memset(&lbuffer_info, 0x0, sizeof(QOMX_BUFFER_INFO));
+
+  /* main image input (port 0) */
+  if (p_session->lib2d_rotation_flag) {
+    for (i = 0; i < p_session->num_src_rot_bufs; i++) {
+      lbuffer_info.fd = (OMX_U32)p_session->src_rot_main_buf[i].fd;
+      LOGD("Source rot buffer %d", i);
+      ret = OMX_UseBuffer(p_session->omx_handle,
+        &(p_session->p_in_rot_omx_buf[i]), 0,
+        &lbuffer_info, p_session->src_rot_main_buf[i].buf_size,
+        p_session->src_rot_main_buf[i].buf_vaddr);
+      if (ret) {
+        LOGE("Error %d", ret);
+        return ret;
+      }
+    }
+  } else {
+    for (i = 0; i < p_params->num_src_bufs; i++) {
+      LOGD("Source buffer %d", i);
+      lbuffer_info.fd = (OMX_U32)p_params->src_main_buf[i].fd;
+      ret = OMX_UseBuffer(p_session->omx_handle,
+        &(p_session->p_in_omx_buf[i]), 0,
+        &lbuffer_info, p_params->src_main_buf[i].buf_size,
+        p_params->src_main_buf[i].buf_vaddr);
+      if (ret) {
+        LOGE("Error %d", ret);
+        return ret;
+      }
+    }
+  }
+
+  /* thumbnail input (port 2); when the thumbnail is scaled from the
+   * main image the rotated main buffers are registered here too */
+  if (p_session->lib2d_rotation_flag && p_session->thumb_from_main) {
+    for (i = 0; i < p_session->num_src_rot_bufs; i++) {
+      LOGD("Source rot buffer thumb %d", i);
+      lbuffer_info.fd = (OMX_U32)p_session->src_rot_main_buf[i].fd;
+      ret = OMX_UseBuffer(p_session->omx_handle,
+        &(p_session->p_in_rot_omx_thumb_buf[i]), 2,
+        &lbuffer_info, p_session->src_rot_main_buf[i].buf_size,
+        p_session->src_rot_main_buf[i].buf_vaddr);
+      if (ret) {
+        LOGE("Error %d", ret);
+        return ret;
+      }
+    }
+  } else {
+    for (i = 0; i < p_params->num_tmb_bufs; i++) {
+      LOGD("Source buffer %d", i);
+      lbuffer_info.fd = (OMX_U32)p_params->src_thumb_buf[i].fd;
+      ret = OMX_UseBuffer(p_session->omx_handle,
+        &(p_session->p_in_omx_thumb_buf[i]), 2,
+        &lbuffer_info, p_params->src_thumb_buf[i].buf_size,
+      p_params->src_thumb_buf[i].buf_vaddr);
+      if (ret) {
+        LOGE("Error %d", ret);
+        return ret;
+      }
+    }
+  }
+
+  /* output (port 1); no fd info is passed for the destination */
+  for (i = 0; i < p_params->num_dst_bufs; i++) {
+    LOGD("Dest buffer %d", i);
+    ret = OMX_UseBuffer(p_session->omx_handle, &(p_session->p_out_omx_buf[i]),
+      1, NULL, p_params->dest_buf[i].buf_size,
+      p_params->dest_buf[i].buf_vaddr);
+    if (ret) {
+      LOGE("Error");
+      return ret;
+    }
+  }
+
+  return ret;
+}
+
+
+/** mm_jpeg_session_free_buffers:
+ *
+ *  Arguments:
+ *    @data: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Return all registered buffers to the OMX layer, mirroring the
+ *       registration order of mm_jpeg_session_send_buffers: main input
+ *       (port 0), thumbnail input (port 2), then output (port 1).
+ *       Stops at the first failure and reports it.
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_free_buffers(void *data)
+{
+  OMX_ERRORTYPE omx_rc = OMX_ErrorNone;
+  uint32_t idx;
+  mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *)data;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+
+  /* main image input, port 0: rotated copies when lib2d rotation is
+   * enabled, otherwise the caller-supplied source buffers */
+  if (p_session->lib2d_rotation_flag) {
+    for (idx = 0; idx < p_session->num_src_rot_bufs; idx++) {
+      LOGD("Source rot buffer %d", idx);
+      omx_rc = OMX_FreeBuffer(p_session->omx_handle, 0,
+        p_session->p_in_rot_omx_buf[idx]);
+      if (omx_rc) {
+        LOGE("Error %d", omx_rc);
+        return omx_rc;
+      }
+    }
+  } else {
+    for (idx = 0; idx < p_params->num_src_bufs; idx++) {
+      LOGD("Source buffer %d", idx);
+      omx_rc = OMX_FreeBuffer(p_session->omx_handle, 0,
+        p_session->p_in_omx_buf[idx]);
+      if (omx_rc) {
+        LOGE("Error %d", omx_rc);
+        return omx_rc;
+      }
+    }
+  }
+
+  /* thumbnail input, port 2 */
+  if (p_session->lib2d_rotation_flag && p_session->thumb_from_main) {
+    for (idx = 0; idx < p_session->num_src_rot_bufs; idx++) {
+      LOGD("Source rot buffer thumb %d", idx);
+      omx_rc = OMX_FreeBuffer(p_session->omx_handle, 2,
+        p_session->p_in_rot_omx_thumb_buf[idx]);
+      if (omx_rc) {
+        LOGE("Error %d", omx_rc);
+        return omx_rc;
+      }
+    }
+  } else {
+    for (idx = 0; idx < p_params->num_tmb_bufs; idx++) {
+      LOGD("Source buffer %d", idx);
+      omx_rc = OMX_FreeBuffer(p_session->omx_handle, 2,
+        p_session->p_in_omx_thumb_buf[idx]);
+      if (omx_rc) {
+        LOGE("Error %d", omx_rc);
+        return omx_rc;
+      }
+    }
+  }
+
+  /* output, port 1 */
+  for (idx = 0; idx < p_params->num_dst_bufs; idx++) {
+    LOGD("Dest buffer %d", idx);
+    omx_rc = OMX_FreeBuffer(p_session->omx_handle, 1,
+      p_session->p_out_omx_buf[idx]);
+    if (omx_rc) {
+      LOGE("Error");
+      return omx_rc;
+    }
+  }
+  return omx_rc;
+}
+
+
+
+
+/** mm_jpeg_session_change_state:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *    @new_state: new state to be transitioned to
+ *    @p_exec: transition function (may be NULL); runs after the state
+ *             command is issued and before waiting for completion
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       This method is used for state transition. Issues
+ *       OMX_CommandStateSet and blocks on the session condition
+ *       variable until the event handler signals completion
+ *       (clearing state_change_pending).
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_change_state(mm_jpeg_job_session_t* p_session,
+  OMX_STATETYPE new_state,
+  mm_jpeg_transition_func_t p_exec)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  OMX_STATETYPE current_state;
+  LOGD("new_state %d p_exec %p",
+    new_state, p_exec);
+
+
+  pthread_mutex_lock(&p_session->lock);
+
+  ret = OMX_GetState(p_session->omx_handle, &current_state);
+
+  if (ret) {
+    pthread_mutex_unlock(&p_session->lock);
+    return ret;
+  }
+
+  /* already in the requested state: nothing to do */
+  if (current_state == new_state) {
+    pthread_mutex_unlock(&p_session->lock);
+    return OMX_ErrorNone;
+  }
+
+  /* flag must be set before issuing the command; the OMX callback
+   * clears it and signals the condition variable */
+  p_session->state_change_pending = OMX_TRUE;
+  /* drop the lock around OMX_SendCommand — presumably because the
+   * component may invoke callbacks synchronously; confirm against the
+   * event handler's locking */
+  pthread_mutex_unlock(&p_session->lock);
+  ret = OMX_SendCommand(p_session->omx_handle, OMX_CommandStateSet,
+    new_state, NULL);
+  pthread_mutex_lock(&p_session->lock);
+  if (ret) {
+    LOGE("Error %d", ret);
+    pthread_mutex_unlock(&p_session->lock);
+    return OMX_ErrorIncorrectStateTransition;
+  }
+  /* OMX_ErrorOverflow is tolerated here; any other recorded error
+   * aborts the transition */
+  if ((OMX_ErrorNone != p_session->error_flag) &&
+      (OMX_ErrorOverflow != p_session->error_flag)) {
+    LOGE("Error %d", p_session->error_flag);
+    pthread_mutex_unlock(&p_session->lock);
+    return p_session->error_flag;
+  }
+  /* run the transition helper (e.g. buffer registration/release)
+   * while holding the session lock */
+  if (p_exec) {
+    ret = p_exec(p_session);
+    if (ret) {
+      LOGE("Error %d", ret);
+      pthread_mutex_unlock(&p_session->lock);
+      return ret;
+    }
+  }
+  /* wait until the event handler reports the transition complete */
+  if (p_session->state_change_pending) {
+    LOGL("before wait");
+    pthread_cond_wait(&p_session->cond, &p_session->lock);
+    LOGL("after wait");
+  }
+  pthread_mutex_unlock(&p_session->lock);
+  return ret;
+}
+
+/** mm_jpeg_session_create:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error types
+ *
+ *  Description:
+ *       Create a jpeg encode session: reset the per-session state,
+ *       install the OMX callbacks and acquire an encoder component
+ *       handle. Increments the owning object's session count on
+ *       success.
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_create(mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE omx_rc = OMX_ErrorNone;
+  mm_jpeg_obj *my_obj = (mm_jpeg_obj *) p_session->jpeg_obj;
+
+  /* per-session synchronization primitives */
+  pthread_mutex_init(&p_session->lock, NULL);
+  pthread_cond_init(&p_session->cond, NULL);
+  cirq_reset(&p_session->cb_q);
+
+  /* reset bookkeeping to a clean initial state */
+  p_session->state_change_pending = OMX_FALSE;
+  p_session->abort_state = MM_JPEG_ABORT_NONE;
+  p_session->error_flag = OMX_ErrorNone;
+  p_session->ebd_count = 0;
+  p_session->fbd_count = 0;
+  p_session->encode_pid = -1;
+  p_session->config = OMX_FALSE;
+  p_session->exif_count_local = 0;
+  p_session->auto_out_buf = OMX_FALSE;
+
+  /* OMX callback table */
+  p_session->omx_callbacks.EmptyBufferDone = mm_jpeg_ebd;
+  p_session->omx_callbacks.FillBufferDone = mm_jpeg_fbd;
+  p_session->omx_callbacks.EventHandler = mm_jpeg_event_handler;
+
+  p_session->thumb_from_main = 0;
+#ifdef MM_JPEG_USE_PIPELINE
+  /* pipeline builds may source the thumbnail from the main image */
+  p_session->thumb_from_main = !p_session->params.thumb_from_postview;
+#endif
+
+  omx_rc = OMX_GetHandle(&p_session->omx_handle,
+      mm_jpeg_get_comp_name(),
+      (void *)p_session,
+      &p_session->omx_callbacks);
+  if (OMX_ErrorNone != omx_rc) {
+    LOGE("OMX_GetHandle failed (%d)", omx_rc);
+    return omx_rc;
+  }
+
+  my_obj->num_sessions++;
+
+  return omx_rc;
+}
+
+
+
+/** mm_jpeg_session_destroy:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       none
+ *
+ *  Description:
+ *       Destroy a jpeg encode session: walk the component down the
+ *       OMX state machine (Executing/Pause -> Idle -> Loaded, freeing
+ *       OMX buffers on the Idle -> Loaded step), release rotation ION
+ *       buffers, free the OMX handle and the session's sync
+ *       primitives, then recursively destroy any chained session.
+ *
+ **/
+void mm_jpeg_session_destroy(mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  OMX_STATETYPE state;
+  uint32_t i;
+  mm_jpeg_obj *my_obj = (mm_jpeg_obj *) p_session->jpeg_obj;
+
+  LOGD("E");
+  if (NULL == p_session->omx_handle) {
+    LOGE("invalid handle");
+    return;
+  }
+
+  rc = OMX_GetState(p_session->omx_handle, &state);
+
+  //Check state before state transition
+  if ((state == OMX_StateExecuting) || (state == OMX_StatePause)) {
+    rc = mm_jpeg_session_change_state(p_session, OMX_StateIdle, NULL);
+    if (rc) {
+      LOGE("Error");
+    }
+  }
+
+  /* re-query: the transition above may have moved us to Idle */
+  rc = OMX_GetState(p_session->omx_handle, &state);
+
+  if (state == OMX_StateIdle) {
+    /* buffers must be freed as part of the Idle -> Loaded transition */
+    rc = mm_jpeg_session_change_state(p_session, OMX_StateLoaded,
+      mm_jpeg_session_free_buffers);
+    if (rc) {
+      LOGE("Error");
+    }
+  }
+
+  /* release the ION buffers that back the lib2d-rotated copies */
+  if (p_session->lib2d_rotation_flag) {
+    for (i = 0; i < p_session->num_src_rot_bufs; i++) {
+      if (p_session->src_rot_ion_buffer[i].addr) {
+        buffer_deallocate(&p_session->src_rot_ion_buffer[i]);
+      }
+    }
+  }
+
+  /* If current session is the session in progress
+     set session in progress pointer to null*/
+  p_session->config = OMX_FALSE;
+  if (my_obj->p_session_inprogress == p_session) {
+    my_obj->p_session_inprogress = NULL;
+  }
+
+  rc = OMX_FreeHandle(p_session->omx_handle);
+  if (0 != rc) {
+    LOGE("OMX_FreeHandle failed (%d)", rc);
+  }
+  p_session->omx_handle = NULL;
+
+  pthread_mutex_destroy(&p_session->lock);
+  pthread_cond_destroy(&p_session->cond);
+
+  if (NULL != p_session->meta_enc_key) {
+    free(p_session->meta_enc_key);
+    p_session->meta_enc_key = NULL;
+  }
+
+  my_obj->num_sessions--;
+
+  // Destroy next session (recursion terminates when next_session is NULL)
+  if (p_session->next_session) {
+    mm_jpeg_session_destroy(p_session->next_session);
+  }
+
+  LOGD("Session destroy successful. X");
+}
+
+
+
+/** mm_jpeg_session_config_main_buffer_offset:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure the buffer offsets of the main image: fill a
+ *       QOMX_YUV_FRAME_INFO from the first source buffer's plane
+ *       layout and pass it to the component via the
+ *       QOMX_IMAGE_EXT_BUFFER_OFFSET extension.
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_config_main_buffer_offset(
+  mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  OMX_INDEXTYPE buffer_index;
+  QOMX_YUV_FRAME_INFO frame_info;
+  size_t totalSize = 0;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+
+  mm_jpeg_buf_t *p_src_buf =
+    &p_params->src_main_buf[0];
+
+  memset(&frame_info, 0x0, sizeof(QOMX_YUV_FRAME_INFO));
+
+  frame_info.cbcrStartOffset[0] = p_src_buf->offset.mp[0].len;
+  frame_info.cbcrStartOffset[1] = p_src_buf->offset.mp[1].len;
+  /* per-plane offsets apply only when encoding the original source
+   * buffer; with lib2d rotation the offsets stay zero — presumably
+   * because the rotated copy has no leading padding (confirm against
+   * the lib2d output layout) */
+  if (!p_session->lib2d_rotation_flag) {
+    frame_info.yOffset = p_src_buf->offset.mp[0].offset;
+    frame_info.cbcrOffset[0] = p_src_buf->offset.mp[1].offset;
+    frame_info.cbcrOffset[1] = p_src_buf->offset.mp[2].offset;
+  }
+  totalSize = p_src_buf->buf_size;
+
+  rc = OMX_GetExtensionIndex(p_session->omx_handle,
+    QOMX_IMAGE_EXT_BUFFER_OFFSET_NAME, &buffer_index);
+  if (rc != OMX_ErrorNone) {
+    LOGE("Failed");
+    return rc;
+  }
+
+  /* %zu: totalSize is size_t (%zd expects the signed ssize_t) */
+  LOGD("yOffset = %d, cbcrOffset = (%d %d), totalSize = %zu,"
+    "cbcrStartOffset = (%d %d)",
+    (int)frame_info.yOffset,
+    (int)frame_info.cbcrOffset[0],
+    (int)frame_info.cbcrOffset[1],
+    totalSize,
+    (int)frame_info.cbcrStartOffset[0],
+    (int)frame_info.cbcrStartOffset[1]);
+
+  rc = OMX_SetParameter(p_session->omx_handle, buffer_index, &frame_info);
+  if (rc != OMX_ErrorNone) {
+    LOGE("Failed");
+    return rc;
+  }
+  return rc;
+}
+
+/** mm_jpeg_encoding_mode:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure the serial or parallel encoding
+ *       mode
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_encoding_mode(
+  mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE rc = 0;
+  OMX_INDEXTYPE indextype;
+  QOMX_ENCODING_MODE encoding_mode;
+
+  rc = OMX_GetExtensionIndex(p_session->omx_handle,
+    QOMX_IMAGE_EXT_ENCODING_MODE_NAME, &indextype);
+  if (rc != OMX_ErrorNone) {
+    LOGE("Failed");
+    return rc;
+  }
+
+  /* NOTE(review): ENCODING_MODE_PARALLEL appears to be a compile-time
+     constant, so this branch is resolved at build configuration time
+     rather than per session — confirm against the defining header. */
+  if (ENCODING_MODE_PARALLEL) {
+    encoding_mode = OMX_Parallel_Encoding;
+  } else {
+    encoding_mode = OMX_Serial_Encoding;
+  }
+  LOGD("encoding mode = %d ",
+    (int)encoding_mode);
+  rc = OMX_SetParameter(p_session->omx_handle, indextype, &encoding_mode);
+  if (rc != OMX_ErrorNone) {
+    LOGE("Failed");
+    return rc;
+  }
+  return rc;
+}
+
+/** mm_jpeg_get_speed:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       ops speed type for jpeg
+ *
+ *  Description:
+ *      Select the encoder speed mode: high speed for burst captures or
+ *      sources larger than the nominal resolution, normal otherwise.
+ *
+ **/
+QOMX_JPEG_SPEED_MODE mm_jpeg_get_speed(
+  mm_jpeg_job_session_t* p_session)
+{
+  mm_jpeg_encode_params_t *p_enc_params = &p_session->params;
+  cam_dimension_t *p_src_dim = &p_enc_params->main_dim.src_dim;
+
+  /* Normal speed only for non-burst jobs at or below the nominal size. */
+  if (!p_enc_params->burst_mode &&
+    ((p_src_dim->width * p_src_dim->height) <= MM_JPEG_MIN_NOM_RESOLUTION)) {
+    return QOMX_JPEG_SPEED_MODE_NORMAL;
+  }
+  return QOMX_JPEG_SPEED_MODE_HIGH;
+}
+
+/** mm_jpeg_speed_mode:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *      Configure normal or high speed jpeg
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_speed_mode(
+  mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE rc = 0;
+  OMX_INDEXTYPE indextype;
+  QOMX_JPEG_SPEED jpeg_speed;
+
+  rc = OMX_GetExtensionIndex(p_session->omx_handle,
+    QOMX_IMAGE_EXT_JPEG_SPEED_NAME, &indextype);
+  if (rc != OMX_ErrorNone) {
+    LOGE("Failed");
+    return rc;
+  }
+
+  /* Zero-initialize before passing to the component — only speedMode is
+     set explicitly, and sibling functions memset their OMX structs too. */
+  memset(&jpeg_speed, 0, sizeof(jpeg_speed));
+  jpeg_speed.speedMode = mm_jpeg_get_speed(p_session);
+  LOGH("speed %d", jpeg_speed.speedMode);
+
+  rc = OMX_SetParameter(p_session->omx_handle, indextype, &jpeg_speed);
+  if (rc != OMX_ErrorNone) {
+    LOGE("Failed");
+    return rc;
+  }
+  return rc;
+}
+
+/** mm_jpeg_get_mem:
+ *
+ *  Arguments:
+ *    @p_out_buf : jpeg output buffer
+ *    @p_jpeg_session: job session
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *      gets the jpeg output buffer
+ *
+ **/
+static int32_t mm_jpeg_get_mem(
+  omx_jpeg_ouput_buf_t *p_out_buf, void* p_jpeg_session)
+{
+  int32_t rc = 0;
+  mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *)p_jpeg_session;
+  mm_jpeg_encode_params_t *p_params = NULL;
+  mm_jpeg_encode_job_t *p_encode_job = NULL;
+
+  if (!p_session) {
+    LOGE("Invalid input");
+    return -1;
+  }
+  p_params = &p_session->params;
+  p_encode_job = &p_session->encode_job;
+  /* NOTE(review): p_params/p_encode_job are addresses of embedded members
+     and can never be NULL here; only the get_memory test is effective. */
+  if (!p_params || !p_encode_job || !p_params->get_memory) {
+    LOGE("Invalid jpeg encode params");
+    return -1;
+  }
+  /* Ask the HAL to allocate/fill the output buffer, then track it with a
+     reference count so mm_jpeg_put_mem can release it later. */
+  p_params->get_memory(p_out_buf);
+  p_encode_job->ref_count++;
+  p_encode_job->alloc_out_buffer = p_out_buf;
+  LOGD("ref_count %d p_out_buf %p",
+    p_encode_job->ref_count, p_out_buf);
+  return rc;
+}
+
+/** mm_jpeg_put_mem:
+ *
+ *  Arguments:
+ *    @p_jpeg_session: job session
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *      releases the jpeg output buffer
+ *
+ **/
+static int32_t mm_jpeg_put_mem(void* p_jpeg_session)
+{
+  int32_t rc = 0;
+  mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *)p_jpeg_session;
+  mm_jpeg_encode_params_t *p_params = NULL;
+  mm_jpeg_encode_job_t *p_encode_job = NULL;
+
+  if (!p_session) {
+    LOGE("Invalid input");
+    return -1;
+  }
+  /* p_params/p_encode_job are addresses of embedded members, so they can
+     never be NULL once p_session is validated. The previous code tested
+     them for NULL only AFTER already dereferencing p_params->get_memory;
+     validate the function pointers directly instead. */
+  p_params = &p_session->params;
+  p_encode_job = &p_session->encode_job;
+
+  if (!p_params->get_memory) {
+    LOGD("get_mem not defined, ignore put mem");
+    return 0;
+  }
+  if (!p_params->put_memory) {
+    LOGE("Invalid jpeg encode params");
+    return -1;
+  }
+  /* Return the buffer to the HAL only while a job is aborting and a
+     reference is still held; otherwise just drop the reference. */
+  if ((MM_JPEG_ABORT_NONE != p_session->abort_state) &&
+    p_encode_job->ref_count) {
+    omx_jpeg_ouput_buf_t *p_out_buf =
+      ( omx_jpeg_ouput_buf_t *) p_encode_job->alloc_out_buffer;
+    p_params->put_memory(p_out_buf);
+    p_encode_job->ref_count--;
+    p_encode_job->alloc_out_buffer = NULL;
+  } else if (p_encode_job->ref_count) {
+    p_encode_job->ref_count--;
+  } else {
+    LOGW("Buffer already released %d", p_encode_job->ref_count);
+    rc = -1;
+  }
+  LOGD("ref_count %d p_out_buf %p",
+    p_encode_job->ref_count, p_encode_job->alloc_out_buffer);
+  return rc;
+}
+
+/** mm_jpeg_mem_ops:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Register the HAL memory-ops callbacks (get_memory hook and the
+ *       session cookie) with the OMX component.
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_mem_ops(
+  mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE rc = 0;
+  OMX_INDEXTYPE indextype;
+  QOMX_MEM_OPS mem_ops;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+
+  /* NOTE(review): mem_ops is not zero-initialized; any QOMX_MEM_OPS field
+     other than get_memory/psession is left indeterminate — confirm the
+     struct layout against the QOMX extension header. */
+  if (p_params->get_memory) {
+    mem_ops.get_memory = mm_jpeg_get_mem;
+  } else {
+    mem_ops.get_memory = NULL;
+    LOGH("HAL get_mem handler undefined");
+  }
+
+  mem_ops.psession = p_session;
+  rc = OMX_GetExtensionIndex(p_session->omx_handle,
+    QOMX_IMAGE_EXT_MEM_OPS_NAME, &indextype);
+  if (rc != OMX_ErrorNone) {
+    LOGE("Failed");
+    return rc;
+  }
+
+  rc = OMX_SetParameter(p_session->omx_handle, indextype, &mem_ops);
+  if (rc != OMX_ErrorNone) {
+    LOGE("Failed");
+    return rc;
+  }
+  return rc;
+}
+
+/** mm_jpeg_metadata:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Pass per-job metadata (plus static metadata and the mobicat
+ *       mask) to the OMX component.
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_metadata(
+  mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  OMX_INDEXTYPE indexType;
+  QOMX_METADATA lMeta;
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+  mm_jpeg_obj *my_obj = (mm_jpeg_obj *) p_session->jpeg_obj;
+
+  rc = OMX_GetExtensionIndex(p_session->omx_handle,
+      QOMX_IMAGE_EXT_METADATA_NAME, &indexType);
+
+  if (rc != OMX_ErrorNone) {
+    LOGE("Failed");
+    return rc;
+  }
+
+  /* Zero-initialize before filling: avoids passing indeterminate bytes
+     for any QOMX_METADATA field not set below (sibling config functions
+     memset their OMX structs the same way). */
+  memset(&lMeta, 0, sizeof(lMeta));
+  lMeta.metadata = (OMX_U8 *)p_jobparams->p_metadata;
+  lMeta.metaPayloadSize = sizeof(*p_jobparams->p_metadata);
+  lMeta.mobicat_mask = p_jobparams->mobicat_mask;
+  lMeta.static_metadata = (OMX_U8 *)my_obj->jpeg_metadata;
+
+  rc = OMX_SetConfig(p_session->omx_handle, indexType, &lMeta);
+  if (rc != OMX_ErrorNone) {
+    LOGE("Failed");
+    return rc;
+  }
+  return OMX_ErrorNone;
+}
+
+/** mm_jpeg_meta_enc_key:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Pass metadata encrypt key
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_meta_enc_key(
+  mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  OMX_INDEXTYPE indexType;
+  QOMX_META_ENC_KEY lKey;
+
+  lKey.metaKey = p_session->meta_enc_key;
+  lKey.keyLen = p_session->meta_enc_keylen;
+
+  /* A missing/empty key is not an error: encryption is simply skipped. */
+  if ((!lKey.metaKey) || (!lKey.keyLen)){
+    LOGD("Key is invalid");
+    return OMX_ErrorNone;
+  }
+
+  rc = OMX_GetExtensionIndex(p_session->omx_handle,
+      QOMX_IMAGE_EXT_META_ENC_KEY_NAME, &indexType);
+
+  if (rc != OMX_ErrorNone) {
+    LOGE("Failed");
+    return rc;
+  }
+
+  rc = OMX_SetConfig(p_session->omx_handle, indexType, &lKey);
+  if (rc != OMX_ErrorNone) {
+    LOGE("Failed");
+    return rc;
+  }
+  return OMX_ErrorNone;
+}
+
+/** map_jpeg_format:
+ *
+ *  Arguments:
+ *    @color_fmt: color format
+ *
+ *  Return:
+ *       OMX color format
+ *
+ *  Description:
+ *       Translate an mmjpeg color format into the matching OMX/QOMX
+ *       color format constant. Unknown inputs are logged and mapped to
+ *       the YVU420 semi-planar default.
+ *
+ **/
+int map_jpeg_format(mm_jpeg_color_format color_fmt)
+{
+  int omx_fmt;
+
+  switch (color_fmt) {
+  case MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2:
+    omx_fmt = (int)OMX_QCOM_IMG_COLOR_FormatYVU420SemiPlanar;
+    break;
+  case MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2:
+    omx_fmt = (int)OMX_COLOR_FormatYUV420SemiPlanar;
+    break;
+  case MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1:
+    omx_fmt = (int)OMX_QCOM_IMG_COLOR_FormatYVU422SemiPlanar;
+    break;
+  case MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1:
+    omx_fmt = (int)OMX_COLOR_FormatYUV422SemiPlanar;
+    break;
+  case MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V2:
+    omx_fmt = (int)OMX_QCOM_IMG_COLOR_FormatYVU422SemiPlanar_h1v2;
+    break;
+  case MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V2:
+    omx_fmt = (int)OMX_QCOM_IMG_COLOR_FormatYUV422SemiPlanar_h1v2;
+    break;
+  case MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V1:
+    omx_fmt = (int)OMX_QCOM_IMG_COLOR_FormatYVU444SemiPlanar;
+    break;
+  case MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V1:
+    omx_fmt = (int)OMX_QCOM_IMG_COLOR_FormatYUV444SemiPlanar;
+    break;
+  case MM_JPEG_COLOR_FORMAT_MONOCHROME:
+    omx_fmt = (int)OMX_COLOR_FormatMonochrome;
+    break;
+  default:
+    LOGW("invalid format %d", color_fmt);
+    omx_fmt = (int)OMX_QCOM_IMG_COLOR_FormatYVU420SemiPlanar;
+    break;
+  }
+  return omx_fmt;
+}
+
+/** mm_jpeg_get_imgfmt_from_colorfmt:
+ *
+ *  Arguments:
+ *    @color_fmt: color format
+ *
+ *  Return:
+ *    cam format
+ *
+ *  Description:
+ *    Translate an mmjpeg color format into the camera image format.
+ *    Unknown inputs fall back to CAM_FORMAT_Y_ONLY.
+ *
+ **/
+cam_format_t mm_jpeg_get_imgfmt_from_colorfmt
+  (mm_jpeg_color_format color_fmt)
+{
+  cam_format_t img_fmt;
+
+  switch (color_fmt) {
+  case MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2:
+    img_fmt = CAM_FORMAT_YUV_420_NV21;
+    break;
+  case MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2:
+    img_fmt = CAM_FORMAT_YUV_420_NV12;
+    break;
+  /* H2V1 and H1V2 share the same 422 camera format per chroma order. */
+  case MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1:
+  case MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V2:
+    img_fmt = CAM_FORMAT_YUV_422_NV61;
+    break;
+  case MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1:
+  case MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V2:
+    img_fmt = CAM_FORMAT_YUV_422_NV16;
+    break;
+  case MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V1:
+    img_fmt = CAM_FORMAT_YUV_444_NV42;
+    break;
+  case MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V1:
+    img_fmt = CAM_FORMAT_YUV_444_NV24;
+    break;
+  case MM_JPEG_COLOR_FORMAT_MONOCHROME:
+  default:
+    img_fmt = CAM_FORMAT_Y_ONLY;
+    break;
+  }
+  return img_fmt;
+}
+
+/** mm_jpeg_session_config_ports:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure OMX ports
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_config_ports(mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+  OMX_CONFIG_ROTATIONTYPE rotate;
+
+  mm_jpeg_buf_t *p_src_buf =
+    &p_params->src_main_buf[0];
+
+  /* Port layout: 0 = main-image input, 1 = encoded output,
+     2 = thumbnail input. */
+  p_session->inputPort.nPortIndex = 0;
+  p_session->outputPort.nPortIndex = 1;
+  p_session->inputTmbPort.nPortIndex = 2;
+
+  ret = OMX_GetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+    &p_session->inputPort);
+  if (ret) {
+    LOGE("failed");
+    return ret;
+  }
+
+  ret = OMX_GetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+    &p_session->inputTmbPort);
+  if (ret) {
+    LOGE("failed");
+    return ret;
+  }
+
+  ret = OMX_GetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+    &p_session->outputPort);
+  if (ret) {
+    LOGE("failed");
+    return ret;
+  }
+
+  /* If lib2d rotated the source by 90/270, width/height are swapped and
+     stride/scanline are exchanged to describe the rotated buffer. */
+  if (p_session->lib2d_rotation_flag &&
+    ((p_session->params.rotation == 90) ||
+    (p_session->params.rotation == 270))) {
+    p_session->inputPort.format.image.nFrameWidth =
+      (OMX_U32)p_params->main_dim.src_dim.height;
+    p_session->inputPort.format.image.nFrameHeight =
+      (OMX_U32)p_params->main_dim.src_dim.width;
+    p_session->inputPort.format.image.nStride =
+      p_src_buf->offset.mp[0].scanline;
+    p_session->inputPort.format.image.nSliceHeight =
+      (OMX_U32)p_src_buf->offset.mp[0].stride;
+  } else {
+    p_session->inputPort.format.image.nFrameWidth =
+      (OMX_U32)p_params->main_dim.src_dim.width;
+    p_session->inputPort.format.image.nFrameHeight =
+      (OMX_U32)p_params->main_dim.src_dim.height;
+    p_session->inputPort.format.image.nStride =
+      p_src_buf->offset.mp[0].stride;
+    p_session->inputPort.format.image.nSliceHeight =
+      (OMX_U32)p_src_buf->offset.mp[0].scanline;
+  }
+
+  p_session->inputPort.format.image.eColorFormat =
+    map_jpeg_format(p_params->color_format);
+  p_session->inputPort.nBufferSize =
+    p_params->src_main_buf[0/*p_jobparams->src_index*/].buf_size;
+
+  /* With lib2d rotation the component consumes the rotated copies, so the
+     buffer count comes from the rotation buffer pool. */
+  if (p_session->lib2d_rotation_flag) {
+    p_session->inputPort.nBufferCountActual =
+      (OMX_U32)p_session->num_src_rot_bufs;
+  } else {
+    p_session->inputPort.nBufferCountActual =
+      (OMX_U32)p_params->num_src_bufs;
+  }
+
+  ret = OMX_SetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+    &p_session->inputPort);
+  if (ret) {
+    LOGE("failed");
+    return ret;
+  }
+
+  if (p_session->params.encode_thumbnail) {
+    mm_jpeg_buf_t *p_tmb_buf =
+      &p_params->src_thumb_buf[0];
+    /* Same swap as the main port when the thumbnail is generated from the
+       lib2d-rotated main image. */
+    if ((p_session->lib2d_rotation_flag && p_session->thumb_from_main) &&
+      ((p_session->params.rotation == 90) ||
+      (p_session->params.rotation == 270))) {
+      p_session->inputTmbPort.format.image.nFrameWidth =
+        (OMX_U32)p_params->thumb_dim.src_dim.height;
+      p_session->inputTmbPort.format.image.nFrameHeight =
+        (OMX_U32)p_params->thumb_dim.src_dim.width;
+      p_session->inputTmbPort.format.image.nStride =
+        p_tmb_buf->offset.mp[0].scanline;
+      p_session->inputTmbPort.format.image.nSliceHeight =
+        (OMX_U32)p_tmb_buf->offset.mp[0].stride;
+    } else {
+      p_session->inputTmbPort.format.image.nFrameWidth =
+        (OMX_U32)p_params->thumb_dim.src_dim.width;
+      p_session->inputTmbPort.format.image.nFrameHeight =
+        (OMX_U32)p_params->thumb_dim.src_dim.height;
+      p_session->inputTmbPort.format.image.nStride =
+        p_tmb_buf->offset.mp[0].stride;
+      p_session->inputTmbPort.format.image.nSliceHeight =
+        (OMX_U32)p_tmb_buf->offset.mp[0].scanline;
+    }
+
+    p_session->inputTmbPort.format.image.eColorFormat =
+      map_jpeg_format(p_params->thumb_color_format);
+    p_session->inputTmbPort.nBufferSize =
+      p_params->src_thumb_buf[0].buf_size;
+
+    if (p_session->lib2d_rotation_flag && p_session->thumb_from_main) {
+      p_session->inputTmbPort.nBufferCountActual =
+        (OMX_U32)p_session->num_src_rot_bufs;
+    } else {
+      p_session->inputTmbPort.nBufferCountActual =
+        (OMX_U32)p_params->num_tmb_bufs;
+    }
+
+    ret = OMX_SetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+      &p_session->inputTmbPort);
+
+    if (ret) {
+      LOGE("failed");
+      return ret;
+    }
+
+    // Enable thumbnail port
+    ret = OMX_SendCommand(p_session->omx_handle, OMX_CommandPortEnable,
+        p_session->inputTmbPort.nPortIndex, NULL);
+
+    if (ret) {
+      LOGE("failed");
+      return ret;
+    }
+  } else {
+    // Disable thumbnail port
+    ret = OMX_SendCommand(p_session->omx_handle, OMX_CommandPortDisable,
+        p_session->inputTmbPort.nPortIndex, NULL);
+
+    if (ret) {
+      LOGE("failed");
+      return ret;
+    }
+  }
+
+  p_session->outputPort.nBufferSize =
+    p_params->dest_buf[0].buf_size;
+  p_session->outputPort.nBufferCountActual = (OMX_U32)p_params->num_dst_bufs;
+  ret = OMX_SetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+    &p_session->outputPort);
+  if (ret) {
+    LOGE("failed");
+    return ret;
+  }
+
+  /* set rotation */
+  memset(&rotate, 0, sizeof(rotate));
+  rotate.nPortIndex = 1;
+
+  /* lib2d already rotated the pixels, so the encoder is told 0. */
+  if (p_session->lib2d_rotation_flag) {
+    rotate.nRotation = 0;
+  } else {
+    rotate.nRotation = (OMX_S32)p_params->rotation;
+  }
+
+  ret = OMX_SetConfig(p_session->omx_handle, OMX_IndexConfigCommonRotate,
+      &rotate);
+  if (OMX_ErrorNone != ret) {
+    LOGE("Error %d", ret);
+    return ret;
+  }
+  LOGD("Set rotation to %d at port_idx = %d",
+      (int)p_params->rotation, (int)rotate.nPortIndex);
+
+  return ret;
+}
+
+/** mm_jpeg_update_thumbnail_crop
+ *
+ *  Arguments:
+ *    @p_thumb_dim: thumbnail dimension
+ *    @crop_width : flag indicating if width needs to be cropped
+ *
+ *  Return:
+ *    OMX error values
+ *
+ *  Description:
+ *    Updates thumbnail crop aspect ratio based on
+ *    thumbnail destination aspect ratio.
+ *
+ */
+OMX_ERRORTYPE mm_jpeg_update_thumbnail_crop(mm_jpeg_dim_t *p_thumb_dim,
+  uint8_t crop_width)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  int32_t cropped_width = 0, cropped_height = 0;
+
+  /* NOTE(review): the arguments to floor() here are integer expressions,
+     so the division has already truncated — floor() is a no-op kept for
+     clarity. Results are then forced even (hardware alignment). */
+  if (crop_width) {
+    // Keep height constant
+    cropped_height = p_thumb_dim->crop.height;
+    cropped_width = floor((cropped_height * p_thumb_dim->dst_dim.width) /
+      p_thumb_dim->dst_dim.height);
+    if (cropped_width % 2) {
+      cropped_width -= 1;
+    }
+  } else {
+    // Keep width constant
+    cropped_width = p_thumb_dim->crop.width;
+    cropped_height = floor((cropped_width * p_thumb_dim->dst_dim.height) /
+      p_thumb_dim->dst_dim.width);
+    if (cropped_height % 2) {
+      cropped_height -= 1;
+    }
+  }
+  /* Re-center the reduced crop inside the original crop window, keeping
+     the top-left corner on even coordinates. */
+  p_thumb_dim->crop.left = p_thumb_dim->crop.left +
+    floor((p_thumb_dim->crop.width - cropped_width) / 2);
+  if (p_thumb_dim->crop.left % 2) {
+    p_thumb_dim->crop.left -= 1;
+  }
+  p_thumb_dim->crop.top = p_thumb_dim->crop.top +
+    floor((p_thumb_dim->crop.height - cropped_height) / 2);
+  if (p_thumb_dim->crop.top % 2) {
+    p_thumb_dim->crop.top -= 1;
+  }
+  p_thumb_dim->crop.width = cropped_width;
+  p_thumb_dim->crop.height = cropped_height;
+
+  LOGH("New thumbnail crop: left %d, top %d, crop width %d,"
+    " crop height %d", p_thumb_dim->crop.left,
+    p_thumb_dim->crop.top, p_thumb_dim->crop.width,
+    p_thumb_dim->crop.height);
+
+  return ret;
+}
+
+/** mm_jpeg_session_config_thumbnail:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Validate thumbnail dimensions/crop and push the thumbnail
+ *       configuration to the OMX component.
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_config_thumbnail(mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  QOMX_THUMBNAIL_INFO thumbnail_info;
+  OMX_INDEXTYPE thumb_indextype;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+  mm_jpeg_dim_t *p_thumb_dim = &p_jobparams->thumb_dim;
+  mm_jpeg_dim_t *p_main_dim = &p_jobparams->main_dim;
+  QOMX_YUV_FRAME_INFO *p_frame_info = &thumbnail_info.tmbOffset;
+  mm_jpeg_buf_t *p_tmb_buf = &p_params->src_thumb_buf[p_jobparams->thumb_index];
+
+  LOGH("encode_thumbnail %u",
+    p_params->encode_thumbnail);
+  if (OMX_FALSE == p_params->encode_thumbnail) {
+    return ret;
+  }
+
+  if ((p_thumb_dim->dst_dim.width == 0) || (p_thumb_dim->dst_dim.height == 0)) {
+    LOGE("Error invalid output dim for thumbnail");
+    return OMX_ErrorBadParameter;
+  }
+
+  if ((p_thumb_dim->src_dim.width == 0) || (p_thumb_dim->src_dim.height == 0)) {
+    LOGE("Error invalid input dim for thumbnail");
+    return OMX_ErrorBadParameter;
+  }
+
+  /* A zero crop means "no crop": default to the full source. */
+  if ((p_thumb_dim->crop.width == 0) || (p_thumb_dim->crop.height == 0)) {
+    p_thumb_dim->crop.width = p_thumb_dim->src_dim.width;
+    p_thumb_dim->crop.height = p_thumb_dim->src_dim.height;
+  }
+
+  /* check crop boundary */
+  if ((p_thumb_dim->crop.width + p_thumb_dim->crop.left > p_thumb_dim->src_dim.width) ||
+    (p_thumb_dim->crop.height + p_thumb_dim->crop.top > p_thumb_dim->src_dim.height)) {
+    LOGE("invalid crop boundary (%d, %d) offset (%d, %d) out of (%d, %d)",
+      p_thumb_dim->crop.width,
+      p_thumb_dim->crop.height,
+      p_thumb_dim->crop.left,
+      p_thumb_dim->crop.top,
+      p_thumb_dim->src_dim.width,
+      p_thumb_dim->src_dim.height);
+    return OMX_ErrorBadParameter;
+  }
+
+  memset(&thumbnail_info, 0x0, sizeof(QOMX_THUMBNAIL_INFO));
+  ret = OMX_GetExtensionIndex(p_session->omx_handle,
+    QOMX_IMAGE_EXT_THUMBNAIL_NAME,
+    &thumb_indextype);
+  if (ret) {
+    LOGE("Error %d", ret);
+    return ret;
+  }
+
+  /* fill thumbnail info */
+  thumbnail_info.scaling_enabled = 1;
+  thumbnail_info.input_width = (OMX_U32)p_thumb_dim->src_dim.width;
+  thumbnail_info.input_height = (OMX_U32)p_thumb_dim->src_dim.height;
+  thumbnail_info.rotation = (OMX_U32)p_params->thumb_rotation;
+  thumbnail_info.quality = (OMX_U32)p_params->thumb_quality;
+  thumbnail_info.output_width = (OMX_U32)p_thumb_dim->dst_dim.width;
+  thumbnail_info.output_height = (OMX_U32)p_thumb_dim->dst_dim.height;
+
+  if (p_session->thumb_from_main) {
+
+    /* lib2d already rotated the main image the thumbnail is derived
+       from, so no additional rotation is requested. */
+    if (p_session->lib2d_rotation_flag) {
+      thumbnail_info.rotation = 0;
+    } else {
+      /* Thumb rotated 90/270 relative to an unrotated (0/180) main
+         image: swap the output dimensions and inherit main rotation. */
+      if ((p_session->params.thumb_rotation == 90 ||
+        p_session->params.thumb_rotation == 270) &&
+        (p_session->params.rotation == 0 ||
+        p_session->params.rotation == 180)) {
+
+        thumbnail_info.output_width = (OMX_U32)p_thumb_dim->dst_dim.height;
+        thumbnail_info.output_height = (OMX_U32)p_thumb_dim->dst_dim.width;
+        thumbnail_info.rotation = p_session->params.rotation;
+      }
+    }
+
+    //Thumb FOV should be within main image FOV
+    if (p_thumb_dim->crop.left < p_main_dim->crop.left) {
+      p_thumb_dim->crop.left = p_main_dim->crop.left;
+    }
+
+    if (p_thumb_dim->crop.top < p_main_dim->crop.top) {
+      p_thumb_dim->crop.top = p_main_dim->crop.top;
+    }
+
+    /* Clamp loop: first pass snaps left to the main crop origin, second
+       pass (if still exceeding) shrinks width — terminates in at most
+       two iterations. */
+    while ((p_thumb_dim->crop.left + p_thumb_dim->crop.width) >
+      (p_main_dim->crop.left + p_main_dim->crop.width)) {
+      if (p_thumb_dim->crop.left == p_main_dim->crop.left) {
+        p_thumb_dim->crop.width = p_main_dim->crop.width;
+      } else {
+        p_thumb_dim->crop.left = p_main_dim->crop.left;
+      }
+    }
+
+    /* Same clamp for the vertical direction. */
+    while ((p_thumb_dim->crop.top + p_thumb_dim->crop.height) >
+      (p_main_dim->crop.top + p_main_dim->crop.height)) {
+      if (p_thumb_dim->crop.top == p_main_dim->crop.top) {
+        p_thumb_dim->crop.height = p_main_dim->crop.height;
+      } else {
+        p_thumb_dim->crop.top = p_main_dim->crop.top;
+      }
+    }
+  } else if ((p_thumb_dim->dst_dim.width > p_thumb_dim->src_dim.width) ||
+    (p_thumb_dim->dst_dim.height > p_thumb_dim->src_dim.height)) {
+    LOGE("Incorrect thumbnail dim %dx%d resetting to %dx%d", p_thumb_dim->dst_dim.width,
+      p_thumb_dim->dst_dim.height, p_thumb_dim->src_dim.width,
+      p_thumb_dim->src_dim.height);
+    thumbnail_info.output_width = (OMX_U32)p_thumb_dim->src_dim.width;
+    thumbnail_info.output_height = (OMX_U32)p_thumb_dim->src_dim.height;
+  }
+
+  // If the thumbnail crop aspect ratio image and thumbnail dest aspect
+  // ratio are different, reset the thumbnail crop
+  double thumbcrop_aspect_ratio = (double)p_thumb_dim->crop.width /
+    (double)p_thumb_dim->crop.height;
+  double thumbdst_aspect_ratio = (double)p_thumb_dim->dst_dim.width /
+    (double)p_thumb_dim->dst_dim.height;
+  if ((thumbdst_aspect_ratio - thumbcrop_aspect_ratio) >
+    ASPECT_TOLERANCE) {
+    mm_jpeg_update_thumbnail_crop(p_thumb_dim, 0);
+  } else if ((thumbcrop_aspect_ratio - thumbdst_aspect_ratio) >
+    ASPECT_TOLERANCE) {
+    mm_jpeg_update_thumbnail_crop(p_thumb_dim, 1);
+  }
+
+  // Fill thumbnail crop info
+  thumbnail_info.crop_info.nWidth = (OMX_U32)p_thumb_dim->crop.width;
+  thumbnail_info.crop_info.nHeight = (OMX_U32)p_thumb_dim->crop.height;
+  thumbnail_info.crop_info.nLeft = p_thumb_dim->crop.left;
+  thumbnail_info.crop_info.nTop = p_thumb_dim->crop.top;
+
+  memset(p_frame_info, 0x0, sizeof(*p_frame_info));
+
+  p_frame_info->cbcrStartOffset[0] = p_tmb_buf->offset.mp[0].len;
+  p_frame_info->cbcrStartOffset[1] = p_tmb_buf->offset.mp[1].len;
+  p_frame_info->yOffset = p_tmb_buf->offset.mp[0].offset;
+  p_frame_info->cbcrOffset[0] = p_tmb_buf->offset.mp[1].offset;
+  p_frame_info->cbcrOffset[1] = p_tmb_buf->offset.mp[2].offset;
+
+  /* Rotated copies produced by lib2d are tightly packed: plane offsets
+     are zero. */
+  if (p_session->lib2d_rotation_flag && p_session->thumb_from_main) {
+    p_frame_info->yOffset = 0;
+    p_frame_info->cbcrOffset[0] = 0;
+    p_frame_info->cbcrOffset[1] = 0;
+  }
+
+  ret = OMX_SetConfig(p_session->omx_handle, thumb_indextype,
+    &thumbnail_info);
+  if (ret) {
+    LOGE("Error");
+    return ret;
+  }
+
+  return ret;
+}
+
+/** mm_jpeg_session_config_main_crop:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure main image crop
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_config_main_crop(mm_jpeg_job_session_t *p_session)
+{
+  OMX_CONFIG_RECTTYPE rect_type_in, rect_type_out;
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+  mm_jpeg_dim_t *dim = &p_jobparams->main_dim;
+
+  /* A zero crop means "no crop": default to the full source. */
+  if ((dim->crop.width == 0) || (dim->crop.height == 0)) {
+    dim->crop.width = dim->src_dim.width;
+    dim->crop.height = dim->src_dim.height;
+  }
+  /* error check first */
+  if ((dim->crop.width + dim->crop.left > dim->src_dim.width) ||
+    (dim->crop.height + dim->crop.top > dim->src_dim.height)) {
+    LOGE("invalid crop boundary (%d, %d) out of (%d, %d)",
+      dim->crop.width + dim->crop.left,
+      dim->crop.height + dim->crop.top,
+      dim->src_dim.width,
+      dim->src_dim.height);
+    return OMX_ErrorBadParameter;
+  }
+
+  memset(&rect_type_in, 0, sizeof(rect_type_in));
+  memset(&rect_type_out, 0, sizeof(rect_type_out));
+  rect_type_in.nPortIndex = 0;
+  rect_type_out.nPortIndex = 0;
+
+  /* Only fill the rects when cropping or scaling is actually needed;
+     zeroed rects (from the memsets above) tell the component to skip
+     the scaler. */
+  if ((dim->src_dim.width != dim->crop.width) ||
+    (dim->src_dim.height != dim->crop.height) ||
+    (dim->src_dim.width != dim->dst_dim.width) ||
+    (dim->src_dim.height != dim->dst_dim.height)) {
+    /* Scaler information */
+    rect_type_in.nWidth = CEILING2(dim->crop.width);
+    rect_type_in.nHeight = CEILING2(dim->crop.height);
+    rect_type_in.nLeft = dim->crop.left;
+    rect_type_in.nTop = dim->crop.top;
+
+    if (dim->dst_dim.width && dim->dst_dim.height) {
+      rect_type_out.nWidth = (OMX_U32)dim->dst_dim.width;
+      rect_type_out.nHeight = (OMX_U32)dim->dst_dim.height;
+    }
+  }
+
+  ret = OMX_SetConfig(p_session->omx_handle, OMX_IndexConfigCommonInputCrop,
+    &rect_type_in);
+  if (OMX_ErrorNone != ret) {
+    LOGE("Error");
+    return ret;
+  }
+
+  LOGH("OMX_IndexConfigCommonInputCrop w = %d, h = %d, l = %d, t = %d,"
+    " port_idx = %d",
+    (int)rect_type_in.nWidth, (int)rect_type_in.nHeight,
+    (int)rect_type_in.nLeft, (int)rect_type_in.nTop,
+    (int)rect_type_in.nPortIndex);
+
+  ret = OMX_SetConfig(p_session->omx_handle, OMX_IndexConfigCommonOutputCrop,
+    &rect_type_out);
+  if (OMX_ErrorNone != ret) {
+    LOGE("Error");
+    return ret;
+  }
+  LOGD("OMX_IndexConfigCommonOutputCrop w = %d, h = %d,"
+    " port_idx = %d",
+    (int)rect_type_out.nWidth, (int)rect_type_out.nHeight,
+    (int)rect_type_out.nPortIndex);
+
+  return ret;
+}
+
+/** mm_jpeg_session_config_main:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure main image: runs the per-session configuration steps
+ *       (ports, buffer offsets, encoding mode, encrypt key, mem ops,
+ *       speed mode) in order, stopping at the first failure.
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_config_main(mm_jpeg_job_session_t *p_session)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+
+  /* config port */
+  LOGD("config port");
+  rc = mm_jpeg_session_config_ports(p_session);
+  if (OMX_ErrorNone != rc) {
+    LOGE("config port failed");
+    return rc;
+  }
+
+  /* config buffer offset */
+  LOGD("config main buf offset");
+  rc = mm_jpeg_session_config_main_buffer_offset(p_session);
+  if (OMX_ErrorNone != rc) {
+    LOGE("config buffer offset failed");
+    return rc;
+  }
+
+  /* set the encoding mode */
+  rc = mm_jpeg_encoding_mode(p_session);
+  if (OMX_ErrorNone != rc) {
+    LOGE("config encoding mode failed");
+    return rc;
+  }
+
+  /* set the metadata encrypt key */
+  rc = mm_jpeg_meta_enc_key(p_session);
+  if (OMX_ErrorNone != rc) {
+    LOGE("config session failed");
+    return rc;
+  }
+
+  /* set the mem ops */
+  rc = mm_jpeg_mem_ops(p_session);
+  if (OMX_ErrorNone != rc) {
+    LOGE("config mem ops failed");
+    return rc;
+  }
+  /* set the jpeg speed mode */
+  rc = mm_jpeg_speed_mode(p_session);
+  if (OMX_ErrorNone != rc) {
+    LOGE("config speed mode failed");
+    return rc;
+  }
+
+  return rc;
+}
+
+/** mm_jpeg_session_config_common:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure common parameters
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_config_common(mm_jpeg_job_session_t *p_session)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  OMX_INDEXTYPE exif_idx;
+  OMX_CONFIG_ROTATIONTYPE rotate;
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+  QOMX_EXIF_INFO exif_info;
+
+  /* set rotation */
+  memset(&rotate, 0, sizeof(rotate));
+  rotate.nPortIndex = 1;
+
+  /* lib2d already rotated the pixels, so the encoder is told 0. */
+  if (p_session->lib2d_rotation_flag) {
+    rotate.nRotation = 0;
+  } else {
+    rotate.nRotation = (OMX_S32)p_jobparams->rotation;
+  }
+
+  rc = OMX_SetConfig(p_session->omx_handle, OMX_IndexConfigCommonRotate,
+    &rotate);
+  if (OMX_ErrorNone != rc) {
+      LOGE("Error %d", rc);
+      return rc;
+  }
+  LOGD("Set rotation to %d at port_idx = %d",
+    (int)p_jobparams->rotation, (int)rotate.nPortIndex);
+
+  /* Set Exif data*/
+  memset(&p_session->exif_info_local[0], 0, sizeof(p_session->exif_info_local));
+  rc = OMX_GetExtensionIndex(p_session->omx_handle, QOMX_IMAGE_EXT_EXIF_NAME,
+    &exif_idx);
+  if (OMX_ErrorNone != rc) {
+    LOGE("Error %d", rc);
+    return rc;
+  }
+
+  LOGD("Num of exif entries passed from HAL: %d",
+      (int)p_jobparams->exif_info.numOfEntries);
+  /* First push the EXIF entries supplied directly by the HAL... */
+  if (p_jobparams->exif_info.numOfEntries > 0) {
+    rc = OMX_SetConfig(p_session->omx_handle, exif_idx,
+        &p_jobparams->exif_info);
+    if (OMX_ErrorNone != rc) {
+      LOGE("Error %d", rc);
+      return rc;
+    }
+  }
+  /*parse aditional exif data from the metadata*/
+  exif_info.numOfEntries = 0;
+  exif_info.exif_data = &p_session->exif_info_local[0];
+  process_meta_data(p_jobparams->p_metadata, &exif_info,
+    &p_jobparams->cam_exif_params, p_jobparams->hal_version);
+  /* After Parse metadata */
+  p_session->exif_count_local = (int)exif_info.numOfEntries;
+
+  /* ...then push any additional entries derived from the metadata. */
+  if (exif_info.numOfEntries > 0) {
+    /* set exif tags */
+    LOGD("exif tags from metadata count %d",
+      (int)exif_info.numOfEntries);
+
+    rc = OMX_SetConfig(p_session->omx_handle, exif_idx,
+      &exif_info);
+    if (OMX_ErrorNone != rc) {
+      LOGE("Error %d", rc);
+      return rc;
+    }
+  }
+
+  return rc;
+}
+
+/** mm_jpeg_session_abort:
+ *
+ *  Arguments:
+ *    @p_session: jpeg session
+ *
+ *  Return:
+ *       OMX_BOOL
+ *
+ *  Description:
+ *       Abort ongoing job
+ *
+ **/
+OMX_BOOL mm_jpeg_session_abort(mm_jpeg_job_session_t *p_session)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  int rc = 0;
+
+  LOGD("E");
+  pthread_mutex_lock(&p_session->lock);
+  /* Idempotent: a second abort on the same session is a no-op. */
+  if (MM_JPEG_ABORT_NONE != p_session->abort_state) {
+    pthread_mutex_unlock(&p_session->lock);
+    LOGH("**** ALREADY ABORTED");
+    return 0;
+  }
+  p_session->abort_state = MM_JPEG_ABORT_INIT;
+  if (OMX_TRUE == p_session->encoding) {
+    p_session->state_change_pending = OMX_TRUE;
+
+    LOGH("**** ABORTING");
+    /* The lock is dropped across the OMX calls — presumably so the OMX
+       event callback can take it to signal completion; confirm against
+       the event-handler implementation. */
+    pthread_mutex_unlock(&p_session->lock);
+
+    ret = OMX_SendCommand(p_session->omx_handle, OMX_CommandStateSet,
+      OMX_StateIdle, NULL);
+
+    if (ret != OMX_ErrorNone) {
+      LOGE("OMX_SendCommand returned error %d", ret);
+      return 1;
+    }
+    rc = mm_jpegenc_destroy_job(p_session);
+    if (rc != 0) {
+      LOGE("Destroy job returned error %d", rc);
+    }
+
+    pthread_mutex_lock(&p_session->lock);
+    /* Wait until the callback advances abort_state past ABORT_INIT. */
+    if (MM_JPEG_ABORT_INIT == p_session->abort_state) {
+      LOGL("before wait");
+      pthread_cond_wait(&p_session->cond, &p_session->lock);
+    }
+    LOGL("after wait");
+  }
+  p_session->abort_state = MM_JPEG_ABORT_DONE;
+
+  /* Release any output buffer still referenced by the aborted job. */
+  mm_jpeg_put_mem((void *)p_session);
+
+  pthread_mutex_unlock(&p_session->lock);
+
+  // Abort next session
+  if (p_session->next_session) {
+    mm_jpeg_session_abort(p_session->next_session);
+  }
+
+  LOGD("X");
+  return 0;
+}
+
+/** mm_jpeg_config_multi_image_info
+ *
+ *  Arguments:
+ *    @p_session: encode session
+ *
+ *  Return: OMX_ERRORTYPE
+ *
+ *  Description:
+ *       Push the multi-image parameters (MPO vs plain JPEG, primary
+ *       flag, image count, metadata flag) of the current encode job
+ *       down to the OMX component via the vendor extension.
+ *
+ **/
+static OMX_ERRORTYPE mm_jpeg_config_multi_image_info(
+  mm_jpeg_job_session_t *p_session)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  OMX_INDEXTYPE ext_idx;
+  QOMX_JPEG_MULTI_IMAGE_INFO img_info;
+  mm_jpeg_encode_job_t *p_job = &p_session->encode_job;
+
+  memset(&img_info, 0, sizeof(img_info));
+
+  /* resolve the vendor extension index for multi image info */
+  rc = OMX_GetExtensionIndex(p_session->omx_handle,
+    QOMX_IMAGE_EXT_MULTI_IMAGE_NAME, &ext_idx);
+  if (rc) {
+    LOGE("Error getting multi image info extention index %d", rc);
+    return rc;
+  }
+
+  img_info.image_type =
+    (p_job->multi_image_info.type == MM_JPEG_TYPE_MPO) ?
+    QOMX_JPEG_IMAGE_TYPE_MPO : QOMX_JPEG_IMAGE_TYPE_JPEG;
+  img_info.is_primary_image = p_job->multi_image_info.is_primary;
+  img_info.num_of_images = p_job->multi_image_info.num_of_images;
+  img_info.enable_metadata = p_job->multi_image_info.enable_metadata;
+
+  rc = OMX_SetConfig(p_session->omx_handle, ext_idx, &img_info);
+  if (rc) {
+    LOGE("Error setting multi image config");
+  }
+  return rc;
+}
+
+/** mm_jpeg_configure_job_params
+ *
+ *  Arguments:
+ *    @p_session: encode session
+ *
+ *  Return:
+ *       OMX_ERRORTYPE
+ *
+ *  Description:
+ *       Configure the job specific params, in order: common config,
+ *       main image crop, quality, thumbnail, HW work buffer,
+ *       metadata/EXIF, quantization tables and multi-image info.
+ *
+ **/
+static OMX_ERRORTYPE mm_jpeg_configure_job_params(
+  mm_jpeg_job_session_t *p_session)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  OMX_IMAGE_PARAM_QFACTORTYPE q_factor;
+  QOMX_WORK_BUFFER work_buffer;
+  OMX_INDEXTYPE work_buffer_index;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+  int i;
+
+  /* common config */
+  /* NOTE(review): failure here is only logged, not returned — unlike
+   * every later step. Confirm this best-effort behavior is intended. */
+  ret = mm_jpeg_session_config_common(p_session);
+  if (OMX_ErrorNone != ret) {
+    LOGE("config common failed");
+  }
+
+  /* config Main Image crop */
+  LOGD("config main crop");
+  ret = mm_jpeg_session_config_main_crop(p_session);
+  if (OMX_ErrorNone != ret) {
+    LOGE("config crop failed");
+    return ret;
+  }
+
+  /* set quality (taken from the session params, not the per-job params) */
+  memset(&q_factor, 0, sizeof(q_factor));
+  q_factor.nPortIndex = 0;
+  q_factor.nQFactor = p_params->quality;
+  ret = OMX_SetConfig(p_session->omx_handle, OMX_IndexParamQFactor, &q_factor);
+  LOGD("config QFactor: %d", (int)q_factor.nQFactor);
+  if (OMX_ErrorNone != ret) {
+    LOGE("Error setting Q factor %d", ret);
+    return ret;
+  }
+
+  /* config thumbnail */
+  ret = mm_jpeg_session_config_thumbnail(p_session);
+  if (OMX_ErrorNone != ret) {
+    LOGE("config thumbnail img failed");
+    return ret;
+  }
+
+  //Pass the ION buffer to be used as o/p for HW
+  memset(&work_buffer, 0x0, sizeof(QOMX_WORK_BUFFER));
+  ret = OMX_GetExtensionIndex(p_session->omx_handle,
+    QOMX_IMAGE_EXT_WORK_BUFFER_NAME,
+    &work_buffer_index);
+  if (ret) {
+    LOGE("Error getting work buffer index %d", ret);
+    return ret;
+  }
+  work_buffer.fd = p_session->work_buffer.p_pmem_fd;
+  work_buffer.vaddr = p_session->work_buffer.addr;
+  work_buffer.length = (uint32_t)p_session->work_buffer.size;
+  LOGH("Work buffer info %d %p WorkBufSize: %d invalidate",
+      work_buffer.fd, work_buffer.vaddr, work_buffer.length);
+
+  /* cache maintenance before the HW writes into the buffer */
+  buffer_invalidate(&p_session->work_buffer);
+
+  ret = OMX_SetConfig(p_session->omx_handle, work_buffer_index,
+    &work_buffer);
+  if (ret) {
+    LOGE("Error");
+    return ret;
+  }
+
+  /* set metadata */
+  ret = mm_jpeg_metadata(p_session);
+  if (OMX_ErrorNone != ret) {
+    LOGE("config makernote data failed");
+    return ret;
+  }
+
+  /* set QTable — only the tables the job explicitly provided */
+  for (i = 0; i < QTABLE_MAX; i++) {
+    if (p_jobparams->qtable_set[i]) {
+      ret = OMX_SetConfig(p_session->omx_handle,
+        OMX_IndexParamQuantizationTable, &p_jobparams->qtable[i]);
+      if (OMX_ErrorNone != ret) {
+        LOGE("set QTable Error");
+        return ret;
+      }
+    }
+  }
+
+  /* Set multi image data*/
+  ret = mm_jpeg_config_multi_image_info(p_session);
+  if (OMX_ErrorNone != ret) {
+    LOGE("config multi image data failed");
+    return ret;
+  }
+
+  return ret;
+}
+
+/** mm_jpeg_session_configure:
+ *
+ *  Arguments:
+ *    @p_session: encode session
+ *
+ *  Return:
+ *       OMX_ERRORTYPE
+ *
+ *  Description:
+ *       Configure the session: set the main image port config and
+ *       walk the OMX component Loaded -> Idle -> Executing, sending
+ *       the session buffers during the Idle transition.
+ *
+ **/
+static OMX_ERRORTYPE mm_jpeg_session_configure(mm_jpeg_job_session_t *p_session)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+
+  LOGD("E ");
+
+  /* bail out early (jumps to error) if an abort was requested */
+  MM_JPEG_CHK_ABORT(p_session, ret, error);
+
+  /* config main img */
+  ret = mm_jpeg_session_config_main(p_session);
+  if (OMX_ErrorNone != ret) {
+    LOGE("config main img failed");
+    goto error;
+  }
+  /* move to Idle; mm_jpeg_session_send_buffers runs during the
+   * transition to hand the session buffers to the component */
+  ret = mm_jpeg_session_change_state(p_session, OMX_StateIdle,
+    mm_jpeg_session_send_buffers);
+  if (ret) {
+    LOGE("change state to idle failed %d", ret);
+    goto error;
+  }
+
+  ret = mm_jpeg_session_change_state(p_session, OMX_StateExecuting,
+    NULL);
+  if (ret) {
+    LOGE("change state to executing failed %d", ret);
+    goto error;
+  }
+
+error:
+  LOGD("X ret %d", ret);
+  return ret;
+}
+
+
+
+
+
+
+/** mm_jpeg_session_encode:
+ *
+ *  Arguments:
+ *    @p_session: encode session
+ *
+ *  Return:
+ *       OMX_ERRORTYPE
+ *
+ *  Description:
+ *       Start the encoding: (re)configure the OMX session if needed,
+ *       apply the per-job params, then queue the main input buffer,
+ *       the optional thumbnail buffer and the output buffer to OMX.
+ *
+ **/
+static OMX_ERRORTYPE mm_jpeg_session_encode(mm_jpeg_job_session_t *p_session)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+  mm_jpeg_obj *my_obj = (mm_jpeg_obj *) p_session->jpeg_obj;
+  OMX_BUFFERHEADERTYPE *p_in_buf = NULL;
+  OMX_BUFFERHEADERTYPE *p_in_thumb_buf = NULL;
+
+  /* reset abort/encoding state for the new job */
+  pthread_mutex_lock(&p_session->lock);
+  p_session->abort_state = MM_JPEG_ABORT_NONE;
+  p_session->encoding = OMX_FALSE;
+  pthread_mutex_unlock(&p_session->lock);
+
+  /* thumbnail shares the main image source buffer and crop */
+  if (p_session->thumb_from_main) {
+    if (0 > p_jobparams->src_index) {
+      LOGE("Error");
+      ret = OMX_ErrorUnsupportedIndex;
+      goto error;
+    }
+    p_jobparams->thumb_index = (uint32_t)p_jobparams->src_index;
+    p_jobparams->thumb_dim.crop = p_jobparams->main_dim.crop;
+  }
+
+  if (OMX_FALSE == p_session->config) {
+    /* If another session in progress clear that sessions configuration */
+    if (my_obj->p_session_inprogress != NULL) {
+      OMX_STATETYPE state;
+      mm_jpeg_job_session_t *p_session_inprogress = my_obj->p_session_inprogress;
+
+      OMX_GetState(p_session_inprogress->omx_handle, &state);
+
+      //Check state before state transition
+      if ((state == OMX_StateExecuting) || (state == OMX_StatePause)) {
+        ret = mm_jpeg_session_change_state(p_session_inprogress,
+          OMX_StateIdle, NULL);
+        if (ret) {
+          LOGE("Error");
+          goto error;
+        }
+      }
+
+      OMX_GetState(p_session_inprogress->omx_handle, &state);
+
+      if (state == OMX_StateIdle) {
+        ret = mm_jpeg_session_change_state(p_session_inprogress,
+          OMX_StateLoaded, mm_jpeg_session_free_buffers);
+        if (ret) {
+          LOGE("Error");
+          goto error;
+        }
+      }
+      p_session_inprogress->config = OMX_FALSE;
+      my_obj->p_session_inprogress = NULL;
+    }
+
+    ret = mm_jpeg_session_configure(p_session);
+    if (ret) {
+      LOGE("Error");
+      goto error;
+    }
+    p_session->config = OMX_TRUE;
+    my_obj->p_session_inprogress = p_session;
+  }
+
+  ret = mm_jpeg_configure_job_params(p_session);
+  if (ret) {
+    LOGE("Error");
+    goto error;
+  }
+  pthread_mutex_lock(&p_session->lock);
+  p_session->encoding = OMX_TRUE;
+  pthread_mutex_unlock(&p_session->lock);
+
+  MM_JPEG_CHK_ABORT(p_session, ret, error);
+
+  /* pick the lib2d-rotated input when SW rotation was applied */
+  if (p_session->lib2d_rotation_flag) {
+    p_in_buf = p_session->p_in_rot_omx_buf[p_jobparams->src_index];
+  } else {
+    p_in_buf = p_session->p_in_omx_buf[p_jobparams->src_index];
+  }
+
+#ifdef MM_JPEG_DUMP_INPUT
+  char filename[256];
+  snprintf(filename, sizeof(filename),
+    QCAMERA_DUMP_FRM_LOCATION"jpeg/mm_jpeg_int%d.yuv", p_session->ebd_count);
+  DUMP_TO_FILE(filename, p_in_buf->pBuffer, (size_t)p_in_buf->nAllocLen);
+#endif
+  ret = OMX_EmptyThisBuffer(p_session->omx_handle, p_in_buf);
+  if (ret) {
+    LOGE("Error");
+    goto error;
+  }
+
+  if (p_session->params.encode_thumbnail) {
+
+    if (p_session->thumb_from_main &&
+      p_session->lib2d_rotation_flag) {
+      p_in_thumb_buf = p_session->p_in_rot_omx_thumb_buf[p_jobparams->thumb_index];
+    } else {
+      p_in_thumb_buf = p_session->p_in_omx_thumb_buf[p_jobparams->thumb_index];
+    }
+
+#ifdef MM_JPEG_DUMP_INPUT
+    char thumb_filename[FILENAME_MAX];
+    snprintf(thumb_filename, sizeof(thumb_filename),
+      QCAMERA_DUMP_FRM_LOCATION"jpeg/mm_jpeg_int_t%d.yuv", p_session->ebd_count);
+    /* BUGFIX: dump to thumb_filename; this previously passed "filename"
+     * and overwrote the main-image dump with the thumbnail data */
+    DUMP_TO_FILE(thumb_filename, p_in_thumb_buf->pBuffer,
+      (size_t)p_in_thumb_buf->nAllocLen);
+#endif
+    ret = OMX_EmptyThisBuffer(p_session->omx_handle, p_in_thumb_buf);
+    if (ret) {
+      LOGE("Error");
+      goto error;
+    }
+  }
+
+  ret = OMX_FillThisBuffer(p_session->omx_handle,
+    p_session->p_out_omx_buf[p_jobparams->dst_index]);
+  if (ret) {
+    LOGE("Error");
+    goto error;
+  }
+
+  MM_JPEG_CHK_ABORT(p_session, ret, error);
+
+error:
+
+  LOGD("X ");
+  return ret;
+}
+
+/** mm_jpeg_process_encoding_job:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg client
+ *    @job_node: job node
+ *
+ *  Return:
+ *       0 for success -1 otherwise
+ *       NOTE(review): the no-output-buffer path returns
+ *       OMX_ErrorUndefined instead — confirm callers only test
+ *       for non-zero.
+ *
+ *  Description:
+ *       Start the encoding job
+ *
+ **/
+int32_t mm_jpeg_process_encoding_job(mm_jpeg_obj *my_obj, mm_jpeg_job_q_node_t* job_node)
+{
+  mm_jpeg_q_data_t qdata;
+  int32_t rc = 0;
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_job_session_t *p_session = NULL;
+  uint32_t buf_idx;
+
+  /* check if valid session */
+  p_session = mm_jpeg_get_session(my_obj, job_node->enc_info.job_id);
+  if (NULL == p_session) {
+    LOGE("invalid job id %x",
+        job_node->enc_info.job_id);
+    return -1;
+  }
+
+  LOGD("before dequeue session %d", ret);
+
+  /* dequeue available omx handle */
+  qdata = mm_jpeg_queue_deq(p_session->session_handle_q);
+  p_session = qdata.p;
+
+  if (NULL == p_session) {
+    LOGH("No available sessions %d", ret);
+    /* No available handles */
+    /* push the job back to the head of the queue so it is retried
+     * when a session handle frees up */
+    qdata.p = job_node;
+    mm_jpeg_queue_enq_head(&my_obj->job_mgr.job_queue, qdata);
+
+    LOGH("end enqueue %d", ret);
+    return rc;
+
+  }
+
+  p_session->auto_out_buf = OMX_FALSE;
+  if (job_node->enc_info.encode_job.dst_index < 0) {
+    /* dequeue available output buffer idx */
+    qdata = mm_jpeg_queue_deq(p_session->out_buf_q);
+    buf_idx = qdata.u32;
+
+    /* the queue stores index+1 so that 0 can mean "empty" */
+    if (0U == buf_idx) {
+      LOGE("No available output buffers %d", ret);
+      /* NOTE(review): job_node is neither freed nor re-queued here
+       * and the dequeued session handle is not returned to
+       * session_handle_q — confirm this cannot leak. */
+      return OMX_ErrorUndefined;
+    }
+
+    buf_idx--;
+
+    job_node->enc_info.encode_job.dst_index = (int32_t)buf_idx;
+    p_session->auto_out_buf = OMX_TRUE;
+  }
+
+  /* sent encode cmd to OMX, queue job into ongoing queue */
+  qdata.p = job_node;
+  rc = mm_jpeg_queue_enq(&my_obj->ongoing_job_q, qdata);
+  if (rc) {
+    LOGE("jpeg enqueue failed %d", ret);
+    goto error;
+  }
+
+  p_session->encode_job = job_node->enc_info.encode_job;
+  p_session->jobId = job_node->enc_info.job_id;
+  ret = mm_jpeg_session_encode(p_session);
+  if (ret) {
+    LOGE("encode session failed");
+    goto error;
+  }
+
+  LOGH("Success X ");
+  return rc;
+
+error:
+
+  /* report the failure to the client via the registered callback */
+  if ((OMX_ErrorNone != ret) &&
+    (NULL != p_session->params.jpeg_cb)) {
+    p_session->job_status = JPEG_JOB_STATUS_ERROR;
+    LOGE("send jpeg error callback %d",
+      p_session->job_status);
+    p_session->params.jpeg_cb(p_session->job_status,
+      p_session->client_hdl,
+      p_session->jobId,
+      NULL,
+      p_session->params.userdata);
+  }
+
+  /*remove the job*/
+  mm_jpegenc_job_done(p_session);
+  LOGD("Error X ");
+
+  return rc;
+}
+
+
+
+/** mm_jpeg_jobmgr_thread:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       job manager thread main function: blocks on job_sem and
+ *       dispatches queued encode/decode/exit commands until an
+ *       EXIT node is received.
+ *
+ **/
+static void *mm_jpeg_jobmgr_thread(void *data)
+{
+  mm_jpeg_q_data_t qdata;
+  int rc = 0;
+  int running = 1;
+  uint32_t num_ongoing_jobs = 0;
+  mm_jpeg_obj *my_obj = (mm_jpeg_obj*)data;
+  mm_jpeg_job_cmd_thread_t *cmd_thread = &my_obj->job_mgr;
+  mm_jpeg_job_q_node_t* node = NULL;
+  prctl(PR_SET_NAME, (unsigned long)"mm_jpeg_thread", 0, 0, 0);
+
+  do {
+    do {
+      /* wait for work; retried only when errno == EINVAL, any other
+       * wait failure terminates the thread */
+      rc = cam_sem_wait(&cmd_thread->job_sem);
+      if (rc != 0 && errno != EINVAL) {
+        LOGE("cam_sem_wait error (%s)",
+           strerror(errno));
+        return NULL;
+      }
+    } while (rc != 0);
+
+    /* check ongoing q size */
+    num_ongoing_jobs = mm_jpeg_queue_get_size(&my_obj->ongoing_job_q);
+
+    LOGD("ongoing job  %d %d", num_ongoing_jobs, MM_JPEG_CONCURRENT_SESSIONS_COUNT);
+    if (num_ongoing_jobs >= MM_JPEG_CONCURRENT_SESSIONS_COUNT) {
+      LOGE("ongoing job already reach max %d", num_ongoing_jobs);
+      /* NOTE(review): the semaphore count was consumed but the job is
+       * left queued; it is only retried on the next sem post — confirm
+       * a post always follows job completion. */
+      continue;
+    }
+
+    pthread_mutex_lock(&my_obj->job_lock);
+    /* can go ahead with new work */
+    qdata = mm_jpeg_queue_deq(&cmd_thread->job_queue);
+    node = (mm_jpeg_job_q_node_t*)qdata.p;
+    if (node != NULL) {
+      switch (node->type) {
+      case MM_JPEG_CMD_TYPE_JOB:
+        rc = mm_jpeg_process_encoding_job(my_obj, node);
+        break;
+      case MM_JPEG_CMD_TYPE_DECODE_JOB:
+        rc = mm_jpegdec_process_decoding_job(my_obj, node);
+        break;
+      case MM_JPEG_CMD_TYPE_EXIT:
+      default:
+        /* free node */
+        free(node);
+        /* set running flag to false */
+        running = 0;
+        break;
+      }
+    }
+    pthread_mutex_unlock(&my_obj->job_lock);
+
+  } while (running);
+  return NULL;
+}
+
+/** mm_jpeg_jobmgr_thread_launch:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       launches the job manager thread
+ *
+ **/
+int32_t mm_jpeg_jobmgr_thread_launch(mm_jpeg_obj *my_obj)
+{
+  int32_t rc = 0;
+  mm_jpeg_job_cmd_thread_t *job_mgr = &my_obj->job_mgr;
+
+  cam_sem_init(&job_mgr->job_sem, 0);
+  mm_jpeg_queue_init(&job_mgr->job_queue);
+
+  /* launch the thread */
+  /* BUGFIX: check pthread_create result; previously a failure went
+   * unnoticed and pthread_setname_np ran on an invalid thread id
+   * while the function still returned success */
+  rc = pthread_create(&job_mgr->pid,
+    NULL,
+    mm_jpeg_jobmgr_thread,
+    (void *)my_obj);
+  if (rc != 0) {
+    LOGE("pthread_create failed %d", rc);
+    mm_jpeg_queue_deinit(&job_mgr->job_queue);
+    cam_sem_destroy(&job_mgr->job_sem);
+    return -1;
+  }
+  pthread_setname_np(job_mgr->pid, "CAM_jpeg_jobmgr");
+  return 0;
+}
+
+/** mm_jpeg_jobmgr_thread_release:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Releases the job manager thread: posts an EXIT command,
+ *       joins the thread and tears down its queue and semaphore.
+ *
+ **/
+int32_t mm_jpeg_jobmgr_thread_release(mm_jpeg_obj * my_obj)
+{
+  mm_jpeg_q_data_t qdata;
+  int32_t rc = 0;
+  mm_jpeg_job_cmd_thread_t * cmd_thread = &my_obj->job_mgr;
+  mm_jpeg_job_q_node_t* node =
+    (mm_jpeg_job_q_node_t *)malloc(sizeof(mm_jpeg_job_q_node_t));
+  if (NULL == node) {
+    LOGE("No memory for mm_jpeg_job_q_node_t");
+    return -1;
+  }
+
+  memset(node, 0, sizeof(mm_jpeg_job_q_node_t));
+  node->type = MM_JPEG_CMD_TYPE_EXIT;
+
+  /* the EXIT node is freed by the job manager thread */
+  qdata.p = node;
+  mm_jpeg_queue_enq(&cmd_thread->job_queue, qdata);
+  cam_sem_post(&cmd_thread->job_sem);
+
+  /* wait until cmd thread exits */
+  if (pthread_join(cmd_thread->pid, NULL) != 0) {
+    LOGD("pthread dead already");
+  }
+  mm_jpeg_queue_deinit(&cmd_thread->job_queue);
+
+  cam_sem_destroy(&cmd_thread->job_sem);
+  memset(cmd_thread, 0, sizeof(mm_jpeg_job_cmd_thread_t));
+  return rc;
+}
+
+/** mm_jpeg_alloc_workbuffer:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @work_bufs_need: number of work buffers required
+ *    @work_buf_size: size of the work buffer
+ *
+ *  Return:
+ *       greater or equal to 0 for success else failure
+ *       (on success this is the index of the last buffer allocated,
+ *       or 0 when enough buffers already existed)
+ *
+ *  Description:
+ *       Allocates ION work buffers until work_buf_cnt reaches
+ *       @work_bufs_need.
+ *
+ **/
+int32_t mm_jpeg_alloc_workbuffer(mm_jpeg_obj *my_obj,
+  uint32_t work_bufs_need,
+  uint32_t work_buf_size)
+{
+  int32_t rc = 0;
+  uint32_t i;
+  LOGH("work_bufs_need %d work_buf_cnt %d",
+    work_bufs_need, my_obj->work_buf_cnt);
+  for (i = my_obj->work_buf_cnt; i < work_bufs_need; i++) {
+    my_obj->ionBuffer[i].size = CEILING32(work_buf_size);
+    LOGH("Max picture size %d x %d, WorkBufSize = %zu",
+      my_obj->max_pic_w, my_obj->max_pic_h, my_obj->ionBuffer[i].size);
+    my_obj->ionBuffer[i].addr = (uint8_t *)buffer_allocate(&my_obj->ionBuffer[i], 1);
+    if (NULL == my_obj->ionBuffer[i].addr) {
+      LOGE("Ion allocation failed");
+      /* NOTE(review): this rollback frees every buffer below the
+       * failure point, including ones that existed before this call,
+       * and work_buf_cnt (unsigned) can underflow when more buffers
+       * are freed than were counted — confirm intended behavior. */
+      while (i--) {
+        buffer_deallocate(&my_obj->ionBuffer[i]);
+        my_obj->work_buf_cnt--;
+      }
+      return -1;
+    }
+    my_obj->work_buf_cnt++;
+    rc = i;
+  }
+ LOGH("rc %d ", rc);
+  return rc;
+}
+
+/** mm_jpeg_release_workbuffer:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @work_bufs_need: number of work buffers to release
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Releases up to @work_bufs_need of the most recently allocated
+ *       work buffers and keeps work_buf_cnt in sync.
+ *
+ *       BUGFIX: the previous loop iterated from work_buf_cnt up to
+ *       work_bufs_need, which was a no-op whenever work_buf_cnt >=
+ *       work_bufs_need (the normal case after a matching alloc call,
+ *       leaking the ION buffers) and otherwise deallocated slots that
+ *       were never allocated.
+ *
+ **/
+int32_t mm_jpeg_release_workbuffer(mm_jpeg_obj *my_obj,
+  uint32_t work_bufs_need)
+{
+  int32_t rc = 0;
+  uint32_t i;
+ LOGH("release work_bufs %d ", work_bufs_need);
+  /* free from the top of the allocated range downwards */
+  for (i = 0; (i < work_bufs_need) && (my_obj->work_buf_cnt > 0); i++) {
+    my_obj->work_buf_cnt--;
+    buffer_deallocate(&my_obj->ionBuffer[my_obj->work_buf_cnt]);
+  }
+  return rc;
+}
+
+/** mm_jpeg_init:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Initializes the jpeg client: job queue and job manager
+ *       thread, optional work buffer allocation, and the OMX core.
+ *       Each failure path unwinds everything set up so far.
+ *
+ **/
+int32_t mm_jpeg_init(mm_jpeg_obj *my_obj)
+{
+  int32_t rc = 0;
+  uint32_t work_buf_size;
+  unsigned int initial_workbufs_cnt = 1;
+
+  /* init locks */
+  pthread_mutex_init(&my_obj->job_lock, NULL);
+
+  /* init ongoing job queue */
+  rc = mm_jpeg_queue_init(&my_obj->ongoing_job_q);
+  if (0 != rc) {
+    LOGE("Error");
+    pthread_mutex_destroy(&my_obj->job_lock);
+    return -1;
+  }
+
+
+  /* init job semaphore and launch jobmgr thread */
+  LOGD("Launch jobmgr thread rc %d", rc);
+  rc = mm_jpeg_jobmgr_thread_launch(my_obj);
+  if (0 != rc) {
+    LOGE("Error");
+    mm_jpeg_queue_deinit(&my_obj->ongoing_job_q);
+    pthread_mutex_destroy(&my_obj->job_lock);
+    return -1;
+  }
+
+  /* set work buf size from max picture size */
+  if (my_obj->max_pic_w <= 0 || my_obj->max_pic_h <= 0) {
+    LOGE("Width and height are not valid "
+      "dimensions, cannot calc work buf size");
+    mm_jpeg_jobmgr_thread_release(my_obj);
+    mm_jpeg_queue_deinit(&my_obj->ongoing_job_q);
+    pthread_mutex_destroy(&my_obj->job_lock);
+    return -1;
+  }
+
+  /* allocate work buffer if reproc source buffer is not supposed to be used */
+  if (!my_obj->reuse_reproc_buffer) {
+    /* YUV 4:2:0 worst case: w * h * 3/2 with 64-aligned dimensions */
+    work_buf_size = CEILING64((uint32_t)my_obj->max_pic_w) *
+     CEILING64((uint32_t)my_obj->max_pic_h) * 3U / 2U;
+    rc = mm_jpeg_alloc_workbuffer(my_obj, initial_workbufs_cnt, work_buf_size);
+    if (rc == -1) {
+      LOGE("Work buffer allocation failure");
+      return rc;
+    }
+  }
+
+  /* load OMX */
+  if (OMX_ErrorNone != OMX_Init()) {
+    /* roll back in error case */
+    LOGE("OMX_Init failed (%d)", rc);
+    if (!my_obj->reuse_reproc_buffer) {
+      mm_jpeg_release_workbuffer(my_obj, initial_workbufs_cnt);
+    }
+    mm_jpeg_jobmgr_thread_release(my_obj);
+    mm_jpeg_queue_deinit(&my_obj->ongoing_job_q);
+    pthread_mutex_destroy(&my_obj->job_lock);
+    /* BUGFIX: previously fell through after the rollback and returned
+     * success on a half-torn-down object */
+    return -1;
+  }
+
+#ifdef LOAD_ADSP_RPC_LIB
+  my_obj->adsprpc_lib_handle = dlopen("libadsprpc.so", RTLD_NOW);
+  if (NULL == my_obj->adsprpc_lib_handle) {
+    LOGE("Cannot load the library");
+    /* not returning error here bcoz even if this loading fails
+        we can go ahead with SW JPEG enc */
+  }
+#endif
+
+  // create dummy OMX handle to avoid dlopen latency
+  OMX_GetHandle(&my_obj->dummy_handle, mm_jpeg_get_comp_name(), NULL, NULL);
+
+  return rc;
+}
+
+/** mm_jpeg_deinit:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *
+ *  Return:
+ *       0 for success else failure
+ *       (status of the last checked teardown step)
+ *
+ *  Description:
+ *       Deinits the jpeg client: stops the job manager thread,
+ *       unloads the OMX core and frees all ION work buffers.
+ *       Mirrors mm_jpeg_init.
+ *
+ **/
+int32_t mm_jpeg_deinit(mm_jpeg_obj *my_obj)
+{
+  int32_t rc = 0;
+  uint32_t i = 0;
+
+  /* release jobmgr thread */
+  rc = mm_jpeg_jobmgr_thread_release(my_obj);
+  if (0 != rc) {
+    LOGE("Error");
+  }
+
+  if (my_obj->dummy_handle) {
+    OMX_FreeHandle(my_obj->dummy_handle);
+  }
+
+  /* unload OMX engine */
+  OMX_Deinit();
+
+  /* deinit ongoing job and cb queue */
+  rc = mm_jpeg_queue_deinit(&my_obj->ongoing_job_q);
+  if (0 != rc) {
+    LOGE("Error");
+  }
+
+  for (i = 0; i < my_obj->work_buf_cnt; i++) {
+    /*Release the ION buffer*/
+    rc = buffer_deallocate(&my_obj->ionBuffer[i]);
+    if (0 != rc) {
+      LOGE("Error releasing ION buffer");
+    }
+  }
+  my_obj->work_buf_cnt = 0;
+  my_obj->jpeg_metadata = NULL;
+
+  /* destroy locks */
+  pthread_mutex_destroy(&my_obj->job_lock);
+
+  return rc;
+}
+
+/** mm_jpeg_new_client:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *
+ *  Return:
+ *       non-zero client handle on success, 0 on failure
+ *
+ *  Description:
+ *       Create new jpeg client: claims the first free client slot,
+ *       generates a handle for it and zero-initializes its sessions.
+ *
+ **/
+uint32_t mm_jpeg_new_client(mm_jpeg_obj *my_obj)
+{
+  uint32_t client_hdl = 0;
+  uint8_t slot;
+  int sess;
+
+  if (my_obj->num_clients >= MAX_JPEG_CLIENT_NUM) {
+    LOGE("num of clients reached limit");
+    return client_hdl;
+  }
+
+  /* find the first unused client slot */
+  for (slot = 0; slot < MAX_JPEG_CLIENT_NUM; slot++) {
+    if (0 == my_obj->clnt_mgr[slot].is_used) {
+      break;
+    }
+  }
+  if (slot >= MAX_JPEG_CLIENT_NUM) {
+    /* no free slot available */
+    return client_hdl;
+  }
+
+  /* claim the slot and derive the handle from its index */
+  client_hdl = mm_jpeg_util_generate_handler(slot);
+  my_obj->clnt_mgr[slot].is_used = 1;
+  my_obj->clnt_mgr[slot].client_handle = client_hdl;
+
+  pthread_mutex_init(&my_obj->clnt_mgr[slot].lock, NULL);
+  for (sess = 0; sess < MM_JPEG_MAX_SESSION; sess++) {
+    memset(&my_obj->clnt_mgr[slot].session[sess], 0x0,
+      sizeof(mm_jpeg_job_session_t));
+  }
+
+  my_obj->num_clients++;
+
+  return client_hdl;
+}
+
+#ifdef LIB2D_ROTATION_ENABLE
+/**
+ * Function: mm_jpeg_lib2d_rotation_cb
+ *
+ * Description: Callback invoked by lib2d when a queued job finishes.
+ *   Only logs; the rotation path runs lib2d in sync mode, so
+ *   presumably no completion handling is needed here — confirm.
+ *
+ * Input parameters:
+ *   userdata - App userdata
+ *   jobid - job id that is finished execution
+ *
+ * Return values:
+ *   MM_LIB2D_SUCCESS
+ *   MM_LIB2D_ERR_GENERAL
+ *
+ * Notes: none
+ **/
+lib2d_error mm_jpeg_lib2d_rotation_cb(void *userdata, int jobid)
+{
+  LOGE("_GM_ Received CB from lib2d\n");
+  return MM_LIB2D_SUCCESS;
+}
+
+/**
+ * Function: mm_jpeg_lib2d_rotation
+ *
+ * Description: Rotate the main image via lib2d (synchronous) into the
+ *   session's rotated source buffer, rewriting the job node's
+ *   dimensions and crop to match the rotated frame.
+ *
+ * Input parameters:
+ *   p_session - pointer to session
+ *   p_node - pointer to job queue node; its encode_job dims/crop are
+ *     updated in place for 90/180/270 degree rotations
+ *   p_job - pointer to job (read-only source of the original dims)
+ *   p_job_id - pointer to job id
+ *
+ * Return values:
+ *   0 - success
+ *   -1 - failure
+ *
+ * Notes:
+ *   NOTE(review): lib2d_handle from mm_lib2d_init is never released
+ *   (neither on success nor on the error paths) — looks like a
+ *   per-call handle leak; confirm against the lib2d API.
+ **/
+int32_t mm_jpeg_lib2d_rotation(mm_jpeg_job_session_t *p_session,
+  mm_jpeg_job_q_node_t* p_node, mm_jpeg_job_t *p_job, uint32_t *p_job_id)
+{
+  void *lib2d_handle = NULL;
+  lib2d_error lib2d_err = MM_LIB2D_SUCCESS;
+  mm_lib2d_buffer src_buffer;
+  mm_lib2d_buffer dst_buffer;
+  mm_jpeg_buf_t *p_src_main_buf = p_session->params.src_main_buf;
+  mm_jpeg_buf_t *p_src_rot_main_buf = p_session->src_rot_main_buf;
+  mm_jpeg_encode_job_t *p_jobparams  = &p_job->encode_job;
+  mm_jpeg_encode_job_t *p_jobparams_node = &p_node->enc_info.encode_job;
+  cam_format_t format;
+  int32_t scanline = 0;
+
+  memset(&src_buffer, 0x0, sizeof(mm_lib2d_buffer));
+  memset(&dst_buffer, 0x0, sizeof(mm_lib2d_buffer));
+
+  /* Remap the job node's dimensions/crop into the rotated frame's
+   * coordinate system; p_jobparams keeps the original values. */
+  switch (p_session->params.rotation) {
+  case 0:
+    break;
+  case 90:
+    /* width/height swap; crop rectangle remapped accordingly */
+    p_jobparams_node->main_dim.src_dim.width =
+      p_jobparams->main_dim.src_dim.height;
+    p_jobparams_node->main_dim.src_dim.height =
+      p_jobparams->main_dim.src_dim.width;
+
+    p_jobparams_node->main_dim.dst_dim.width =
+      p_jobparams->main_dim.dst_dim.height;
+    p_jobparams_node->main_dim.dst_dim.height =
+      p_jobparams->main_dim.dst_dim.width;
+
+    p_jobparams_node->main_dim.crop.width =
+      p_jobparams->main_dim.crop.height;
+    p_jobparams_node->main_dim.crop.height =
+      p_jobparams->main_dim.crop.width;
+    p_jobparams_node->main_dim.crop.left =
+      p_jobparams->main_dim.src_dim.height -
+      (p_jobparams->main_dim.crop.top +
+      p_jobparams->main_dim.crop.height);
+    p_jobparams_node->main_dim.crop.top =
+      p_jobparams->main_dim.crop.left;
+    break;
+  case 180:
+    /* dimensions unchanged; crop origin mirrored in both axes */
+    p_jobparams_node->main_dim.crop.left =
+      p_jobparams->main_dim.src_dim.width -
+      (p_jobparams->main_dim.crop.left +
+      p_jobparams->main_dim.crop.width);
+    p_jobparams_node->main_dim.crop.top =
+      p_jobparams->main_dim.src_dim.height -
+      (p_jobparams->main_dim.crop.top +
+      p_jobparams->main_dim.crop.height);
+    break;
+  case 270:
+    /* width/height swap; crop rectangle remapped accordingly */
+    p_jobparams_node->main_dim.src_dim.width =
+      p_jobparams->main_dim.src_dim.height;
+    p_jobparams_node->main_dim.src_dim.height =
+      p_jobparams->main_dim.src_dim.width;
+
+    p_jobparams_node->main_dim.dst_dim.width =
+      p_jobparams->main_dim.dst_dim.height;
+    p_jobparams_node->main_dim.dst_dim.height =
+      p_jobparams->main_dim.dst_dim.width;
+
+    p_jobparams_node->main_dim.crop.width =
+      p_jobparams->main_dim.crop.height;
+    p_jobparams_node->main_dim.crop.height =
+      p_jobparams->main_dim.crop.width;
+    p_jobparams_node->main_dim.crop.left =
+      p_jobparams->main_dim.crop.top;
+    p_jobparams_node->main_dim.crop.top =
+      p_jobparams->main_dim.src_dim.width -
+      (p_jobparams->main_dim.crop.left +
+      p_jobparams->main_dim.crop.width);
+    break;
+  }
+
+  format = mm_jpeg_get_imgfmt_from_colorfmt(p_session->params.color_format);
+  lib2d_err = mm_lib2d_init(MM_LIB2D_SYNC_MODE, format,
+    format, &lib2d_handle);
+  if (lib2d_err != MM_LIB2D_SUCCESS) {
+    LOGE("lib2d init for rotation failed\n");
+    return -1;
+  }
+
+  /* source: original (un-rotated) main image buffer; plane1 assumed to
+   * follow plane0 at stride0 * scanline (semi-planar layout) */
+  src_buffer.buffer_type = MM_LIB2D_BUFFER_TYPE_YUV;
+  src_buffer.yuv_buffer.fd =
+    p_src_main_buf[p_jobparams->src_index].fd;
+  src_buffer.yuv_buffer.format = format;
+  src_buffer.yuv_buffer.width = p_jobparams->main_dim.src_dim.width;
+  src_buffer.yuv_buffer.height = p_jobparams->main_dim.src_dim.height;
+  src_buffer.yuv_buffer.plane0 =
+    p_src_main_buf[p_jobparams->src_index].buf_vaddr;
+  src_buffer.yuv_buffer.stride0 =
+    p_src_main_buf[p_jobparams->src_index].offset.mp[0].stride;
+  scanline = p_src_main_buf[p_jobparams->src_index].offset.mp[0].scanline;
+  src_buffer.yuv_buffer.plane1 =
+    (uint8_t*)src_buffer.yuv_buffer.plane0 +
+    (src_buffer.yuv_buffer.stride0 * scanline);
+  src_buffer.yuv_buffer.stride1 = src_buffer.yuv_buffer.stride0;
+
+  LOGD(" lib2d SRC wxh = %dx%d , stxsl = %dx%d\n",
+    src_buffer.yuv_buffer.width, src_buffer.yuv_buffer.height,
+    src_buffer.yuv_buffer.stride0, scanline);
+
+  /* destination: session's rotated buffer with the remapped dims */
+  dst_buffer.buffer_type = MM_LIB2D_BUFFER_TYPE_YUV;
+  dst_buffer.yuv_buffer.fd =
+    p_src_rot_main_buf[p_jobparams->src_index].fd;
+  dst_buffer.yuv_buffer.format = format;
+  dst_buffer.yuv_buffer.width = p_jobparams_node->main_dim.src_dim.width;
+  dst_buffer.yuv_buffer.height = p_jobparams_node->main_dim.src_dim.height;
+  dst_buffer.yuv_buffer.plane0 =
+    p_src_rot_main_buf[p_jobparams->src_index].buf_vaddr;
+
+  /* for 90/270 the stride/scanline swap along with width/height */
+  if ((p_session->params.rotation == 90) ||
+    (p_session->params.rotation == 270)) {
+    dst_buffer.yuv_buffer.stride0 =
+      p_src_main_buf[p_jobparams->src_index].offset.mp[0].scanline;
+    scanline = p_src_main_buf[p_jobparams->src_index].offset.mp[0].stride;
+  } else {
+    dst_buffer.yuv_buffer.stride0 =
+      p_src_main_buf[p_jobparams->src_index].offset.mp[0].stride;
+    scanline = p_src_main_buf[p_jobparams->src_index].offset.mp[0].scanline;
+  }
+
+  dst_buffer.yuv_buffer.plane1 =
+    (uint8_t*) dst_buffer.yuv_buffer.plane0 +
+    (dst_buffer.yuv_buffer.stride0 * scanline);
+  dst_buffer.yuv_buffer.stride1 = dst_buffer.yuv_buffer.stride0;
+
+  LOGD(" lib2d DEST wxh = %dx%d , stxsl = %dx%d\n",
+    dst_buffer.yuv_buffer.width, dst_buffer.yuv_buffer.height,
+    dst_buffer.yuv_buffer.stride0, scanline);
+
+  LOGD(" lib2d rotation = %d\n", p_session->params.rotation);
+
+  /* sync mode: the rotation has completed when this returns */
+  lib2d_err = mm_lib2d_start_job(lib2d_handle, &src_buffer, &dst_buffer,
+    *p_job_id, NULL, mm_jpeg_lib2d_rotation_cb, p_session->params.rotation);
+  if (lib2d_err != MM_LIB2D_SUCCESS) {
+    LOGE("Error in mm_lib2d_start_job \n");
+    return -1;
+  }
+
+  /* cache maintenance on the rotated output before encoding */
+  buffer_clean(&p_session->src_rot_ion_buffer[p_jobparams->src_index]);
+
+  return 0;
+}
+#endif
+
+/** mm_jpeg_start_job:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @client_hdl: client handle
+ *    @job: pointer to encode job
+ *    @jobId: job id
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Start the encoding job
+ *
+ **/
+int32_t mm_jpeg_start_job(mm_jpeg_obj *my_obj,
+  mm_jpeg_job_t *job,
+  uint32_t *job_id)
+{
+  mm_jpeg_q_data_t qdata;
+  int32_t rc = -1;
+  uint8_t session_idx = 0;
+  uint8_t client_idx = 0;
+  mm_jpeg_job_q_node_t* node = NULL;
+  mm_jpeg_job_session_t *p_session = NULL;
+  mm_jpeg_encode_job_t *p_jobparams  = NULL;
+  uint32_t work_bufs_need;
+  uint32_t work_buf_size;
+
+  *job_id = 0;
+
+  if (!job) {
+    LOGE("invalid job !!!");
+    return rc;
+  }
+  p_jobparams = &job->encode_job;
+
+  /* check if valid session */
+  session_idx = GET_SESSION_IDX(p_jobparams->session_id);
+  client_idx = GET_CLIENT_IDX(p_jobparams->session_id);
+  LOGD("session_idx %d client idx %d",
+    session_idx, client_idx);
+
+  if ((session_idx >= MM_JPEG_MAX_SESSION) ||
+    (client_idx >= MAX_JPEG_CLIENT_NUM)) {
+    LOGE("invalid session id %x",
+      job->encode_job.session_id);
+    return rc;
+  }
+
+  p_session = &my_obj->clnt_mgr[client_idx].session[session_idx];
+
+  if (my_obj->reuse_reproc_buffer) {
+    p_session->work_buffer.addr           = p_jobparams->work_buf.buf_vaddr;
+    p_session->work_buffer.size           = p_jobparams->work_buf.buf_size;
+    p_session->work_buffer.ion_info_fd.fd = p_jobparams->work_buf.fd;
+    p_session->work_buffer.p_pmem_fd      = p_jobparams->work_buf.fd;
+
+    work_bufs_need = my_obj->num_sessions + 1;
+    if (work_bufs_need > MM_JPEG_CONCURRENT_SESSIONS_COUNT) {
+      work_bufs_need = MM_JPEG_CONCURRENT_SESSIONS_COUNT;
+    }
+
+    if (p_session->work_buffer.addr) {
+      work_bufs_need--;
+      LOGD("HAL passed the work buffer of size = %d; don't alloc internally",
+          p_session->work_buffer.size);
+    } else {
+      p_session->work_buffer = my_obj->ionBuffer[0];
+    }
+
+    LOGD(">>>> Work bufs need %d, %d",
+      work_bufs_need, my_obj->work_buf_cnt);
+    if (work_bufs_need) {
+      work_buf_size = CEILING64(my_obj->max_pic_w) *
+        CEILING64(my_obj->max_pic_h) * 3 / 2;
+      rc = mm_jpeg_alloc_workbuffer(my_obj, work_bufs_need, work_buf_size);
+      if (rc == -1) {
+        LOGE("Work buffer allocation failure");
+        return rc;
+      } else {
+        p_session->work_buffer = my_obj->ionBuffer[rc];
+      }
+    }
+  }
+
+  if (OMX_FALSE == p_session->active) {
+    LOGE("session not active %x",
+      job->encode_job.session_id);
+    return rc;
+  }
+
+  if ((p_jobparams->src_index >= (int32_t)p_session->params.num_src_bufs) ||
+    (p_jobparams->dst_index >= (int32_t)p_session->params.num_dst_bufs)) {
+    LOGE("invalid buffer indices");
+    return rc;
+  }
+
+  /* enqueue new job into todo job queue */
+  node = (mm_jpeg_job_q_node_t *)malloc(sizeof(mm_jpeg_job_q_node_t));
+  if (NULL == node) {
+    LOGE("No memory for mm_jpeg_job_q_node_t");
+    return -1;
+  }
+
+  KPI_ATRACE_INT("Camera:JPEG",
+      (int32_t)((uint32_t)session_idx<<16 | ++p_session->job_index));
+
+  *job_id = job->encode_job.session_id |
+    (((uint32_t)p_session->job_hist++ % JOB_HIST_MAX) << 16);
+
+  memset(node, 0, sizeof(mm_jpeg_job_q_node_t));
+  node->enc_info.encode_job = job->encode_job;
+
+#ifdef LIB2D_ROTATION_ENABLE
+  if (p_session->lib2d_rotation_flag) {
+    rc = mm_jpeg_lib2d_rotation(p_session, node, job, job_id);
+    if (rc < 0) {
+      LOGE("Lib2d rotation failed");
+      return rc;
+    }
+  }
+#endif
+
+  if (p_session->thumb_from_main) {
+    node->enc_info.encode_job.thumb_dim.src_dim =
+      node->enc_info.encode_job.main_dim.src_dim;
+    node->enc_info.encode_job.thumb_dim.crop =
+      node->enc_info.encode_job.main_dim.crop;
+    if (p_session->lib2d_rotation_flag) {
+      if ((p_session->params.rotation == 90) ||
+        (p_session->params.rotation == 270)) {
+        node->enc_info.encode_job.thumb_dim.dst_dim.width =
+          job->encode_job.thumb_dim.dst_dim.height;
+        node->enc_info.encode_job.thumb_dim.dst_dim.height =
+          job->encode_job.thumb_dim.dst_dim.width;
+      }
+    }
+  }
+  node->enc_info.job_id = *job_id;
+  node->enc_info.client_handle = p_session->client_hdl;
+  node->type = MM_JPEG_CMD_TYPE_JOB;
+
+  qdata.p = node;
+  rc = mm_jpeg_queue_enq(&my_obj->job_mgr.job_queue, qdata);
+  if (0 == rc) {
+      cam_sem_post(&my_obj->job_mgr.job_sem);
+  }
+
+  LOGH("session_idx %u client_idx %u job_id %d X",
+    session_idx, client_idx, *job_id);
+
+  return rc;
+}
+
+
+
+/** mm_jpeg_abort_job:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @jobId: job id
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Abort one encode job.  The job is looked up first in the todo
+ *       queue (not yet handed to OMX -- dropping the node is enough)
+ *       and then in the ongoing queue (currently encoding -- the OMX
+ *       session must be aborted too).  Previously rc was never set to
+ *       0, so the function contradicted its documented contract.
+ *
+ **/
+int32_t mm_jpeg_abort_job(mm_jpeg_obj *my_obj,
+  uint32_t jobId)
+{
+  int32_t rc = -1;
+  mm_jpeg_job_q_node_t *node = NULL;
+  mm_jpeg_job_session_t *p_session = NULL;
+
+  pthread_mutex_lock(&my_obj->job_lock);
+
+  /* abort job if in todo queue */
+  node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->job_mgr.job_queue, jobId);
+  if (NULL != node) {
+    free(node);
+    rc = 0;
+    goto abort_done;
+  }
+
+  /* abort job if in ongoing queue */
+  node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->ongoing_job_q, jobId);
+  if (NULL != node) {
+    /* find job that is OMX ongoing, ask OMX to abort the job */
+    p_session = mm_jpeg_get_session(my_obj, node->enc_info.job_id);
+    if (p_session) {
+      mm_jpeg_session_abort(p_session);
+    } else {
+      LOGE("Invalid job id 0x%x",
+        node->enc_info.job_id);
+    }
+    free(node);
+    rc = 0;
+    goto abort_done;
+  }
+
+abort_done:
+  pthread_mutex_unlock(&my_obj->job_lock);
+
+  return rc;
+}
+
+
+#ifdef MM_JPEG_READ_META_KEYFILE
+/** mm_jpeg_read_meta_keyfile:
+ *
+ *  Read the metadata encryption key from @filename into a freshly
+ *  allocated, NUL-terminated buffer (p_session->meta_enc_key) and set
+ *  p_session->meta_enc_keylen to the file size.
+ *
+ *  Returns 0 on success, -1 on open/size/alloc/read failure.
+ **/
+static int32_t mm_jpeg_read_meta_keyfile(mm_jpeg_job_session_t *p_session,
+    const char *filename)
+{
+  FILE *fp = NULL;
+  size_t file_size = 0;
+  size_t read_len = 0;
+  long pos = 0;
+
+  fp = fopen(filename, "r");
+  if (!fp) {
+    LOGE("Key not present");
+    return -1;
+  }
+  fseek(fp, 0, SEEK_END);
+  pos = ftell(fp);
+  if (pos < 0) {
+    /* ftell can fail (e.g. unseekable stream); was cast blindly */
+    LOGE("Cannot determine key file size");
+    fclose(fp);
+    return -1;
+  }
+  file_size = (size_t)pos;
+  fseek(fp, 0, SEEK_SET);
+
+  /* +1 so the key can be NUL-terminated below */
+  p_session->meta_enc_key = (uint8_t *) malloc((file_size + 1) * sizeof(uint8_t));
+  if (!p_session->meta_enc_key) {
+    LOGE("error");
+    fclose(fp);  /* was leaked on this path */
+    return -1;
+  }
+
+  read_len = fread(p_session->meta_enc_key, 1, file_size, fp);
+  fclose(fp);
+  if (read_len != file_size) {
+    /* short read: do not hand back a partially filled key */
+    LOGE("Short read on key file");
+    free(p_session->meta_enc_key);
+    p_session->meta_enc_key = NULL;
+    return -1;
+  }
+  p_session->meta_enc_key[file_size] = '\0';
+
+  p_session->meta_enc_keylen = file_size;
+
+  return 0;
+}
+#endif // MM_JPEG_READ_META_KEYFILE
+
+/** mm_jpeg_create_session:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @client_hdl: client handle
+ *    @p_params: pointer to encode params
+ *    @p_session_id: out: first session id of the created chain
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Create one OMX encode session (or a chain of
+ *       MM_JPEG_CONCURRENT_SESSIONS_COUNT sessions in burst mode),
+ *       allocate the shared work buffers and the shared session
+ *       handle / output buffer queues, and configure the first
+ *       session that can be configured.
+ *
+ **/
+int32_t mm_jpeg_create_session(mm_jpeg_obj *my_obj,
+  uint32_t client_hdl,
+  mm_jpeg_encode_params_t *p_params,
+  uint32_t* p_session_id)
+{
+  mm_jpeg_q_data_t qdata;
+  int32_t rc = 0;
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  uint8_t clnt_idx = 0;
+  int session_idx = -1;
+  mm_jpeg_job_session_t *p_session = NULL;
+  mm_jpeg_job_session_t * p_prev_session = NULL;
+  *p_session_id = 0;
+  uint32_t i = 0;
+  uint32_t j = 0;
+  uint32_t num_omx_sessions = 1;
+  uint32_t work_buf_size;
+  mm_jpeg_queue_t *p_session_handle_q, *p_out_buf_q;
+  uint32_t work_bufs_need;
+  char trace_tag[32];
+
+  /* validate the parameters */
+  if ((p_params->num_src_bufs > MM_JPEG_MAX_BUF)
+    || (p_params->num_dst_bufs > MM_JPEG_MAX_BUF)) {
+    LOGE("invalid num buffers");
+    return -1;
+  }
+
+  /* check if valid client */
+  clnt_idx = mm_jpeg_util_get_index_by_handler(client_hdl);
+  if (clnt_idx >= MAX_JPEG_CLIENT_NUM) {
+    LOGE("invalid client with handler (%d)", client_hdl);
+    return -1;
+  }
+
+  /* burst mode uses one OMX session per concurrent job */
+  if (p_params->burst_mode) {
+    num_omx_sessions = MM_JPEG_CONCURRENT_SESSIONS_COUNT;
+  }
+
+  /* pre-allocate shared ION work buffers unless the HAL supplies its
+   * reprocess buffer with each job (reuse_reproc_buffer path) */
+  if (!my_obj->reuse_reproc_buffer) {
+    work_bufs_need = num_omx_sessions;
+    if (work_bufs_need > MM_JPEG_CONCURRENT_SESSIONS_COUNT) {
+      work_bufs_need = MM_JPEG_CONCURRENT_SESSIONS_COUNT;
+    }
+    LOGD(">>>> Work bufs need %d", work_bufs_need);
+    /* YUV420 worst case: w*h*1.5, dimensions rounded up to 64 */
+    work_buf_size = CEILING64(my_obj->max_pic_w) *
+      CEILING64(my_obj->max_pic_h) * 3 / 2;
+    rc = mm_jpeg_alloc_workbuffer(my_obj, work_bufs_need, work_buf_size);
+    if (rc == -1) {
+      LOGE("Work buffer allocation failure");
+      return rc;
+    }
+  }
+
+
+  /* init omx handle queue */
+  p_session_handle_q = (mm_jpeg_queue_t *) malloc(sizeof(*p_session_handle_q));
+  if (NULL == p_session_handle_q) {
+    LOGE("Error");
+    goto error1;
+  }
+  rc = mm_jpeg_queue_init(p_session_handle_q);
+  if (0 != rc) {
+    LOGE("Error");
+    free(p_session_handle_q);
+    goto error1;
+  }
+
+  /* init output buf queue */
+  p_out_buf_q = (mm_jpeg_queue_t *) malloc(sizeof(*p_out_buf_q));
+  if (NULL == p_out_buf_q) {
+    LOGE("Error: Cannot allocate memory\n");
+    /* was a bare `return -1;` that leaked p_session_handle_q */
+    mm_jpeg_queue_deinit(p_session_handle_q);
+    free(p_session_handle_q);
+    goto error1;
+  }
+
+  rc = mm_jpeg_queue_init(p_out_buf_q);
+  if (0 != rc) {
+    LOGE("Error");
+    free(p_out_buf_q);
+    /* also release the session handle queue (was leaked here) */
+    mm_jpeg_queue_deinit(p_session_handle_q);
+    free(p_session_handle_q);
+    goto error1;
+  }
+
+  for (i = 0; i < num_omx_sessions; i++) {
+    uint32_t buf_idx = 0U;
+    session_idx = mm_jpeg_get_new_session_idx(my_obj, clnt_idx, &p_session);
+    if (session_idx < 0 || NULL == p_session) {
+      LOGE("invalid session id (%d)", session_idx);
+      goto error2;
+    }
+
+    snprintf(trace_tag, sizeof(trace_tag), "Camera:JPEGsession%d", session_idx);
+    ATRACE_INT(trace_tag, 1);
+
+    p_session->job_index = 0;
+
+    /* link burst sessions into a singly linked chain */
+    p_session->next_session = NULL;
+
+    if (p_prev_session) {
+      p_prev_session->next_session = p_session;
+    }
+    p_prev_session = p_session;
+
+    buf_idx = i;
+    if (buf_idx < MM_JPEG_CONCURRENT_SESSIONS_COUNT) {
+      p_session->work_buffer = my_obj->ionBuffer[buf_idx];
+    } else {
+      LOGE("Invalid Index, Setting buffer add to null");
+      p_session->work_buffer.addr = NULL;
+      p_session->work_buffer.ion_fd = -1;
+      p_session->work_buffer.p_pmem_fd = -1;
+    }
+
+    p_session->jpeg_obj = (void*)my_obj; /* save a ptr to jpeg_obj */
+
+    /*copy the params*/
+    p_session->params = *p_params;
+    ret = mm_jpeg_session_create(p_session);
+    if (OMX_ErrorNone != ret) {
+      p_session->active = OMX_FALSE;
+      LOGE("jpeg session create failed");
+      goto error2;
+    }
+
+    uint32_t session_id = (JOB_ID_MAGICVAL << 24) |
+        ((uint32_t)session_idx << 8) | clnt_idx;
+
+    /* report the first session id of the chain to the caller */
+    if (!*p_session_id) {
+      *p_session_id = session_id;
+    }
+
+    if (p_session->thumb_from_main) {
+      /* thumbnail is scaled from the main image buffers */
+      memcpy(p_session->params.src_thumb_buf, p_session->params.src_main_buf,
+        sizeof(p_session->params.src_thumb_buf));
+      p_session->params.num_tmb_bufs =  p_session->params.num_src_bufs;
+      if (!p_session->params.encode_thumbnail) {
+         p_session->params.num_tmb_bufs = 0;
+      }
+      p_session->params.thumb_dim.src_dim = p_session->params.main_dim.src_dim;
+      p_session->params.thumb_dim.crop = p_session->params.main_dim.crop;
+    }
+#ifdef LIB2D_ROTATION_ENABLE
+    if (p_session->params.rotation) {
+      LOGD("Enable lib2d rotation");
+      p_session->lib2d_rotation_flag = 1;
+    } else {
+      LOGD("Disable lib2d rotation");
+      p_session->lib2d_rotation_flag = 0;
+    }
+#else
+    p_session->lib2d_rotation_flag = 0;
+#endif
+
+    if (p_session->lib2d_rotation_flag) {
+      /* mirror the source buffer layout into ION buffers lib2d can
+       * rotate into */
+      p_session->num_src_rot_bufs = p_session->params.num_src_bufs;
+      memset(p_session->src_rot_main_buf, 0,
+        sizeof(p_session->src_rot_main_buf));
+
+      for (j = 0; j < p_session->num_src_rot_bufs; j++) {
+        p_session->src_rot_main_buf[j].buf_size =
+          p_session->params.src_main_buf[j].buf_size;
+        p_session->src_rot_main_buf[j].format =
+          p_session->params.src_main_buf[j].format;
+        p_session->src_rot_main_buf[j].index = j;
+
+        memset(&p_session->src_rot_ion_buffer[j], 0, sizeof(buffer_t));
+        p_session->src_rot_ion_buffer[j].size =
+          p_session->src_rot_main_buf[j].buf_size;
+        p_session->src_rot_ion_buffer[j].addr =
+          (uint8_t *)buffer_allocate(&p_session->src_rot_ion_buffer[j], 1);
+
+        if (NULL == p_session->src_rot_ion_buffer[j].addr) {
+          LOGE("Ion buff alloc for rotation failed");
+          /* deallocate all previously allocated rotation ion buffs;
+           * reusing j here also terminates the outer alloc loop */
+          for (j = 0; j < p_session->num_src_rot_bufs; j++) {
+            if (p_session->src_rot_ion_buffer[j].addr) {
+              buffer_deallocate(&p_session->src_rot_ion_buffer[j]);
+            }
+          }
+          //fall back to SW encoding for rotation
+          p_session->lib2d_rotation_flag = 0;
+        } else {
+          p_session->src_rot_main_buf[j].buf_vaddr =
+            p_session->src_rot_ion_buffer[j].addr;
+          p_session->src_rot_main_buf[j].fd =
+            p_session->src_rot_ion_buffer[j].p_pmem_fd;
+        }
+      }
+    }
+
+    p_session->client_hdl = client_hdl;
+    p_session->sessionId = session_id;
+    p_session->session_handle_q = p_session_handle_q;
+    p_session->out_buf_q = p_out_buf_q;
+
+    qdata.p = p_session;
+    mm_jpeg_queue_enq(p_session_handle_q, qdata);
+
+    p_session->meta_enc_key = NULL;
+    p_session->meta_enc_keylen = 0;
+
+#ifdef MM_JPEG_READ_META_KEYFILE
+    mm_jpeg_read_meta_keyfile(p_session, META_KEYFILE);
+#endif
+
+    pthread_mutex_lock(&my_obj->job_lock);
+    /* Configure session if not already configured and if
+       no other session configured*/
+    if ((OMX_FALSE == p_session->config) &&
+      (my_obj->p_session_inprogress == NULL)) {
+      rc = mm_jpeg_session_configure(p_session);
+      if (rc) {
+        LOGE("Error");
+        pthread_mutex_unlock(&my_obj->job_lock);
+        goto error2;
+      }
+      p_session->config = OMX_TRUE;
+      my_obj->p_session_inprogress = p_session;
+    }
+    pthread_mutex_unlock(&my_obj->job_lock);
+    p_session->num_omx_sessions = num_omx_sessions;
+
+    LOGH("session id %x thumb_from_main %d",
+      session_id, p_session->thumb_from_main);
+  }
+
+  // Queue the output buf indexes (1-based; 0 marks an empty queue)
+  for (i = 0; i < p_params->num_dst_bufs; i++) {
+    qdata.u32 = i + 1;
+    mm_jpeg_queue_enq(p_out_buf_q, qdata);
+  }
+
+  return rc;
+
+error1:
+  rc = -1;
+error2:
+  /* TODO(review): sessions and queues created by earlier loop
+   * iterations are not released here; confirm callers recover via
+   * mm_jpeg_close */
+  if (NULL != p_session) {
+    ATRACE_INT(trace_tag, 0);
+  }
+  return rc;
+}
+
+/** mm_jpegenc_destroy_job
+ *
+ *  Arguments:
+ *    @p_session: Session obj
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Release the per-job exif entries cached on the session and
+ *       reset the local exif count.  Returns the status of the last
+ *       release call.
+ *
+ **/
+static int32_t mm_jpegenc_destroy_job(mm_jpeg_job_session_t *p_session)
+{
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+  int32_t status = 0;
+  int idx;
+
+  LOGD("Exif entry count %d %d",
+    (int)p_jobparams->exif_info.numOfEntries,
+    (int)p_session->exif_count_local);
+
+  /* free every locally cached exif entry, logging any failure */
+  for (idx = 0; idx < p_session->exif_count_local; idx++) {
+    status = releaseExifEntry(&p_session->exif_info_local[idx]);
+    if (status) {
+      LOGE("Exif release failed (%d)", status);
+    }
+  }
+  p_session->exif_count_local = 0;
+
+  return status;
+}
+
+/** mm_jpegenc_job_done:
+ *
+ *  Arguments:
+ *    @p_session: encode session whose current job has finished
+ *
+ *  Return:
+ *       none
+ *
+ *  Description:
+ *       Post-encode cleanup for one job: releases the cached exif
+ *       entries, removes the job from the ongoing queue, recycles the
+ *       session handle (and, when output buffers are auto-managed,
+ *       the output buffer index), then wakes the job manager thread.
+ *       (The previous header here described mm_jpeg_session_encode.)
+ *
+ **/
+static void mm_jpegenc_job_done(mm_jpeg_job_session_t *p_session)
+{
+  mm_jpeg_q_data_t qdata;
+  mm_jpeg_obj *my_obj = (mm_jpeg_obj *)p_session->jpeg_obj;
+  mm_jpeg_job_q_node_t *node = NULL;
+
+  /*Destroy job related params*/
+  mm_jpegenc_destroy_job(p_session);
+
+  /*remove the job*/
+  node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->ongoing_job_q,
+    p_session->jobId);
+  if (node) {
+    free(node);
+  }
+  p_session->encoding = OMX_FALSE;
+
+  // Queue to available sessions
+  qdata.p = p_session;
+  mm_jpeg_queue_enq(p_session->session_handle_q, qdata);
+
+  if (p_session->auto_out_buf) {
+    //Queue out buf index (1-based; see mm_jpeg_create_session)
+    qdata.u32 = (uint32_t)(p_session->encode_job.dst_index + 1);
+    mm_jpeg_queue_enq(p_session->out_buf_q, qdata);
+  }
+
+  /* wake up jobMgr thread to work on new job if there is any */
+  cam_sem_post(&my_obj->job_mgr.job_sem);
+}
+
+/** mm_jpeg_destroy_session:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @p_session: head session of the (possibly chained) burst group
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Destroy the encoding session: flushes its queued and ongoing
+ *       jobs, aborts and destroys the OMX session, releases every
+ *       chained session slot, and drains and frees the shared session
+ *       handle / output buffer queues.
+ *
+ **/
+int32_t mm_jpeg_destroy_session(mm_jpeg_obj *my_obj,
+  mm_jpeg_job_session_t *p_session)
+{
+  mm_jpeg_q_data_t qdata;
+  int32_t rc = 0;
+  mm_jpeg_job_q_node_t *node = NULL;
+  uint32_t session_id = 0;
+  mm_jpeg_job_session_t *p_cur_sess;
+  char trace_tag[32];
+
+  if (NULL == p_session) {
+    LOGE("invalid session");
+    /* NOTE(review): returns 0 (success) for a NULL session — confirm
+     * callers do not expect a failure code here */
+    return rc;
+  }
+
+  session_id = p_session->sessionId;
+
+  pthread_mutex_lock(&my_obj->job_lock);
+
+  /* abort job if in todo queue */
+  LOGD("abort todo jobs");
+  node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->job_mgr.job_queue, session_id);
+  while (NULL != node) {
+    free(node);
+    node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->job_mgr.job_queue, session_id);
+  }
+
+  /* abort job if in ongoing queue */
+  LOGD("abort ongoing jobs");
+  node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->ongoing_job_q, session_id);
+  while (NULL != node) {
+    free(node);
+    node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->ongoing_job_q, session_id);
+  }
+
+  /* abort the current session */
+  mm_jpeg_session_abort(p_session);
+  mm_jpeg_session_destroy(p_session);
+
+  /* release the slot of every session in the burst chain */
+  p_cur_sess = p_session;
+
+  do {
+    mm_jpeg_remove_session_idx(my_obj, p_cur_sess->sessionId);
+  } while (NULL != (p_cur_sess = p_cur_sess->next_session));
+
+
+  pthread_mutex_unlock(&my_obj->job_lock);
+
+  /* drain and free the shared session handle queue (pointer payloads,
+   * NULL marks empty) */
+  while (1) {
+    qdata = mm_jpeg_queue_deq(p_session->session_handle_q);
+    if (NULL == qdata.p)
+      break;
+  }
+  mm_jpeg_queue_deinit(p_session->session_handle_q);
+  free(p_session->session_handle_q);
+  p_session->session_handle_q = NULL;
+
+  /* drain and free the output buffer index queue (u32 payloads are
+   * 1-based, so 0 marks empty) */
+  while (1) {
+    qdata = mm_jpeg_queue_deq(p_session->out_buf_q);
+    if (0U == qdata.u32)
+      break;
+  }
+  mm_jpeg_queue_deinit(p_session->out_buf_q);
+  free(p_session->out_buf_q);
+  p_session->out_buf_q = NULL;
+
+
+  /* wake up jobMgr thread to work on new job if there is any */
+  cam_sem_post(&my_obj->job_mgr.job_sem);
+
+  /* close out the per-session systrace counter */
+  snprintf(trace_tag, sizeof(trace_tag), "Camera:JPEGsession%d", GET_SESSION_IDX(session_id));
+  ATRACE_INT(trace_tag, 0);
+
+  LOGH("destroy session successful. X");
+
+  return rc;
+}
+
+
+
+
+/** mm_jpeg_destroy_session_unlocked:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @p_session: session to tear down (caller holds my_obj->job_lock)
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Lock-free variant of mm_jpeg_destroy_session, used from
+ *       mm_jpeg_close which already holds the job lock: flushes the
+ *       session's queued and ongoing jobs and aborts the OMX session.
+ *       NOTE(review): rc is never set to 0, so this always returns -1
+ *       even on success; the only visible caller ignores the return.
+ *
+ **/
+int32_t mm_jpeg_destroy_session_unlocked(mm_jpeg_obj *my_obj,
+  mm_jpeg_job_session_t *p_session)
+{
+  int32_t rc = -1;
+  mm_jpeg_job_q_node_t *node = NULL;
+  uint32_t session_id = 0;
+  if (NULL == p_session) {
+    LOGE("invalid session");
+    return rc;
+  }
+
+  session_id = p_session->sessionId;
+
+  /* abort job if in todo queue */
+  LOGD("abort todo jobs");
+  node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->job_mgr.job_queue, session_id);
+  while (NULL != node) {
+    free(node);
+    node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->job_mgr.job_queue, session_id);
+  }
+
+  /* abort job if in ongoing queue */
+  LOGD("abort ongoing jobs");
+  node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->ongoing_job_q, session_id);
+  while (NULL != node) {
+    free(node);
+    node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->ongoing_job_q, session_id);
+  }
+
+  /* abort the current session */
+  mm_jpeg_session_abort(p_session);
+  //mm_jpeg_remove_session_idx(my_obj, session_id);
+
+  return rc;
+}
+
+/** mm_jpeg_destroy_session_by_id:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @session_id: id of the session to destroy
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Look up the session for @session_id and destroy it.  A failed
+ *       lookup (NULL) is tolerated by mm_jpeg_destroy_session.
+ *
+ **/
+int32_t mm_jpeg_destroy_session_by_id(mm_jpeg_obj *my_obj, uint32_t session_id)
+{
+  mm_jpeg_job_session_t *p_session = mm_jpeg_get_session(my_obj, session_id);
+
+  return mm_jpeg_destroy_session(my_obj, p_session);
+}
+
+
+
+/** mm_jpeg_close:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @client_hdl: client handle
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Close the jpeg client: aborts every active session owned by
+ *       the client and invalidates the client's slot in clnt_mgr.
+ *
+ **/
+int32_t mm_jpeg_close(mm_jpeg_obj *my_obj, uint32_t client_hdl)
+{
+  int32_t rc = -1;
+  uint8_t clnt_idx = 0;
+  int i = 0;
+
+  /* check if valid client */
+  clnt_idx = mm_jpeg_util_get_index_by_handler(client_hdl);
+  if (clnt_idx >= MAX_JPEG_CLIENT_NUM) {
+    LOGE("invalid client with handler (%d)", client_hdl);
+    return rc;
+  }
+
+  LOGD("E");
+
+  /* abort all jobs from the client */
+  pthread_mutex_lock(&my_obj->job_lock);
+
+  /* unlocked variant is used because we already hold job_lock */
+  for (i = 0; i < MM_JPEG_MAX_SESSION; i++) {
+    if (OMX_TRUE == my_obj->clnt_mgr[clnt_idx].session[i].active)
+      mm_jpeg_destroy_session_unlocked(my_obj,
+        &my_obj->clnt_mgr[clnt_idx].session[i]);
+  }
+
+#ifdef LOAD_ADSP_RPC_LIB
+  /* drop the dynamically loaded ADSP RPC library, if any */
+  if (NULL != my_obj->adsprpc_lib_handle) {
+    dlclose(my_obj->adsprpc_lib_handle);
+    my_obj->adsprpc_lib_handle = NULL;
+  }
+#endif
+
+  pthread_mutex_unlock(&my_obj->job_lock);
+
+  /* invalidate client session */
+  pthread_mutex_destroy(&my_obj->clnt_mgr[clnt_idx].lock);
+  memset(&my_obj->clnt_mgr[clnt_idx], 0, sizeof(mm_jpeg_client_t));
+
+  rc = 0;
+  LOGD("X");
+  return rc;
+}
+
+/** mm_jpeg_ebd:
+ *
+ *  OMX EmptyBufferDone callback: the component has consumed an input
+ *  buffer.  Only counts the event under the session lock; the count
+ *  is bookkeeping for the session.
+ *  NOTE(review): the LOGH reads ebd_count before the lock is taken —
+ *  benign for a log line, but worth confirming.
+ **/
+OMX_ERRORTYPE mm_jpeg_ebd(OMX_HANDLETYPE hComponent,
+  OMX_PTR pAppData,
+  OMX_BUFFERHEADERTYPE *pBuffer)
+{
+  mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *) pAppData;
+
+  LOGH("count %d ", p_session->ebd_count);
+  pthread_mutex_lock(&p_session->lock);
+  p_session->ebd_count++;
+  pthread_mutex_unlock(&p_session->lock);
+  return 0;
+}
+
+/** mm_jpeg_fbd:
+ *
+ *  OMX FillBufferDone callback: a finished JPEG bitstream buffer is
+ *  available.  Updates the KPI trace counter, bails out if the session
+ *  is being aborted, optionally dumps the bitstream to file, then
+ *  delivers the result to the client via params.jpeg_cb and recycles
+ *  the job/session bookkeeping.
+ **/
+OMX_ERRORTYPE mm_jpeg_fbd(OMX_HANDLETYPE hComponent,
+  OMX_PTR pAppData,
+  OMX_BUFFERHEADERTYPE *pBuffer)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *) pAppData;
+  mm_jpeg_output_t output_buf;
+  LOGI("count %d ", p_session->fbd_count);
+  LOGI("KPI Perf] : PROFILE_JPEG_FBD");
+
+  pthread_mutex_lock(&p_session->lock);
+  /* decrement the per-session in-flight job counter in the trace */
+  KPI_ATRACE_INT("Camera:JPEG",
+      (int32_t)((uint32_t)GET_SESSION_IDX(
+        p_session->sessionId)<<16 | --p_session->job_index));
+  if (MM_JPEG_ABORT_NONE != p_session->abort_state) {
+    /* session is aborting; drop the buffer without a client callback */
+    pthread_mutex_unlock(&p_session->lock);
+    return ret;
+  }
+#ifdef MM_JPEG_DUMP_OUT_BS
+  char filename[256];
+  static int bsc;
+  snprintf(filename, sizeof(filename),
+      QCAMERA_DUMP_FRM_LOCATION"jpeg/mm_jpeg_bs%d.jpg", bsc++);
+  DUMP_TO_FILE(filename,
+    pBuffer->pBuffer,
+    (size_t)(uint32_t)pBuffer->nFilledLen);
+#endif
+
+  p_session->fbd_count++;
+  if (NULL != p_session->params.jpeg_cb) {
+
+    p_session->job_status = JPEG_JOB_STATUS_DONE;
+    output_buf.buf_filled_len = (uint32_t)pBuffer->nFilledLen;
+    output_buf.buf_vaddr = pBuffer->pBuffer;
+    output_buf.fd = -1;
+    LOGH("send jpeg callback %d buf 0x%p len %u JobID %u",
+      p_session->job_status, pBuffer->pBuffer,
+      (unsigned int)pBuffer->nFilledLen, p_session->jobId);
+    p_session->params.jpeg_cb(p_session->job_status,
+      p_session->client_hdl,
+      p_session->jobId,
+      &output_buf,
+      p_session->params.userdata);
+
+    /* recycle job/session bookkeeping and wake the job manager */
+    mm_jpegenc_job_done(p_session);
+
+    mm_jpeg_put_mem((void *)p_session);
+  }
+  pthread_mutex_unlock(&p_session->lock);
+
+  return ret;
+}
+
+
+
+/** mm_jpeg_event_handler:
+ *
+ *  OMX event callback.  Completes a pending abort handshake
+ *  (MM_JPEG_ABORT_INIT -> MM_JPEG_ABORT_DONE), turns OMX errors that
+ *  arrive during encoding into a JPEG_JOB_STATUS_ERROR client
+ *  callback, and signals completion of pending OMX state changes.
+ **/
+OMX_ERRORTYPE mm_jpeg_event_handler(OMX_HANDLETYPE hComponent,
+  OMX_PTR pAppData,
+  OMX_EVENTTYPE eEvent,
+  OMX_U32 nData1,
+  OMX_U32 nData2,
+  OMX_PTR pEventData)
+{
+  mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *) pAppData;
+
+  LOGD("%d %d %d state %d", eEvent, (int)nData1,
+    (int)nData2, p_session->abort_state);
+
+  pthread_mutex_lock(&p_session->lock);
+
+  /* an abort is in flight: acknowledge it and swallow the event */
+  if (MM_JPEG_ABORT_INIT == p_session->abort_state) {
+    p_session->abort_state = MM_JPEG_ABORT_DONE;
+    pthread_cond_signal(&p_session->cond);
+    pthread_mutex_unlock(&p_session->lock);
+    return OMX_ErrorNone;
+  }
+
+  if (eEvent == OMX_EventError) {
+    p_session->error_flag = nData2;
+    if (p_session->encoding == OMX_TRUE) {
+      LOGE("Error during encoding");
+
+      /* send jpeg callback */
+      if (NULL != p_session->params.jpeg_cb) {
+        p_session->job_status = JPEG_JOB_STATUS_ERROR;
+        LOGE("send jpeg error callback %d",
+          p_session->job_status);
+        p_session->params.jpeg_cb(p_session->job_status,
+          p_session->client_hdl,
+          p_session->jobId,
+          NULL,
+          p_session->params.userdata);
+      }
+
+      /* remove from ready queue */
+      mm_jpegenc_job_done(p_session);
+    }
+    pthread_cond_signal(&p_session->cond);
+  } else if (eEvent == OMX_EventCmdComplete) {
+    /* unblock the thread waiting on the OMX state transition */
+    if (p_session->state_change_pending == OMX_TRUE) {
+      p_session->state_change_pending = OMX_FALSE;
+      pthread_cond_signal(&p_session->cond);
+    }
+  }
+
+  pthread_mutex_unlock(&p_session->lock);
+  return OMX_ErrorNone;
+}
+
+
+
+/* remove the first job from the queue with matching client handle;
+ * the caller owns (and must free) the returned payload */
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_client_id(
+  mm_jpeg_queue_t* queue, uint32_t client_hdl)
+{
+  mm_jpeg_job_q_node_t* match = NULL;
+  struct cam_list *head = NULL;
+  struct cam_list *entry = NULL;
+
+  pthread_mutex_lock(&queue->lock);
+  head = &queue->head.list;
+  for (entry = head->next; entry != head; entry = entry->next) {
+    mm_jpeg_q_node_t* qn = member_of(entry, mm_jpeg_q_node_t, list);
+    mm_jpeg_job_q_node_t* payload = (mm_jpeg_job_q_node_t *)qn->data.p;
+
+    if (payload && (payload->enc_info.client_handle == client_hdl)) {
+      LOGH("found matching client handle");
+      match = payload;
+      cam_list_del_node(&qn->list);
+      queue->size--;
+      free(qn);
+      LOGH("queue size = %d", queue->size);
+      break;
+    }
+  }
+
+  pthread_mutex_unlock(&queue->lock);
+
+  return match;
+}
+
+/* remove the first job from the queue with matching session id;
+ * the caller owns (and must free) the returned payload */
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_session_id(
+  mm_jpeg_queue_t* queue, uint32_t session_id)
+{
+  mm_jpeg_job_q_node_t* match = NULL;
+  struct cam_list *head = NULL;
+  struct cam_list *entry = NULL;
+
+  pthread_mutex_lock(&queue->lock);
+  head = &queue->head.list;
+  for (entry = head->next; entry != head; entry = entry->next) {
+    mm_jpeg_q_node_t* qn = member_of(entry, mm_jpeg_q_node_t, list);
+    mm_jpeg_job_q_node_t* payload = (mm_jpeg_job_q_node_t *)qn->data.p;
+
+    if (payload && (payload->enc_info.encode_job.session_id == session_id)) {
+      LOGH("found matching session id");
+      match = payload;
+      cam_list_del_node(&qn->list);
+      queue->size--;
+      free(qn);
+      LOGH("queue size = %d", queue->size);
+      break;
+    }
+  }
+
+  pthread_mutex_unlock(&queue->lock);
+
+  return match;
+}
+
+/* remove job from the queue with matching job id; handles both encode
+ * and decode jobs (their ids live in different sub-structs).  Returns
+ * the detached payload (caller frees), NULL if not found or on a
+ * corrupt (NULL-data) node. */
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_job_id(
+  mm_jpeg_queue_t* queue, uint32_t job_id)
+{
+  mm_jpeg_q_node_t* node = NULL;
+  mm_jpeg_job_q_node_t* data = NULL;
+  mm_jpeg_job_q_node_t* job_node = NULL;
+  struct cam_list *head = NULL;
+  struct cam_list *pos = NULL;
+  uint32_t lq_job_id;
+
+  pthread_mutex_lock(&queue->lock);
+  head = &queue->head.list;
+  pos = head->next;
+  while(pos != head) {
+    node = member_of(pos, mm_jpeg_q_node_t, list);
+    data = (mm_jpeg_job_q_node_t *)node->data.p;
+
+    if(NULL == data) {
+      LOGE("Data is NULL");
+      pthread_mutex_unlock(&queue->lock);
+      return NULL;
+    }
+
+    /* decode and encode jobs carry their id in different sub-structs */
+    if (data->type == MM_JPEG_CMD_TYPE_DECODE_JOB) {
+      lq_job_id = data->dec_info.job_id;
+    } else {
+      lq_job_id = data->enc_info.job_id;
+    }
+
+    /* data was NULL-checked above, so only the id needs testing */
+    if (lq_job_id == job_id) {
+      LOGD("found matching job id");
+      job_node = data;
+      cam_list_del_node(&node->list);
+      queue->size--;
+      free(node);
+      break;
+    }
+    pos = pos->next;
+  }
+
+  pthread_mutex_unlock(&queue->lock);
+
+  return job_node;
+}
+
+/* remove job from the queue with matching job id */
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_unlk(
+  mm_jpeg_queue_t* queue, uint32_t job_id)
+{
+  mm_jpeg_q_node_t* node = NULL;
+  mm_jpeg_job_q_node_t* data = NULL;
+  mm_jpeg_job_q_node_t* job_node = NULL;
+  struct cam_list *head = NULL;
+  struct cam_list *pos = NULL;
+
+  head = &queue->head.list;
+  pos = head->next;
+  while(pos != head) {
+    node = member_of(pos, mm_jpeg_q_node_t, list);
+    data = (mm_jpeg_job_q_node_t *)node->data.p;
+
+    if (data && (data->enc_info.job_id == job_id)) {
+      job_node = data;
+      cam_list_del_node(&node->list);
+      queue->size--;
+      free(node);
+      break;
+    }
+    pos = pos->next;
+  }
+
+  return job_node;
+}
diff --git a/msmcobalt/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c b/msmcobalt/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c
new file mode 100644
index 0000000..e56fc24
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c
@@ -0,0 +1,652 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <pthread.h>
+#include <string.h>
+#include <math.h>
+
+// JPEG dependencies
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg.h"
+
+
/* Helpers for assembling exif payloads.  All macro parameters are fully
 * parenthesized so arbitrary expressions can be passed safely
 * (previously ROUND expanded its argument unparenthesized). */
#define LOWER(a)               ((a) & 0xFFFF)
#define UPPER(a)               (((a)>>16) & 0xFFFF)
#define CHANGE_ENDIAN_16(a)  ((0x00FF & ((a)>>8)) | (0xFF00 & ((a)<<8)))
/* Round to nearest integer.  NOTE: evaluates (a) twice -- do not pass
 * expressions with side effects.  The result is converted to uint32_t,
 * so the rounded value must be representable as unsigned. */
#define ROUND(a) \
        (((a) >= 0) ? (uint32_t)((a) + 0.5) : (uint32_t)((a) - 0.5))
+
+
+/** addExifEntry:
+ *
+ *  Arguments:
+ *   @exif_info : Exif info struct
+ *   @p_session: job session
+ *   @tagid   : exif tag ID
+ *   @type    : data type
+ *   @count   : number of data in uint of its type
+ *   @data    : input data ptr
+ *
+ *  Retrun     : int32_t type of status
+ *               0  -- success
+ *              none-zero failure code
+ *
+ *  Description:
+ *       Function to add an entry to exif data
+ *
+ **/
+int32_t addExifEntry(QOMX_EXIF_INFO *p_exif_info, exif_tag_id_t tagid,
+  exif_tag_type_t type, uint32_t count, void *data)
+{
+    int32_t rc = 0;
+    uint32_t numOfEntries = (uint32_t)p_exif_info->numOfEntries;
+    QEXIF_INFO_DATA *p_info_data = p_exif_info->exif_data;
+    if(numOfEntries >= MAX_EXIF_TABLE_ENTRIES) {
+        LOGE("Number of entries exceeded limit");
+        return -1;
+    }
+
+    p_info_data[numOfEntries].tag_id = tagid;
+    p_info_data[numOfEntries].tag_entry.type = type;
+    p_info_data[numOfEntries].tag_entry.count = count;
+    p_info_data[numOfEntries].tag_entry.copy = 1;
+    switch (type) {
+    case EXIF_BYTE: {
+      if (count > 1) {
+        uint8_t *values = (uint8_t *)malloc(count);
+        if (values == NULL) {
+          LOGE("No memory for byte array");
+          rc = -1;
+        } else {
+          memcpy(values, data, count);
+          p_info_data[numOfEntries].tag_entry.data._bytes = values;
+        }
+      } else {
+        p_info_data[numOfEntries].tag_entry.data._byte = *(uint8_t *)data;
+      }
+    }
+    break;
+    case EXIF_ASCII: {
+      char *str = NULL;
+      str = (char *)malloc(count + 1);
+      if (str == NULL) {
+        LOGE("No memory for ascii string");
+        rc = -1;
+      } else {
+        memset(str, 0, count + 1);
+        memcpy(str, data, count);
+        p_info_data[numOfEntries].tag_entry.data._ascii = str;
+      }
+    }
+    break;
+    case EXIF_SHORT: {
+      if (count > 1) {
+        uint16_t *values = (uint16_t *)malloc(count * sizeof(uint16_t));
+        if (values == NULL) {
+          LOGE("No memory for short array");
+          rc = -1;
+        } else {
+          memcpy(values, data, count * sizeof(uint16_t));
+          p_info_data[numOfEntries].tag_entry.data._shorts = values;
+        }
+      } else {
+        p_info_data[numOfEntries].tag_entry.data._short = *(uint16_t *)data;
+      }
+    }
+    break;
+    case EXIF_LONG: {
+      if (count > 1) {
+        uint32_t *values = (uint32_t *)malloc(count * sizeof(uint32_t));
+        if (values == NULL) {
+          LOGE("No memory for long array");
+          rc = -1;
+        } else {
+          memcpy(values, data, count * sizeof(uint32_t));
+          p_info_data[numOfEntries].tag_entry.data._longs = values;
+        }
+      } else {
+        p_info_data[numOfEntries].tag_entry.data._long = *(uint32_t *)data;
+      }
+    }
+    break;
+    case EXIF_RATIONAL: {
+      if (count > 1) {
+        rat_t *values = (rat_t *)malloc(count * sizeof(rat_t));
+        if (values == NULL) {
+          LOGE("No memory for rational array");
+          rc = -1;
+        } else {
+          memcpy(values, data, count * sizeof(rat_t));
+          p_info_data[numOfEntries].tag_entry.data._rats = values;
+        }
+      } else {
+        p_info_data[numOfEntries].tag_entry.data._rat = *(rat_t *)data;
+      }
+    }
+    break;
+    case EXIF_UNDEFINED: {
+      uint8_t *values = (uint8_t *)malloc(count);
+      if (values == NULL) {
+        LOGE("No memory for undefined array");
+        rc = -1;
+      } else {
+        memcpy(values, data, count);
+        p_info_data[numOfEntries].tag_entry.data._undefined = values;
+      }
+    }
+    break;
+    case EXIF_SLONG: {
+      if (count > 1) {
+        int32_t *values = (int32_t *)malloc(count * sizeof(int32_t));
+        if (values == NULL) {
+          LOGE("No memory for signed long array");
+          rc = -1;
+        } else {
+          memcpy(values, data, count * sizeof(int32_t));
+          p_info_data[numOfEntries].tag_entry.data._slongs = values;
+        }
+      } else {
+        p_info_data[numOfEntries].tag_entry.data._slong = *(int32_t *)data;
+      }
+    }
+    break;
+    case EXIF_SRATIONAL: {
+      if (count > 1) {
+        srat_t *values = (srat_t *)malloc(count * sizeof(srat_t));
+        if (values == NULL) {
+          LOGE("No memory for signed rational array");
+          rc = -1;
+        } else {
+          memcpy(values, data, count * sizeof(srat_t));
+          p_info_data[numOfEntries].tag_entry.data._srats = values;
+        }
+      } else {
+        p_info_data[numOfEntries].tag_entry.data._srat = *(srat_t *)data;
+      }
+    }
+    break;
+    }
+
+    // Increase number of entries
+    p_exif_info->numOfEntries++;
+    return rc;
+}
+
/** releaseExifEntry
 *
 *  Arguments:
 *   @p_exif_data : single exif table entry
 *
 *  Retrun     : int32_t type of status
 *               0  -- success
 *              none-zero failure code (currently always 0)
 *
 *  Description:
 *       Frees the heap-allocated payload of one exif entry previously
 *       populated by addExifEntry().  Numeric payloads are heap-allocated
 *       only when count > 1 (single values live inline in the union);
 *       ASCII and UNDEFINED payloads are always heap-allocated, hence no
 *       count check for those cases.
 *
 **/
int32_t releaseExifEntry(QEXIF_INFO_DATA *p_exif_data)
{
 switch (p_exif_data->tag_entry.type) {
  case EXIF_BYTE: {
    if (p_exif_data->tag_entry.count > 1 &&
      p_exif_data->tag_entry.data._bytes != NULL) {
      free(p_exif_data->tag_entry.data._bytes);
      p_exif_data->tag_entry.data._bytes = NULL;
    }
  }
  break;
  case EXIF_ASCII: {
    if (p_exif_data->tag_entry.data._ascii != NULL) {
      free(p_exif_data->tag_entry.data._ascii);
      p_exif_data->tag_entry.data._ascii = NULL;
    }
  }
  break;
  case EXIF_SHORT: {
    if (p_exif_data->tag_entry.count > 1 &&
      p_exif_data->tag_entry.data._shorts != NULL) {
      free(p_exif_data->tag_entry.data._shorts);
      p_exif_data->tag_entry.data._shorts = NULL;
    }
  }
  break;
  case EXIF_LONG: {
    if (p_exif_data->tag_entry.count > 1 &&
      p_exif_data->tag_entry.data._longs != NULL) {
      free(p_exif_data->tag_entry.data._longs);
      p_exif_data->tag_entry.data._longs = NULL;
    }
  }
  break;
  case EXIF_RATIONAL: {
    if (p_exif_data->tag_entry.count > 1 &&
      p_exif_data->tag_entry.data._rats != NULL) {
      free(p_exif_data->tag_entry.data._rats);
      p_exif_data->tag_entry.data._rats = NULL;
    }
  }
  break;
  case EXIF_UNDEFINED: {
    if (p_exif_data->tag_entry.data._undefined != NULL) {
      free(p_exif_data->tag_entry.data._undefined);
      p_exif_data->tag_entry.data._undefined = NULL;
    }
  }
  break;
  case EXIF_SLONG: {
    if (p_exif_data->tag_entry.count > 1 &&
      p_exif_data->tag_entry.data._slongs != NULL) {
      free(p_exif_data->tag_entry.data._slongs);
      p_exif_data->tag_entry.data._slongs = NULL;
    }
  }
  break;
  case EXIF_SRATIONAL: {
    if (p_exif_data->tag_entry.count > 1 &&
      p_exif_data->tag_entry.data._srats != NULL) {
      free(p_exif_data->tag_entry.data._srats);
      p_exif_data->tag_entry.data._srats = NULL;
    }
  }
  break;
  } /*end of switch*/

  return 0;
}
+
/** process_sensor_data:
 *
 *  Arguments:
 *   @p_sensor_params : ptr to sensor data
 *   @exif_info : Exif info struct to append entries to
 *
 *  Return     : int32_t type of status
 *               NO_ERROR  -- success
 *              none-zero failure code
 *
 *  Description:
 *       Translate sensor metadata (aperture, flash, sensing method,
 *       focal length, f-number) into exif entries.  A NULL
 *       @p_sensor_params is treated as non-fatal: the tags are simply
 *       omitted and 0 is returned.
 *
 **/
int process_sensor_data(cam_sensor_params_t *p_sensor_params,
  QOMX_EXIF_INFO *exif_info)
{
  int rc = 0;
  rat_t val_rat;

  if (NULL == p_sensor_params) {
    LOGE("Sensor params are null");
    return 0;
  }

  LOGD("From metadata aperture = %f ",
    p_sensor_params->aperture_value );

  if (p_sensor_params->aperture_value >= 1.0) {
    double apex_value;
    /* APEX aperture value: Av = 2 * log2(F-number) */
    apex_value = (double)2.0 * log(p_sensor_params->aperture_value) / log(2.0);
    /* store as a rational with two decimal digits of precision */
    val_rat.num = (uint32_t)(apex_value * 100);
    val_rat.denom = 100;
    rc = addExifEntry(exif_info, EXIFTAGID_APERTURE, EXIF_RATIONAL, 1, &val_rat);
    if (rc) {
      LOGE(": Error adding Exif Entry");
    }

    val_rat.num = (uint32_t)(p_sensor_params->aperture_value * 100);
    val_rat.denom = 100;
    rc = addExifEntry(exif_info, EXIFTAGID_F_NUMBER, EXIF_RATIONAL, 1, &val_rat);
    if (rc) {
      LOGE(": Error adding Exif Entry");
    }
  }

  /*Flash*/
  short val_short = 0;
  int flash_mode_exif, flash_fired;
  if (p_sensor_params->flash_state == CAM_FLASH_STATE_FIRED) {
    flash_fired = 1;
  } else {
    flash_fired = 0;
  }
  LOGD("Flash mode %d flash state %d",
    p_sensor_params->flash_mode, p_sensor_params->flash_state);

  switch(p_sensor_params->flash_mode) {
  case  CAM_FLASH_MODE_OFF:
    flash_mode_exif = MM_JPEG_EXIF_FLASH_MODE_OFF;
    break;
  case CAM_FLASH_MODE_ON:
    flash_mode_exif = MM_JPEG_EXIF_FLASH_MODE_ON;
    break;
  case CAM_FLASH_MODE_AUTO:
    flash_mode_exif = MM_JPEG_EXIF_FLASH_MODE_AUTO;
    break;
  default:
    /* fall back to AUTO for unsupported modes, but log it */
    flash_mode_exif = MM_JPEG_EXIF_FLASH_MODE_AUTO;
    LOGE(": Unsupported flash mode");
  }
  /* bit 0 = flash fired, mode bits shifted up per the exif Flash tag */
  val_short = (short)(flash_fired | (flash_mode_exif << 3));

  rc = addExifEntry(exif_info, EXIFTAGID_FLASH, EXIF_SHORT, 1, &val_short);
  if (rc) {
    LOGE(": Error adding flash exif entry");
  }
  /* Sensing Method (sizeof(short)/2 == 1 element) */
  val_short = (short) p_sensor_params->sensing_method;
  rc = addExifEntry(exif_info, EXIFTAGID_SENSING_METHOD, EXIF_SHORT,
    sizeof(val_short)/2, &val_short);
  if (rc) {
    LOGE(": Error adding flash Exif Entry");
  }

  /* Focal Length in 35 MM Film, rounded to nearest integer */
  val_short = (short)
    ((p_sensor_params->focal_length * p_sensor_params->crop_factor) + 0.5f);
  rc = addExifEntry(exif_info, EXIFTAGID_FOCAL_LENGTH_35MM, EXIF_SHORT,
    1, &val_short);
  if (rc) {
    LOGE(": Error adding Exif Entry");
  }

  /* F Number */
  /* NOTE(review): EXIFTAGTYPE_F_NUMBER is passed where a tag *id* is
   * expected (elsewhere EXIFTAGID_F_NUMBER is used, see aperture block
   * above) -- confirm this is intentional and not a typo. */
  val_rat.num = (uint32_t)(p_sensor_params->f_number * 100);
  val_rat.denom = 100;
  rc = addExifEntry(exif_info, EXIFTAGTYPE_F_NUMBER, EXIF_RATIONAL, 1, &val_rat);
  if (rc) {
    LOGE(": Error adding Exif Entry");
  }
  return rc;
}
+
+
/** process_3a_data:
 *
 *  Arguments:
 *   @p_3a_params : ptr to 3a data
 *   @exif_info : Exif info struct to append entries to
 *
 *  Return     : int32_t type of status
 *               NO_ERROR  -- success
 *               none-zero failure code
 *
 *  Description:
 *       Translate 3A metadata (exposure, ISO, white balance, metering,
 *       scene, brightness) into exif entries.  A NULL @p_3a_params is
 *       treated as non-fatal: the tags are omitted and 0 is returned.
 *       Unknown exposure time is encoded as the 0/0 rational.
 *
 **/
int process_3a_data(cam_3a_params_t *p_3a_params, QOMX_EXIF_INFO *exif_info)
{
  int rc = 0;
  srat_t val_srat;
  rat_t val_rat;
  double shutter_speed_value;

  if (NULL == p_3a_params) {
    LOGE("3A params are null");
    return 0;
  }

  LOGD("exp_time %f, iso_value %d, wb_mode %d",
    p_3a_params->exp_time, p_3a_params->iso_value, p_3a_params->wb_mode);

  /* Exposure time: sub-second times become 1/N, longer times N/1,
   * unknown (<= 0) becomes the 0/0 sentinel */
  if (p_3a_params->exp_time <= 0.0f) {
    val_rat.num = 0;
    val_rat.denom = 0;
  } else if (p_3a_params->exp_time < 1.0f) {
    val_rat.num = 1;
    val_rat.denom = ROUND(1.0/p_3a_params->exp_time);
  } else {
    val_rat.num = ROUND(p_3a_params->exp_time);
    val_rat.denom = 1;
  }
  LOGD("numer %d denom %d %zd", val_rat.num, val_rat.denom,
    sizeof(val_rat) / (8));

  /* sizeof(val_rat)/8 == 1 element (rat_t is two 32-bit words) */
  rc = addExifEntry(exif_info, EXIFTAGID_EXPOSURE_TIME, EXIF_RATIONAL,
    (sizeof(val_rat)/(8)), &val_rat);
  if (rc) {
    LOGE(": Error adding Exif Entry Exposure time");
  }

  /* Shutter Speed: APEX Tv = log2(1 / exposure time) */
  if (p_3a_params->exp_time > 0) {
    shutter_speed_value = log10(1/p_3a_params->exp_time)/log10(2);
    val_srat.num = (int32_t)(shutter_speed_value * 1000);
    val_srat.denom = 1000;
  } else {
    val_srat.num = 0;
    val_srat.denom = 0;
  }
  rc = addExifEntry(exif_info, EXIFTAGID_SHUTTER_SPEED, EXIF_SRATIONAL,
    (sizeof(val_srat)/(8)), &val_srat);
  if (rc) {
    LOGE(": Error adding Exif Entry");
  }

  /*ISO*/
  short val_short;
  val_short = (short)p_3a_params->iso_value;
  rc = addExifEntry(exif_info, EXIFTAGID_ISO_SPEED_RATING, EXIF_SHORT,
    sizeof(val_short)/2, &val_short);
  if (rc) {
     LOGE(": Error adding Exif Entry");
  }

  /*WB mode: exif encodes 0 = auto, 1 = manual*/
  if (p_3a_params->wb_mode == CAM_WB_MODE_AUTO)
    val_short = 0;
  else
    val_short = 1;
  rc = addExifEntry(exif_info, EXIFTAGID_WHITE_BALANCE, EXIF_SHORT,
    sizeof(val_short)/2, &val_short);
  if (rc) {
    LOGE(": Error adding Exif Entry");
  }

  /* Metering Mode   */
  val_short = (short) p_3a_params->metering_mode;
  rc = addExifEntry(exif_info,EXIFTAGID_METERING_MODE, EXIF_SHORT,
     sizeof(val_short)/2, &val_short);
  if (rc) {
     LOGE(": Error adding Exif Entry");
   }

  /*Exposure Program*/
   val_short = (short) p_3a_params->exposure_program;
   rc = addExifEntry(exif_info,EXIFTAGID_EXPOSURE_PROGRAM, EXIF_SHORT,
      sizeof(val_short)/2, &val_short);
   if (rc) {
      LOGE(": Error adding Exif Entry");
    }

   /*Exposure Mode */
    val_short = (short) p_3a_params->exposure_mode;
    rc = addExifEntry(exif_info,EXIFTAGID_EXPOSURE_MODE, EXIF_SHORT,
       sizeof(val_short)/2, &val_short);
    if (rc) {
       LOGE(": Error adding Exif Entry");
     }

    /*Scenetype*/
     uint8_t val_undef;
     val_undef = (uint8_t) p_3a_params->scenetype;
     rc = addExifEntry(exif_info,EXIFTAGID_SCENE_TYPE, EXIF_UNDEFINED,
        sizeof(val_undef), &val_undef);
     if (rc) {
        LOGE(": Error adding Exif Entry");
      }

     LOGD("brightness %f",
       p_3a_params->brightness);

    /* Brightness Value, stored with two decimal digits of precision */
     val_srat.num = (int32_t) (p_3a_params->brightness * 100.0f);
     val_srat.denom = 100;
     rc = addExifEntry(exif_info,EXIFTAGID_BRIGHTNESS, EXIF_SRATIONAL,
                 (sizeof(val_srat)/(8)), &val_srat);
     if (rc) {
        LOGE(": Error adding Exif Entry");
     }

  return rc;
}
+
/** process_meta_data
 *
 *  Arguments:
 *   @p_meta : ptr to metadata (may be NULL; cached params are then used
 *             for HAL V1)
 *   @exif_info: Exif info struct to append entries to
 *   @p_cam_exif_params: cached exif params, used as fallback when the
 *             metadata does not carry 3a/sensor info (HAL V1 only)
 *   @hal_version: CAM_HAL_V1 or V3; selects how metadata is extracted
 *
 *  Return     : int32_t type of status
 *               NO_ERROR  -- success
 *              none-zero failure code
 *
 *  Description:
 *       Extract 3a and sensor exif data from the metadata buffer and
 *       append the resulting entries to @exif_info.
 **/
int process_meta_data(metadata_buffer_t *p_meta, QOMX_EXIF_INFO *exif_info,
  mm_jpeg_exif_params_t *p_cam_exif_params, cam_hal_version_t hal_version)
{
  int rc = 0;
  cam_sensor_params_t p_sensor_params;
  cam_3a_params_t p_3a_params;
  bool is_3a_meta_valid = false, is_sensor_meta_valid = false;

  memset(&p_3a_params,  0,  sizeof(cam_3a_params_t));
  memset(&p_sensor_params, 0, sizeof(cam_sensor_params_t));

  if (p_meta) {
    /* for HAL V1: 3a and sensor info arrive as whole structs */
    if (hal_version == CAM_HAL_V1) {

      IF_META_AVAILABLE(cam_3a_params_t, l_3a_params, CAM_INTF_META_AEC_INFO,
          p_meta) {
        p_3a_params = *l_3a_params;
        is_3a_meta_valid = true;
      }

      IF_META_AVAILABLE(int32_t, wb_mode, CAM_INTF_PARM_WHITE_BALANCE, p_meta) {
        p_3a_params.wb_mode = *wb_mode;
      }

      IF_META_AVAILABLE(cam_sensor_params_t, l_sensor_params,
          CAM_INTF_META_SENSOR_INFO, p_meta) {
        p_sensor_params = *l_sensor_params;
        is_sensor_meta_valid = true;
      }
    } else {
      /* HAL V3: assemble the structs field by field from individual tags */
      IF_META_AVAILABLE(int32_t, iso, CAM_INTF_META_SENSOR_SENSITIVITY, p_meta) {
        p_3a_params.iso_value= *iso;
      } else {
        LOGE("Cannot extract Iso value");
      }

      IF_META_AVAILABLE(int64_t, sensor_exposure_time,
          CAM_INTF_META_SENSOR_EXPOSURE_TIME, p_meta) {
        /* metadata carries nanoseconds; exif wants seconds */
        p_3a_params.exp_time =
          (float)((double)(*sensor_exposure_time) / 1000000000.0);
      } else {
        LOGE("Cannot extract Exp time value");
      }

      IF_META_AVAILABLE(int32_t, wb_mode, CAM_INTF_PARM_WHITE_BALANCE, p_meta) {
        p_3a_params.wb_mode = *wb_mode;
      } else {
        LOGE("Cannot extract white balance mode");
      }

      /* Process sensor data */
      IF_META_AVAILABLE(float, aperture, CAM_INTF_META_LENS_APERTURE, p_meta) {
        p_sensor_params.aperture_value = *aperture;
      } else {
        LOGE("Cannot extract Aperture value");
      }

      IF_META_AVAILABLE(uint32_t, flash_mode, CAM_INTF_META_FLASH_MODE, p_meta) {
        p_sensor_params.flash_mode = *flash_mode;
      } else {
        LOGE("Cannot extract flash mode value");
      }

      IF_META_AVAILABLE(int32_t, flash_state, CAM_INTF_META_FLASH_STATE, p_meta) {
        p_sensor_params.flash_state = (cam_flash_state_t) *flash_state;
      } else {
        LOGE("Cannot extract flash state value");
      }
    }
  }

  /* take the cached values if meta is invalid (HAL V1 only) */
  if ((!is_3a_meta_valid) && (hal_version == CAM_HAL_V1)) {
    p_3a_params = p_cam_exif_params->cam_3a_params;
    LOGW("Warning using cached values for 3a");
  }

  if ((!is_sensor_meta_valid) && (hal_version == CAM_HAL_V1)) {
    p_sensor_params = p_cam_exif_params->sensor_params;
    LOGW("Warning using cached values for sensor");
  }

  /* YUV sensors on HAL V1 do not provide meaningful 3a exif data */
  if ((hal_version != CAM_HAL_V1) || (p_sensor_params.sens_type != CAM_SENSOR_YUV)) {
    rc = process_3a_data(&p_3a_params, exif_info);
    if (rc) {
      LOGE("Failed to add 3a exif params");
    }
  }

  rc = process_sensor_data(&p_sensor_params, exif_info);
  if (rc) {
    LOGE("Failed to extract sensor params");
  }

  if (p_meta) {
    short val_short = 0;
    cam_asd_decision_t *scene_info = NULL;

    /* scene capture type defaults to 0 when ASD info is absent */
    IF_META_AVAILABLE(cam_asd_decision_t, scene_cap_type,
        CAM_INTF_META_ASD_SCENE_INFO, p_meta) {
      scene_info = (cam_asd_decision_t*)scene_cap_type;
      val_short = (short) scene_info->detected_scene;
    }

    rc = addExifEntry(exif_info, EXIFTAGID_SCENE_CAPTURE_TYPE, EXIF_SHORT,
      sizeof(val_short)/2, &val_short);
    if (rc) {
      LOGE(": Error adding ASD Exif Entry");
    }
  } else {
    LOGE(": Error adding ASD Exif Entry, no meta");
  }
  return rc;
}
diff --git a/msmcobalt/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_interface.c b/msmcobalt/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_interface.c
new file mode 100644
index 0000000..5655c49
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_interface.c
@@ -0,0 +1,409 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// To remove
+#include <cutils/properties.h>
+
+// System dependencies
+#include <stdlib.h>
+#include <pthread.h>
+
+// JPEG dependencies
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg_interface.h"
+#include "mm_jpeg.h"
+#include "mm_jpeg_mpo.h"
+
/* Serializes access to the process-wide jpeg singleton below */
static pthread_mutex_t g_intf_lock = PTHREAD_MUTEX_INITIALIZER;
/* Process-wide jpeg object: created on first jpeg_open(), destroyed when
 * the last client closes */
static mm_jpeg_obj* g_jpeg_obj = NULL;

/* Protects the handle-generation counter */
static pthread_mutex_t g_handler_lock = PTHREAD_MUTEX_INITIALIZER;
static uint16_t g_handler_history_count = 0; /* history count for handler */
/* KPI debug level, read from persist.camera.kpi.debug in jpeg_open() */
volatile uint32_t gKpiDebugLevel = 0;
+
+/** mm_jpeg_util_generate_handler:
+ *
+ *  Arguments:
+ *    @index: client index
+ *
+ *  Return:
+ *       handle value
+ *
+ *  Description:
+ *       utility function to generate handler
+ *
+ **/
+uint32_t mm_jpeg_util_generate_handler(uint8_t index)
+{
+  uint32_t handler = 0;
+  pthread_mutex_lock(&g_handler_lock);
+  g_handler_history_count++;
+  if (0 == g_handler_history_count) {
+    g_handler_history_count++;
+  }
+  handler = g_handler_history_count;
+  handler = (handler<<8) | index;
+  pthread_mutex_unlock(&g_handler_lock);
+  return handler;
+}
+
/** mm_jpeg_util_get_index_by_handler:
 *
 *  Arguments:
 *    @handler: handle value
 *
 *  Return:
 *       client index
 *
 *  Description:
 *       Recovers the client index stored in the low byte of a handle
 *       produced by mm_jpeg_util_generate_handler().
 *
 **/
uint8_t mm_jpeg_util_get_index_by_handler(uint32_t handler)
{
  return (uint8_t)(handler & 0xFFU);
}
+
+/** mm_jpeg_intf_start_job:
+ *
+ *  Arguments:
+ *    @client_hdl: client handle
+ *    @job: jpeg job object
+ *    @jobId: job id
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       start the jpeg job
+ *
+ **/
+static int32_t mm_jpeg_intf_start_job(mm_jpeg_job_t* job, uint32_t* job_id)
+{
+  int32_t rc = -1;
+
+  if (NULL == job ||
+    NULL == job_id) {
+    LOGE("invalid parameters for job or jobId");
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_intf_lock);
+  if (NULL == g_jpeg_obj) {
+    /* mm_jpeg obj not exists, return error */
+    LOGE("mm_jpeg is not opened yet");
+    pthread_mutex_unlock(&g_intf_lock);
+    return rc;
+  }
+  rc = mm_jpeg_start_job(g_jpeg_obj, job, job_id);
+  pthread_mutex_unlock(&g_intf_lock);
+  return rc;
+}
+
+/** mm_jpeg_intf_create_session:
+ *
+ *  Arguments:
+ *    @client_hdl: client handle
+ *    @p_params: encode parameters
+ *    @p_session_id: session id
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       Create new jpeg session
+ *
+ **/
+static int32_t mm_jpeg_intf_create_session(uint32_t client_hdl,
+    mm_jpeg_encode_params_t *p_params,
+    uint32_t *p_session_id)
+{
+  int32_t rc = -1;
+
+  if (0 == client_hdl || NULL == p_params || NULL == p_session_id) {
+    LOGE("invalid client_hdl or jobId");
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_intf_lock);
+  if (NULL == g_jpeg_obj) {
+    /* mm_jpeg obj not exists, return error */
+    LOGE("mm_jpeg is not opened yet");
+    pthread_mutex_unlock(&g_intf_lock);
+    return rc;
+  }
+
+ rc = mm_jpeg_create_session(g_jpeg_obj, client_hdl, p_params, p_session_id);
+  pthread_mutex_unlock(&g_intf_lock);
+  return rc;
+}
+
+/** mm_jpeg_intf_destroy_session:
+ *
+ *  Arguments:
+ *    @session_id: session id
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       Destroy jpeg session
+ *
+ **/
+static int32_t mm_jpeg_intf_destroy_session(uint32_t session_id)
+{
+  int32_t rc = -1;
+
+  if (0 == session_id) {
+    LOGE("invalid client_hdl or jobId");
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_intf_lock);
+  if (NULL == g_jpeg_obj) {
+    /* mm_jpeg obj not exists, return error */
+    LOGE("mm_jpeg is not opened yet");
+    pthread_mutex_unlock(&g_intf_lock);
+    return rc;
+  }
+
+  rc = mm_jpeg_destroy_session_by_id(g_jpeg_obj, session_id);
+  pthread_mutex_unlock(&g_intf_lock);
+  return rc;
+}
+
+/** mm_jpeg_intf_abort_job:
+ *
+ *  Arguments:
+ *    @jobId: job id
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       Abort the jpeg job
+ *
+ **/
+static int32_t mm_jpeg_intf_abort_job(uint32_t job_id)
+{
+  int32_t rc = -1;
+
+  if (0 == job_id) {
+    LOGE("invalid jobId");
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_intf_lock);
+  if (NULL == g_jpeg_obj) {
+    /* mm_jpeg obj not exists, return error */
+    LOGE("mm_jpeg is not opened yet");
+    pthread_mutex_unlock(&g_intf_lock);
+    return rc;
+  }
+
+  rc = mm_jpeg_abort_job(g_jpeg_obj, job_id);
+  pthread_mutex_unlock(&g_intf_lock);
+  return rc;
+}
+
/** mm_jpeg_intf_close:
 *
 *  Arguments:
 *    @client_hdl: client handle
 *
 *  Return:
 *       0 success, failure otherwise
 *
 *  Description:
 *       Close one jpeg client.  When the last client goes away the
 *       global jpeg object is deinitialized and freed.
 *
 *  NOTE(review): num_clients is decremented even when mm_jpeg_close()
 *  returns an error, which can skew the client count -- confirm whether
 *  the decrement should be gated on rc == 0.
 *
 **/
static int32_t mm_jpeg_intf_close(uint32_t client_hdl)
{
  int32_t rc = -1;

  if (0 == client_hdl) {
    LOGE("invalid client_hdl");
    return rc;
  }

  pthread_mutex_lock(&g_intf_lock);
  if (NULL == g_jpeg_obj) {
    /* mm_jpeg obj not exists, return error */
    LOGE("mm_jpeg is not opened yet");
    pthread_mutex_unlock(&g_intf_lock);
    return rc;
  }

  rc = mm_jpeg_close(g_jpeg_obj, client_hdl);
  g_jpeg_obj->num_clients--;
  if(0 == rc) {
    if (0 == g_jpeg_obj->num_clients) {
      /* No client, close jpeg internally */
      rc = mm_jpeg_deinit(g_jpeg_obj);
      free(g_jpeg_obj);
      g_jpeg_obj = NULL;
    }
  }

  pthread_mutex_unlock(&g_intf_lock);
  return rc;
}
+
+/** mm_jpeg_intf_compose_mpo:
+ *
+ *  Arguments:
+ *    @mpo_info : MPO Information
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       Compose MPO image from jpeg images
+ *
+ **/
+static int32_t mm_jpeg_intf_compose_mpo(mm_jpeg_mpo_info_t *mpo_info)
+{
+  int32_t rc = -1;
+  if (!mpo_info) {
+    LOGE("Invalid input");
+    return rc;
+  }
+
+  if (mpo_info->num_of_images > MM_JPEG_MAX_MPO_IMAGES) {
+    LOGE("Num of images exceeds max supported images in MPO");
+    return rc;
+  }
+  //Call MPo composition
+  rc = mm_jpeg_mpo_compose(mpo_info);
+
+  return rc;
+}
+
/** jpeg_open:
 *
 *  Arguments:
 *    @ops: ops table pointer, filled in on success (may be NULL)
 *    @mpo_ops: mpo ops table ptr, filled in on success (may be NULL)
 *    @picture_size: Max available dim, used for work buffer sizing
 *    @jpeg_metadata: Jpeg meta data
 *
 *  Return:
 *       0 failure, non-zero client handle on success
 *
 *  Description:
 *       Open a jpeg client.  The first call creates and initializes the
 *       process-wide jpeg object; later calls only register a new
 *       client.  Jpeg meta data will be cached but memory management
 *       has to be done by the client.
 *
 **/
uint32_t jpeg_open(mm_jpeg_ops_t *ops, mm_jpeg_mpo_ops_t *mpo_ops,
  mm_dimension picture_size,
  cam_jpeg_metadata_t *jpeg_metadata)
{
  int32_t rc = 0;
  uint32_t clnt_hdl = 0;
  mm_jpeg_obj* jpeg_obj = NULL;
  char prop[PROPERTY_VALUE_MAX];

  /* refresh the global KPI debug level on every open */
  property_get("persist.camera.kpi.debug", prop, "0");
  gKpiDebugLevel = atoi(prop);

  pthread_mutex_lock(&g_intf_lock);
  /* first time open */
  if(NULL == g_jpeg_obj) {
    jpeg_obj = (mm_jpeg_obj *)malloc(sizeof(mm_jpeg_obj));
    if(NULL == jpeg_obj) {
      LOGE("no mem");
      pthread_mutex_unlock(&g_intf_lock);
      return clnt_hdl;
    }

    /* initialize jpeg obj */
    memset(jpeg_obj, 0, sizeof(mm_jpeg_obj));

    /* by default reuse reproc source buffer if available;
     * MPO composition needs its own buffers, so disable reuse then */
    if (mpo_ops == NULL) {
      jpeg_obj->reuse_reproc_buffer = 1;
    } else {
      jpeg_obj->reuse_reproc_buffer = 0;
    }
   LOGH("reuse_reproc_buffer %d ",
      jpeg_obj->reuse_reproc_buffer);

    /* used for work buf calculation */
    jpeg_obj->max_pic_w = picture_size.w;
    jpeg_obj->max_pic_h = picture_size.h;

    /*Cache OTP Data for the session; caller retains ownership*/
    if (NULL != jpeg_metadata) {
      jpeg_obj->jpeg_metadata = jpeg_metadata;
    }

    rc = mm_jpeg_init(jpeg_obj);
    if(0 != rc) {
      LOGE("mm_jpeg_init err = %d", rc);
      free(jpeg_obj);
      pthread_mutex_unlock(&g_intf_lock);
      return clnt_hdl;
    }

    /* remember in global variable */
    g_jpeg_obj = jpeg_obj;
  }

  /* open new client */
  clnt_hdl = mm_jpeg_new_client(g_jpeg_obj);
  if (clnt_hdl > 0) {
    /* valid client */
    if (NULL != ops) {
      /* fill in ops tbl if ptr not NULL */
      ops->start_job = mm_jpeg_intf_start_job;
      ops->abort_job = mm_jpeg_intf_abort_job;
      ops->create_session = mm_jpeg_intf_create_session;
      ops->destroy_session = mm_jpeg_intf_destroy_session;
      ops->close = mm_jpeg_intf_close;
    }
    if (NULL != mpo_ops) {
      mpo_ops->compose_mpo = mm_jpeg_intf_compose_mpo;
    }
  } else {
    /* failed new client */
    LOGE("mm_jpeg_new_client failed");

    /* if this call created the object and no client exists, tear it
     * back down so a later jpeg_open() starts clean */
    if (0 == g_jpeg_obj->num_clients) {
      /* no client, close jpeg */
      mm_jpeg_deinit(g_jpeg_obj);
      free(g_jpeg_obj);
      g_jpeg_obj = NULL;
    }
  }

  pthread_mutex_unlock(&g_intf_lock);
  return clnt_hdl;
}
diff --git a/msmcobalt/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_ionbuf.c b/msmcobalt/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_ionbuf.c
new file mode 100644
index 0000000..34702e7
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_ionbuf.c
@@ -0,0 +1,206 @@
+/* Copyright (c) 2013-2014, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <errno.h>
+#include <fcntl.h>
+#include <stdio.h>
+#include <string.h>
+#include <linux/msm_ion.h>
+#define MMAN_H <SYSTEM_HEADER_PREFIX/mman.h>
+#include MMAN_H
+
+// JPEG dependencies
+#include "mm_jpeg_ionbuf.h"
+
+/** buffer_allocate:
+ *
+ *  Arguments:
+ *     @p_buffer: ION buffer
+ *     @cached: non-zero to request a CPU-cached mapping
+ *
+ *  Return:
+ *     mapped buffer address, or NULL on failure
+ *
+ *  Description:
+ *      allocates an ION buffer of p_buffer->size (rounded up to a
+ *      page) and maps it into the process; on failure all fds and
+ *      handles acquired along the way are released
+ *
+ **/
+void *buffer_allocate(buffer_t *p_buffer, int cached)
+{
+  void *l_buffer = NULL;
+
+  int lrc = 0;
+  struct ion_handle_data lhandle_data;
+
+  p_buffer->alloc.len = p_buffer->size;
+  p_buffer->alloc.align = 4096;
+  p_buffer->alloc.flags = (cached) ? ION_FLAG_CACHED : 0;
+  p_buffer->alloc.heap_id_mask = 0x1 << ION_IOMMU_HEAP_ID;
+
+  p_buffer->ion_fd = open("/dev/ion", O_RDONLY);
+  if (p_buffer->ion_fd < 0) {
+    LOGE("Ion open failed");
+    goto ION_OPEN_FAILED;
+  }
+
+  /* Make it page size aligned */
+  p_buffer->alloc.len = (p_buffer->alloc.len + 4095U) & (~4095U);
+  lrc = ioctl(p_buffer->ion_fd, ION_IOC_ALLOC, &p_buffer->alloc);
+  if (lrc < 0) {
+    LOGE("ION allocation failed len %zu",
+      p_buffer->alloc.len);
+    goto ION_ALLOC_FAILED;
+  }
+
+  p_buffer->ion_info_fd.handle = p_buffer->alloc.handle;
+  lrc = ioctl(p_buffer->ion_fd, ION_IOC_SHARE,
+    &p_buffer->ion_info_fd);
+  if (lrc < 0) {
+    LOGE("ION map failed %s", strerror(errno));
+    goto ION_MAP_FAILED;
+  }
+
+  p_buffer->p_pmem_fd = p_buffer->ion_info_fd.fd;
+
+  l_buffer = mmap(NULL, p_buffer->alloc.len, PROT_READ | PROT_WRITE,
+    MAP_SHARED, p_buffer->p_pmem_fd, 0);
+
+  if (l_buffer == MAP_FAILED) {
+    LOGE("ION_MMAP_FAILED: %s (%d)",
+      strerror(errno), errno);
+    /* release the shared fd obtained from ION_IOC_SHARE */
+    close(p_buffer->ion_info_fd.fd);
+    goto ION_MAP_FAILED;
+  }
+
+  return l_buffer;
+
+ION_MAP_FAILED:
+  /* release the ION handle allocated above */
+  lhandle_data.handle = p_buffer->ion_info_fd.handle;
+  ioctl(p_buffer->ion_fd, ION_IOC_FREE, &lhandle_data);
+ION_ALLOC_FAILED:
+  /* fix: the original leaked the /dev/ion fd on every failure path
+   * after a successful open */
+  close(p_buffer->ion_fd);
+  p_buffer->ion_fd = -1;
+ION_OPEN_FAILED:
+  return NULL;
+}
+
+/** buffer_deallocate:
+ *
+ *  Arguments:
+ *     @p_buffer: ION buffer
+ *
+ *  Return:
+ *     munmap result: 0 on success, -1 on failure
+ *
+ *  Description:
+ *      unmaps and frees an ION buffer previously set up by
+ *      buffer_allocate
+ *
+ **/
+int buffer_deallocate(buffer_t *p_buffer)
+{
+  int lrc = 0;
+  /* buffer_allocate rounded the mapping up to a page boundary, so the
+   * unmap length must be rounded the same way */
+  size_t lsize = (p_buffer->size + 4095U) & (~4095U);
+
+  struct ion_handle_data lhandle_data;
+  lrc = munmap(p_buffer->addr, lsize);
+
+  /* close the shared (mapped) fd before releasing the ION handle */
+  close(p_buffer->ion_info_fd.fd);
+
+  lhandle_data.handle = p_buffer->ion_info_fd.handle;
+  ioctl(p_buffer->ion_fd, ION_IOC_FREE, &lhandle_data);
+
+  close(p_buffer->ion_fd);
+  return lrc;
+}
+
+/** buffer_invalidate:
+ *
+ *  Arguments:
+ *     @p_buffer: ION buffer
+ *
+ *  Return:
+ *     ioctl result: 0 on success, negative on failure
+ *
+ *  Description:
+ *      Invalidates the CPU cache lines covering the buffer via the
+ *      ION_IOC_INV_CACHES custom ioctl (use before CPU reads data
+ *      written by hardware)
+ *
+ **/
+int buffer_invalidate(buffer_t *p_buffer)
+{
+  struct ion_flush_data flush_data;
+  struct ion_custom_data custom_data;
+  int status;
+
+  memset(&flush_data, 0, sizeof(flush_data));
+  memset(&custom_data, 0, sizeof(custom_data));
+
+  flush_data.vaddr = p_buffer->addr;
+  flush_data.fd = p_buffer->ion_info_fd.fd;
+  flush_data.handle = p_buffer->ion_info_fd.handle;
+  flush_data.length = (unsigned int)p_buffer->size;
+
+  custom_data.cmd = (unsigned int)ION_IOC_INV_CACHES;
+  custom_data.arg = (unsigned long)&flush_data;
+
+  status = ioctl(p_buffer->ion_fd, ION_IOC_CUSTOM, &custom_data);
+  if (status < 0)
+    LOGW("Cache Invalidate failed: %s\n", strerror(errno));
+
+  return status;
+}
+
+/** buffer_clean:
+ *
+ *  Arguments:
+ *     @p_buffer: ION buffer
+ *
+ *  Return:
+ *     ioctl result: 0 on success, negative on failure
+ *
+ *  Description:
+ *      Cleans (writes back) the CPU cache lines covering the buffer
+ *      via the ION_IOC_CLEAN_CACHES custom ioctl (use after CPU
+ *      writes, before hardware reads)
+ *
+ **/
+int buffer_clean(buffer_t *p_buffer)
+{
+  struct ion_flush_data flush_data;
+  struct ion_custom_data custom_data;
+  int status;
+
+  memset(&flush_data, 0, sizeof(flush_data));
+  memset(&custom_data, 0, sizeof(custom_data));
+
+  flush_data.vaddr = p_buffer->addr;
+  flush_data.fd = p_buffer->ion_info_fd.fd;
+  flush_data.handle = p_buffer->ion_info_fd.handle;
+  flush_data.length = (unsigned int)p_buffer->size;
+
+  custom_data.cmd = (unsigned int)ION_IOC_CLEAN_CACHES;
+  custom_data.arg = (unsigned long)&flush_data;
+
+  status = ioctl(p_buffer->ion_fd, ION_IOC_CUSTOM, &custom_data);
+  if (status < 0)
+    LOGW("Cache clean failed: %s\n", strerror(errno));
+
+  return status;
+}
diff --git a/msmcobalt/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_mpo_composer.c b/msmcobalt/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_mpo_composer.c
new file mode 100644
index 0000000..fb9c222
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_mpo_composer.c
@@ -0,0 +1,414 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+
+// System dependencies
+#include <pthread.h>
+
+// JPEG dependencies
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg_mpo.h"
+
+#define M_APP0    0xe0
+#define M_APP1    0xe1
+#define M_APP2    0xe2
+#define M_EOI     0xd9
+#define M_SOI     0xd8
+
+/** READ_LONG:
+ *  @b: Buffer start addr
+ *  @o: Buffer offset to start reading
+ *
+ *  Read a 32-bit big-endian value from the specified buff addr at
+ *  the given offset
+ **/
+#define READ_LONG(b, o)  \
+  (uint32_t)(((uint32_t)(b)[(o)] << 24) + \
+  ((uint32_t)(b)[(o) + 1] << 16) + \
+  ((uint32_t)(b)[(o) + 2] << 8) + \
+  ((uint32_t)(b)[(o) + 3]))
+
+/** READ_LONG_LITTLE:
+ *  @b: Buffer start addr
+ *  @o: Buffer offset to start reading
+ *
+ *  Read a 32-bit value from the specified buff addr at the given
+ *  offset in Little Endian
+ **/
+/* fix: dropped the stray trailing ';' from the expansion and
+ * parenthesized the macro arguments so expressions passed as b/o
+ * cannot change precedence */
+#define READ_LONG_LITTLE(b, o)  \
+  (uint32_t)(((uint32_t)(b)[(o) + 3] << 24) + \
+  ((uint32_t)(b)[(o) + 2] << 16) + \
+  ((uint32_t)(b)[(o) + 1] << 8) + \
+  (uint32_t)(b)[(o)])
+
+/** READ_SHORT:
+ *  @b: Buffer start addr
+ *  @o: Buffer offset to start reading
+ *
+ *  Read a 16-bit big-endian value from the specified buff addr at
+ *  the given offset
+ **/
+#define READ_SHORT(b, o)  \
+  (uint16_t)(((uint16_t)(b)[(o)] << 8) + \
+  (uint16_t)(b)[(o) + 1])
+
+/*Mutex to serialize MPO composition*/
+static pthread_mutex_t g_mpo_lock = PTHREAD_MUTEX_INITIALIZER;
+
+/** mm_jpeg_mpo_write_long_little_endian
+ *
+ *  Arguments:
+ *    @buff_addr: image start addr
+ *    @buff_offset: offset in the buffer
+ *    @buffer_size: Size of the buffer
+ *    @value: Value to write
+ *    @overflow : Overflow flag, set to TRUE if the write would run
+ *                past the end of the buffer
+ *
+ *  Return:
+ *       None
+ *
+ *  Description:
+ *       Write a 32-bit value at the given offset in little-endian
+ *       byte order; the write is skipped once overflow is flagged
+ *
+ **/
+void mm_jpeg_mpo_write_long_little_endian(uint8_t *buff_addr, uint32_t buff_offset,
+  uint32_t buffer_size, int value, uint8_t *overflow)
+{
+  /* The 4-byte write touches buff_offset..buff_offset+3 */
+  if (buff_offset + 3 >= buffer_size) {
+    *overflow = TRUE;
+  }
+
+  if (*overflow) {
+    return;
+  }
+
+  buff_addr[buff_offset] = (uint8_t)(value & 0xFF);
+  buff_addr[buff_offset + 1] = (uint8_t)((value >> 8) & 0xFF);
+  buff_addr[buff_offset + 2] = (uint8_t)((value >> 16) & 0xFF);
+  buff_addr[buff_offset + 3] = (uint8_t)((value >> 24) & 0xFF);
+}
+
+/** mm_jpeg_mpo_write_long
+ *
+ *  Arguments:
+ *    @buff_addr: image start addr
+ *    @buff_offset: offset in the buffer
+ *    @buffer_size: Size of the buffer
+ *    @value: Value to write
+ *    @overflow : Overflow flag, set to TRUE if the write would run
+ *                past the end of the buffer
+ *
+ *  Return:
+ *       None
+ *
+ *  Description:
+ *       Write a 32-bit value at the given offset in big-endian byte
+ *       order; the write is skipped once overflow is flagged
+ *
+ **/
+void mm_jpeg_mpo_write_long(uint8_t *buff_addr, uint32_t buff_offset,
+  uint32_t buffer_size, int value, uint8_t *overflow)
+{
+  /* The 4-byte write touches buff_offset..buff_offset+3 */
+  if ((buff_offset + 3) >= buffer_size) {
+    *overflow = TRUE;
+  }
+
+  if (*overflow) {
+    return;
+  }
+
+  buff_addr[buff_offset] = (uint8_t)((value >> 24) & 0xFF);
+  buff_addr[buff_offset + 1] = (uint8_t)((value >> 16) & 0xFF);
+  buff_addr[buff_offset + 2] = (uint8_t)((value >> 8) & 0xFF);
+  buff_addr[buff_offset + 3] = (uint8_t)(value & 0xFF);
+}
+
+/** mm_jpeg_mpo_get_app_marker
+ *
+ *  Arguments:
+ *    @buffer_addr: Jpeg image start addr
+ *    @buffer_size: Size of the Buffer
+ *    @app_marker: app_marker to find (e.g. M_APP2)
+ *
+ *  Return:
+ *       Address of the byte following the marker, or NULL if the
+ *       marker is not found within the buffer
+ *
+ *  Description:
+ *       Scans the JPEG stream for the 0xFF <marker> sequence,
+ *       skipping over other marker segments using their embedded
+ *       2-byte length, until the requested marker or EOI is reached.
+ *
+ **/
+uint8_t *mm_jpeg_mpo_get_app_marker(uint8_t *buffer_addr, int buffer_size,
+  int app_marker)
+{
+  int32_t byte;
+  uint8_t *p_current_addr = NULL, *p_start_offset = NULL;
+  uint16_t app_marker_size = 0;
+
+  p_current_addr = buffer_addr;
+  do {
+    /* advance until a 0xFF marker prefix or the end of the buffer */
+    do {
+      byte = *(p_current_addr);
+      p_current_addr++;
+    }
+    while ((byte != 0xFF) &&
+      (p_current_addr < (buffer_addr + (buffer_size - 1))));
+
+    //If 0xFF is not found at all, break
+    if (byte != 0xFF) {
+      LOGD("0xFF not found");
+      break;
+    }
+
+    //Read the next byte after 0xFF
+    /* NOTE(review): when 0xFF is the last byte of the buffer this
+     * read, and the READ_SHORT below, can touch up to 2 bytes past
+     * p_current_addr before the bound is re-checked — confirm callers
+     * always pass buffers with a complete, EOI-terminated stream */
+    byte = *(p_current_addr);
+    LOGD("Byte %x", byte);
+    if (byte == app_marker) {
+      LOGD("Byte %x", byte);
+      /* return the address just past the marker byte */
+      p_start_offset = ++p_current_addr;
+      break;
+    } else if (byte != M_SOI) {
+      /* not the marker we want: skip the whole segment using its
+       * big-endian length field (length includes its own 2 bytes) */
+      app_marker_size = READ_SHORT(p_current_addr, 1);
+      LOGD("size %d", app_marker_size);
+      p_current_addr += app_marker_size;
+    }
+  }
+  while ((byte != M_EOI) &&
+    (p_current_addr < (buffer_addr + (buffer_size - 1))));
+
+  return p_start_offset;
+}
+
+/** mm_jpeg_mpo_get_mp_header
+ *
+ *  Arguments:
+ *    @app2_start_offset: app2 marker start offset
+ *
+ *  Return:
+ *       Start offset of the MP header, or NULL if the input is NULL
+ *
+ *  Description:
+ *       Get the start offset of the MP header (before the MP Endian
+ *       field). All offsets in the MP header are specified relative
+ *       to this address.
+ *
+ **/
+uint8_t *mm_jpeg_mpo_get_mp_header(uint8_t *app2_start_offset)
+{
+  if (app2_start_offset == NULL) {
+    return NULL;
+  }
+
+  /* The MP header begins right after the APP2 length field and the
+   * MP format identifier ("MPF\0") */
+  return app2_start_offset + MP_APP2_FIELD_LENGTH_BYTES +
+    MP_FORMAT_IDENTIFIER_BYTES;
+}
+
+/** mm_jpeg_mpo_update_header
+ *
+ *  Arguments:
+ *    @mpo_info: MPO Info
+ *
+ *  Return:
+ *       0 - Success
+ *       -1 - otherwise
+ *
+ *  Description:
+ *      Update the MP Index IFD of the first image with info
+ *      about all other images: the size of each individual image
+ *      and its offset relative to the MP header.
+ *
+ **/
+int mm_jpeg_mpo_update_header(mm_jpeg_mpo_info_t *mpo_info)
+{
+  uint8_t *app2_start_off_addr = NULL, *mp_headr_start_off_addr = NULL;
+  uint32_t mp_index_ifd_offset = 0, current_offset = 0, mp_entry_val_offset = 0;
+  uint8_t *aux_start_addr = NULL;
+  uint8_t overflow_flag = 0;
+  int i = 0, rc = -1;
+  uint32_t endianess = MPO_LITTLE_ENDIAN, offset_to_nxt_ifd = 8;
+  uint16_t ifd_tag_count = 0;
+
+  //Get the addr of the App Marker
+  /* only the primary image (already at the start of the output
+   * buffer) is searched for the APP2 marker */
+  app2_start_off_addr = mm_jpeg_mpo_get_app_marker(
+    mpo_info->output_buff.buf_vaddr, mpo_info->primary_image.buf_filled_len, M_APP2);
+  if (!app2_start_off_addr) {
+    LOGE("Cannot find App2 marker. MPO composition failed" );
+    return rc;
+  }
+  LOGD("app2_start_off_addr %p = %x",
+    app2_start_off_addr, *app2_start_off_addr);
+
+  //Get the addr of the MP Headr start offset.
+  //All offsets in the MP header are wrt to this addr
+  mp_headr_start_off_addr = mm_jpeg_mpo_get_mp_header(app2_start_off_addr);
+  if (!mp_headr_start_off_addr) {
+    LOGE("mp headr start offset is NULL. MPO composition failed" );
+    return rc;
+  }
+  LOGD("mp_headr_start_off_addr %x",
+    *mp_headr_start_off_addr);
+
+  /* from here on, current_offset indexes into the output buffer */
+  current_offset = mp_headr_start_off_addr - mpo_info->output_buff.buf_vaddr;
+
+  /* the MP Endian field decides how all subsequent header fields are
+   * read and written */
+  endianess = READ_LONG(mpo_info->output_buff.buf_vaddr, current_offset);
+  LOGD("Endianess %d", endianess);
+
+  //Add offset to first ifd
+  current_offset += MP_ENDIAN_BYTES;
+
+  //Read the value to get MP Index IFD.
+  if (endianess == MPO_LITTLE_ENDIAN) {
+    offset_to_nxt_ifd = READ_LONG_LITTLE(mpo_info->output_buff.buf_vaddr,
+      current_offset);
+  } else {
+    offset_to_nxt_ifd = READ_LONG(mpo_info->output_buff.buf_vaddr,
+      current_offset);
+  }
+  LOGD("offset_to_nxt_ifd %d", offset_to_nxt_ifd);
+
+  /* IFD offsets are relative to the MP header start, so rebase onto
+   * the output buffer */
+  current_offset = ((mp_headr_start_off_addr + offset_to_nxt_ifd) -
+    mpo_info->output_buff.buf_vaddr);
+  mp_index_ifd_offset = current_offset;
+  LOGD("mp_index_ifd_offset %d",
+    mp_index_ifd_offset);
+
+  //Traverse to MP Entry value
+  ifd_tag_count = READ_SHORT(mpo_info->output_buff.buf_vaddr, current_offset);
+  LOGD("Tag count in MP entry %d", ifd_tag_count);
+  current_offset += MP_INDEX_COUNT_BYTES;
+
+  /* Get MP Entry Value offset - Count * 12 (Each tag is 12 bytes)*/
+  current_offset += (ifd_tag_count * 12);
+  /*Add Offset to next IFD*/
+  current_offset += MP_INDEX_OFFSET_OF_NEXT_IFD_BYTES;
+
+  mp_entry_val_offset = current_offset;
+  LOGD("MP Entry value offset %d",
+    mp_entry_val_offset);
+
+  //Update image size for primary image
+  current_offset += MP_INDEX_ENTRY_INDIVIDUAL_IMAGE_ATTRIBUTE_BYTES;
+  if (endianess == MPO_LITTLE_ENDIAN) {
+    mm_jpeg_mpo_write_long_little_endian(mpo_info->output_buff.buf_vaddr,
+      current_offset, mpo_info->output_buff_size,
+      mpo_info->primary_image.buf_filled_len, &overflow_flag);
+  } else {
+    mm_jpeg_mpo_write_long(mpo_info->output_buff.buf_vaddr,
+      current_offset, mpo_info->output_buff_size,
+      mpo_info->primary_image.buf_filled_len, &overflow_flag);
+  }
+
+  /* auxiliary images were appended right after the primary image by
+   * mm_jpeg_mpo_compose */
+  aux_start_addr = mpo_info->output_buff.buf_vaddr +
+    mpo_info->primary_image.buf_filled_len;
+
+  for (i = 0; i < mpo_info->num_of_images - 1; i++) {
+    //Go to MP Entry val for each image
+    mp_entry_val_offset += MP_INDEX_ENTRY_VALUE_BYTES;
+    current_offset = mp_entry_val_offset;
+
+    //Update image size
+    current_offset += MP_INDEX_ENTRY_INDIVIDUAL_IMAGE_ATTRIBUTE_BYTES;
+    if (endianess == MPO_LITTLE_ENDIAN) {
+      mm_jpeg_mpo_write_long_little_endian(mpo_info->output_buff.buf_vaddr,
+        current_offset, mpo_info->output_buff_size,
+        mpo_info->aux_images[i].buf_filled_len, &overflow_flag);
+    } else {
+      mm_jpeg_mpo_write_long(mpo_info->output_buff.buf_vaddr,
+        current_offset, mpo_info->output_buff_size,
+        mpo_info->aux_images[i].buf_filled_len, &overflow_flag);
+    }
+    LOGD("aux[start_addr %x", *aux_start_addr);
+    //Update the offset
+    /* each aux image's data offset is relative to the MP header start,
+     * per the MPO spec */
+    current_offset += MP_INDEX_ENTRY_INDIVIDUAL_IMAGE_SIZE_BYTES;
+    if (endianess == MPO_LITTLE_ENDIAN) {
+      mm_jpeg_mpo_write_long_little_endian(mpo_info->output_buff.buf_vaddr,
+        current_offset, mpo_info->output_buff_size,
+        aux_start_addr - mp_headr_start_off_addr, &overflow_flag);
+    } else {
+      mm_jpeg_mpo_write_long(mpo_info->output_buff.buf_vaddr,
+        current_offset, mpo_info->output_buff_size,
+        aux_start_addr - mp_headr_start_off_addr, &overflow_flag);
+    }
+    aux_start_addr += mpo_info->aux_images[i].buf_filled_len;
+  }
+  /* any write that would have run past the output buffer sets
+   * overflow_flag and the whole composition is reported as failed */
+  if (!overflow_flag) {
+    rc = 0;
+  }
+  return rc;
+}
+
+/** mm_jpeg_mpo_compose
+ *
+ *  Arguments:
+ *    @mpo_info: MPO Info
+ *
+ *  Return:
+ *       0 - Success
+ *      -1 - otherwise
+ *
+ *  Description:
+ *      Compose MPO image from multiple JPEG images: concatenate the
+ *      primary and auxiliary JPEGs into the output buffer, then patch
+ *      the primary image's MP Index IFD with sizes/offsets.
+ *      Serialized globally via g_mpo_lock.
+ *
+ **/
+int mm_jpeg_mpo_compose(mm_jpeg_mpo_info_t *mpo_info)
+{
+  uint8_t *aux_write_offset = NULL;
+  int i = 0, rc = -1;
+
+  pthread_mutex_lock(&g_mpo_lock);
+
+  //Primary image needs to be copied to the o/p buffer if its not already
+  if (mpo_info->output_buff.buf_filled_len == 0) {
+    /* NOTE(review): this bound is strict (<) while the aux-image check
+     * below allows equality (<=) — an exact-fit primary is rejected;
+     * confirm whether that asymmetry is intentional */
+    if (mpo_info->primary_image.buf_filled_len < mpo_info->output_buff_size) {
+      memcpy(mpo_info->output_buff.buf_vaddr, mpo_info->primary_image.buf_vaddr,
+        mpo_info->primary_image.buf_filled_len);
+      mpo_info->output_buff.buf_filled_len +=
+        mpo_info->primary_image.buf_filled_len;
+    } else {
+      LOGE("O/P buffer not large enough. MPO composition failed");
+      pthread_mutex_unlock(&g_mpo_lock);
+      return rc;
+    }
+  }
+  //Append each Aux image to the buffer
+  for (i = 0; i < mpo_info->num_of_images - 1; i++) {
+    if ((mpo_info->output_buff.buf_filled_len +
+      mpo_info->aux_images[i].buf_filled_len) <= mpo_info->output_buff_size) {
+      aux_write_offset = mpo_info->output_buff.buf_vaddr +
+        mpo_info->output_buff.buf_filled_len;
+      memcpy(aux_write_offset, mpo_info->aux_images[i].buf_vaddr,
+        mpo_info->aux_images[i].buf_filled_len);
+      mpo_info->output_buff.buf_filled_len +=
+        mpo_info->aux_images[i].buf_filled_len;
+    } else {
+      LOGE("O/P buffer not large enough. MPO composition failed");
+      pthread_mutex_unlock(&g_mpo_lock);
+      return rc;
+    }
+  }
+
+  /* all images are in place; now fix up the MP Index IFD */
+  rc = mm_jpeg_mpo_update_header(mpo_info);
+  pthread_mutex_unlock(&g_mpo_lock);
+
+  return rc;
+}
diff --git a/msmcobalt/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_queue.c b/msmcobalt/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_queue.c
new file mode 100644
index 0000000..2aeb78f
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_queue.c
@@ -0,0 +1,186 @@
+/* Copyright (c) 2012-2014, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <pthread.h>
+
+// JPEG dependencies
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg.h"
+
+/** mm_jpeg_queue_init: set up an empty job queue and its mutex. */
+int32_t mm_jpeg_queue_init(mm_jpeg_queue_t* queue)
+{
+    queue->size = 0;
+    cam_list_init(&queue->head.list);
+    pthread_mutex_init(&queue->lock, NULL);
+    return 0;
+}
+
+/** mm_jpeg_queue_enq: append a job entry at the tail of the queue.
+ *  Returns 0 on success, -1 if the node allocation fails. */
+int32_t mm_jpeg_queue_enq(mm_jpeg_queue_t* queue, mm_jpeg_q_data_t data)
+{
+    mm_jpeg_q_node_t *new_node = malloc(sizeof(*new_node));
+
+    if (new_node == NULL) {
+        LOGE("No memory for mm_jpeg_q_node_t");
+        return -1;
+    }
+
+    memset(new_node, 0, sizeof(*new_node));
+    new_node->data = data;
+
+    pthread_mutex_lock(&queue->lock);
+    cam_list_add_tail_node(&new_node->list, &queue->head.list);
+    queue->size++;
+    pthread_mutex_unlock(&queue->lock);
+
+    return 0;
+}
+
+/** mm_jpeg_queue_enq_head: insert a job entry at the head of the
+ *  queue. Returns 0 on success, -1 if the node allocation fails. */
+int32_t mm_jpeg_queue_enq_head(mm_jpeg_queue_t* queue, mm_jpeg_q_data_t data)
+{
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+    mm_jpeg_q_node_t* node =
+        (mm_jpeg_q_node_t *)malloc(sizeof(mm_jpeg_q_node_t));
+    if (NULL == node) {
+        LOGE("No memory for mm_jpeg_q_node_t");
+        return -1;
+    }
+
+    memset(node, 0, sizeof(mm_jpeg_q_node_t));
+    node->data = data;
+
+    pthread_mutex_lock(&queue->lock);
+    /* fix: sample the list head only while holding the lock; the
+     * previous code read head->next before locking, racing with
+     * concurrent enq/deq/flush */
+    head = &queue->head.list;
+    pos = head->next;
+    cam_list_insert_before_node(&node->list, pos);
+    queue->size++;
+    pthread_mutex_unlock(&queue->lock);
+
+    return 0;
+}
+
+/** mm_jpeg_queue_deq: remove and return the entry at the head of the
+ *  queue. Returns a zeroed mm_jpeg_q_data_t if the queue is empty. */
+mm_jpeg_q_data_t mm_jpeg_queue_deq(mm_jpeg_queue_t* queue)
+{
+    mm_jpeg_q_data_t data;
+    mm_jpeg_q_node_t *first = NULL;
+    struct cam_list *anchor = NULL;
+
+    memset(&data, 0, sizeof(data));
+
+    pthread_mutex_lock(&queue->lock);
+    anchor = &queue->head.list;
+    if (anchor->next != anchor) {
+        first = member_of(anchor->next, mm_jpeg_q_node_t, list);
+        cam_list_del_node(&first->list);
+        queue->size--;
+    }
+    pthread_mutex_unlock(&queue->lock);
+
+    /* copy out and free outside the lock */
+    if (first != NULL) {
+        data = first->data;
+        free(first);
+    }
+
+    return data;
+}
+
+/** mm_jpeg_queue_get_size: return the current number of entries,
+ *  read under the queue lock. */
+uint32_t mm_jpeg_queue_get_size(mm_jpeg_queue_t* queue)
+{
+    uint32_t current_size;
+
+    pthread_mutex_lock(&queue->lock);
+    current_size = queue->size;
+    pthread_mutex_unlock(&queue->lock);
+
+    return current_size;
+}
+
+/** mm_jpeg_queue_deinit: drain any remaining entries, then destroy
+ *  the queue mutex. Always returns 0. */
+int32_t mm_jpeg_queue_deinit(mm_jpeg_queue_t* queue)
+{
+    mm_jpeg_queue_flush(queue);
+    pthread_mutex_destroy(&queue->lock);
+    return 0;
+}
+
+/** mm_jpeg_queue_flush: free every queued node (and its payload
+ *  pointer, if any) and reset the queue size to 0. */
+int32_t mm_jpeg_queue_flush(mm_jpeg_queue_t* queue)
+{
+    mm_jpeg_q_node_t* node = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    pthread_mutex_lock(&queue->lock);
+    head = &queue->head.list;
+    pos = head->next;
+
+    while(pos != head) {
+        node = member_of(pos, mm_jpeg_q_node_t, list);
+        /* fix: advance before deleting/freeing the node; the previous
+         * code read pos->next after free(node), a use-after-free */
+        pos = pos->next;
+        cam_list_del_node(&node->list);
+        queue->size--;
+
+        /* for now we only assume there is no ptr inside data
+         * so we free data directly */
+        if (NULL != node->data.p) {
+            free(node->data.p);
+        }
+        free(node);
+    }
+    queue->size = 0;
+    pthread_mutex_unlock(&queue->lock);
+    return 0;
+}
+
+/** mm_jpeg_queue_peek: return a copy of the head entry's data without
+ *  removing it. Returns a zeroed mm_jpeg_q_data_t if the queue is
+ *  empty. */
+mm_jpeg_q_data_t mm_jpeg_queue_peek(mm_jpeg_queue_t* queue)
+{
+    mm_jpeg_q_data_t data;
+    mm_jpeg_q_node_t *front = NULL;
+    struct cam_list *anchor = NULL;
+
+    memset(&data, 0, sizeof(data));
+
+    pthread_mutex_lock(&queue->lock);
+    anchor = &queue->head.list;
+    if (anchor->next != anchor) {
+        front = member_of(anchor->next, mm_jpeg_q_node_t, list);
+    }
+    pthread_mutex_unlock(&queue->lock);
+
+    if (front != NULL) {
+        data = front->data;
+    }
+    return data;
+}
diff --git a/msmcobalt/QCamera2/stack/mm-jpeg-interface/src/mm_jpegdec.c b/msmcobalt/QCamera2/stack/mm-jpeg-interface/src/mm_jpegdec.c
new file mode 100644
index 0000000..b4ee1dc
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-jpeg-interface/src/mm_jpegdec.c
@@ -0,0 +1,1185 @@
+/* Copyright (c) 2013-2014, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <pthread.h>
+
+// JPEG dependencies
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg_interface.h"
+#include "mm_jpeg.h"
+#include "mm_jpeg_inlines.h"
+
+OMX_ERRORTYPE mm_jpegdec_ebd(OMX_HANDLETYPE hComponent,
+  OMX_PTR pAppData,
+  OMX_BUFFERHEADERTYPE *pBuffer);
+OMX_ERRORTYPE mm_jpegdec_fbd(OMX_HANDLETYPE hComponent,
+    OMX_PTR pAppData,
+    OMX_BUFFERHEADERTYPE* pBuffer);
+OMX_ERRORTYPE mm_jpegdec_event_handler(OMX_HANDLETYPE hComponent,
+    OMX_PTR pAppData,
+    OMX_EVENTTYPE eEvent,
+    OMX_U32 nData1,
+    OMX_U32 nData2,
+    OMX_PTR pEventData);
+
+
+/** mm_jpegdec_destroy_job
+ *
+ *  Arguments:
+ *    @p_session: Session obj
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Destroy the job based paramenters. Currently nothing is
+ *       allocated per decode job; kept as a hook so teardown mirrors
+ *       the encoder path.
+ *
+ **/
+static int32_t mm_jpegdec_destroy_job(mm_jpeg_job_session_t *p_session)
+{
+  (void)p_session;
+  return 0;
+}
+
+/** mm_jpegdec_job_done:
+ *
+ *  Arguments:
+ *    @p_session: decode session
+ *
+ *  Return:
+ *       none
+ *
+ *  Description:
+ *       Finalize the job: release per-job state, remove the job from
+ *       the ongoing queue and wake the job manager thread.
+ *
+ **/
+static void mm_jpegdec_job_done(mm_jpeg_job_session_t *p_session)
+{
+  mm_jpeg_obj *my_obj = (mm_jpeg_obj *)p_session->jpeg_obj;
+  mm_jpeg_job_q_node_t *node = NULL;
+
+  /*Destroy job related params*/
+  mm_jpegdec_destroy_job(p_session);
+
+  /*remove the job*/
+  node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->ongoing_job_q,
+    p_session->jobId);
+  if (node) {
+    free(node);
+  }
+  p_session->encoding = OMX_FALSE;
+
+  /* wake up jobMgr thread to work on new job if there is any */
+  cam_sem_post(&my_obj->job_mgr.job_sem);
+}
+
+
+/** mm_jpegdec_session_send_buffers:
+ *
+ *  Arguments:
+ *    @data: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Register the session's source buffers with the OMX decoder
+ *       (input port 0 only); stops at the first OMX_UseBuffer failure
+ *
+ **/
+OMX_ERRORTYPE mm_jpegdec_session_send_buffers(void *data)
+{
+  uint32_t i = 0;
+  mm_jpeg_job_session_t* p_session = (mm_jpeg_job_session_t *)data;
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  QOMX_BUFFER_INFO lbuffer_info;
+  mm_jpeg_decode_params_t *p_params = &p_session->dec_params;
+
+  memset(&lbuffer_info, 0x0, sizeof(QOMX_BUFFER_INFO));
+  for (i = 0; i < p_params->num_src_bufs; i++) {
+    LOGD("Source buffer %d", i);
+    /* hand the ion fd to OMX so the component can map the buffer */
+    lbuffer_info.fd = (OMX_U32)p_params->src_main_buf[i].fd;
+    ret = OMX_UseBuffer(p_session->omx_handle, &(p_session->p_in_omx_buf[i]), 0,
+      &lbuffer_info, p_params->src_main_buf[i].buf_size,
+      p_params->src_main_buf[i].buf_vaddr);
+    if (ret) {
+      LOGE("Error %d", ret);
+      return ret;
+    }
+  }
+
+  LOGD("Exit");
+  return ret;
+}
+
+/** mm_jpegdec_session_free_buffers:
+ *
+ *  Arguments:
+ *    @data: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Release the session's buffer headers from the OMX decoder:
+ *       source buffers on input port 0, then destination buffers on
+ *       output port 1; stops at the first OMX_FreeBuffer failure
+ *
+ **/
+OMX_ERRORTYPE mm_jpegdec_session_free_buffers(void *data)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  uint32_t i = 0;
+  mm_jpeg_job_session_t* p_session = (mm_jpeg_job_session_t *)data;
+  mm_jpeg_decode_params_t *p_params = &p_session->dec_params;
+
+  for (i = 0; i < p_params->num_src_bufs; i++) {
+    LOGD("Source buffer %d", i);
+    ret = OMX_FreeBuffer(p_session->omx_handle, 0, p_session->p_in_omx_buf[i]);
+    if (ret) {
+      LOGE("Error %d", ret);
+      return ret;
+    }
+  }
+
+  for (i = 0; i < p_params->num_dst_bufs; i++) {
+    LOGD("Dest buffer %d", i);
+    ret = OMX_FreeBuffer(p_session->omx_handle, 1, p_session->p_out_omx_buf[i]);
+    if (ret) {
+      LOGE("Error");
+      return ret;
+    }
+  }
+  LOGD("Exit");
+  return ret;
+}
+
+/** mm_jpegdec_session_create:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error types
+ *
+ *  Description:
+ *       Create a jpeg decode session: initialize session state and
+ *       callbacks, then acquire a handle to the OMX jpeg decoder
+ *       component
+ *
+ **/
+OMX_ERRORTYPE mm_jpegdec_session_create(mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+
+  pthread_mutex_init(&p_session->lock, NULL);
+  pthread_cond_init(&p_session->cond, NULL);
+  cirq_reset(&p_session->cb_q);
+  p_session->state_change_pending = OMX_FALSE;
+  p_session->abort_state = MM_JPEG_ABORT_NONE;
+  p_session->error_flag = OMX_ErrorNone;
+  p_session->ebd_count = 0;
+  p_session->fbd_count = 0;
+  p_session->encode_pid = -1;
+  p_session->config = OMX_FALSE;
+
+  /* decoder-specific OMX callbacks */
+  p_session->omx_callbacks.EmptyBufferDone = mm_jpegdec_ebd;
+  p_session->omx_callbacks.FillBufferDone = mm_jpegdec_fbd;
+  p_session->omx_callbacks.EventHandler = mm_jpegdec_event_handler;
+  p_session->exif_count_local = 0;
+
+  rc = OMX_GetHandle(&p_session->omx_handle,
+    "OMX.qcom.image.jpeg.decoder",
+    (void *)p_session,
+    &p_session->omx_callbacks);
+
+  if (OMX_ErrorNone != rc) {
+    LOGE("OMX_GetHandle failed (%d)", rc);
+    return rc;
+  }
+  return rc;
+}
+
+/** mm_jpegdec_session_destroy:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       none
+ *
+ *  Description:
+ *       Destroy a jpeg encode session
+ *
+ **/
+void mm_jpegdec_session_destroy(mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+
+  LOGD("E");
+  if (NULL == p_session->omx_handle) {
+    LOGE("invalid handle");
+    return;
+  }
+
+  /* OMX IL requires walking back through Idle before Loaded;
+   * errors are logged but teardown continues best-effort. */
+  rc = mm_jpeg_session_change_state(p_session, OMX_StateIdle, NULL);
+  if (rc) {
+    LOGE("Error");
+  }
+
+  /* Buffers must be released during the Idle->Loaded transition,
+   * hence the free-buffers callback passed here. */
+  rc = mm_jpeg_session_change_state(p_session, OMX_StateLoaded,
+    mm_jpegdec_session_free_buffers);
+  if (rc) {
+    LOGE("Error");
+  }
+
+  rc = OMX_FreeHandle(p_session->omx_handle);
+  if (0 != rc) {
+    LOGE("OMX_FreeHandle failed (%d)", rc);
+  }
+  /* Clear the handle so a second destroy call bails out early above. */
+  p_session->omx_handle = NULL;
+
+
+  pthread_mutex_destroy(&p_session->lock);
+  pthread_cond_destroy(&p_session->cond);
+  LOGD("X");
+}
+
+/** mm_jpeg_session_config_port:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure OMX ports
+ *
+ **/
+OMX_ERRORTYPE mm_jpegdec_session_config_ports(mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_decode_params_t *p_params = &p_session->dec_params;
+  mm_jpeg_decode_job_t *p_jobparams = &p_session->decode_job;
+
+  mm_jpeg_buf_t *p_src_buf =
+    &p_params->src_main_buf[p_jobparams->src_index];
+
+  /* Port indices must be set before OMX_GetParameter so the component
+   * knows which port definition to return (0 = input, 1 = output). */
+  p_session->inputPort.nPortIndex = 0;
+  p_session->outputPort.nPortIndex = 1;
+
+
+  ret = OMX_GetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+    &p_session->inputPort);
+  if (ret) {
+    LOGE("failed");
+    return ret;
+  }
+
+  /* The output port definition is only fetched (cached) here; it is
+   * committed later, in mm_jpegdec_session_decode, after the component
+   * raises OMX_EventPortSettingsChanged. */
+  ret = OMX_GetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+    &p_session->outputPort);
+  if (ret) {
+    LOGE("failed");
+    return ret;
+  }
+
+  /* Fill input-port geometry/format from the source buffer of the
+   * current job and commit it. */
+  p_session->inputPort.format.image.nFrameWidth =
+    (OMX_U32)p_jobparams->main_dim.src_dim.width;
+  p_session->inputPort.format.image.nFrameHeight =
+    (OMX_U32)p_jobparams->main_dim.src_dim.height;
+  p_session->inputPort.format.image.nStride =
+    p_src_buf->offset.mp[0].stride;
+  p_session->inputPort.format.image.nSliceHeight =
+    (OMX_U32)p_src_buf->offset.mp[0].scanline;
+  p_session->inputPort.format.image.eColorFormat =
+    map_jpeg_format(p_params->color_format);
+  p_session->inputPort.nBufferSize =
+    p_params->src_main_buf[p_jobparams->src_index].buf_size;
+  p_session->inputPort.nBufferCountActual = (OMX_U32)p_params->num_src_bufs;
+  ret = OMX_SetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+    &p_session->inputPort);
+  if (ret) {
+    LOGE("failed");
+    return ret;
+  }
+
+  return ret;
+}
+
+
+/** mm_jpegdec_session_config_main:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure main image
+ *
+ **/
+OMX_ERRORTYPE mm_jpegdec_session_config_main(mm_jpeg_job_session_t *p_session)
+{
+  /* Port configuration is the only main-image setup performed today;
+   * crop configuration remains a TODO. */
+  LOGD("config port");
+  OMX_ERRORTYPE rc = mm_jpegdec_session_config_ports(p_session);
+  if (OMX_ErrorNone != rc) {
+    LOGE("config port failed");
+  }
+  return rc;
+}
+
+/** mm_jpeg_session_configure:
+ *
+ *  Arguments:
+ *    @data: encode session
+ *
+ *  Return:
+ *       none
+ *
+ *  Description:
+ *       Configure the session
+ *
+ **/
+static OMX_ERRORTYPE mm_jpegdec_session_configure(mm_jpeg_job_session_t *p_session)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+
+  LOGD("E ");
+
+  /* Bail out immediately if the session was aborted by the client. */
+  MM_JPEG_CHK_ABORT(p_session, ret, error);
+
+  /* config main img */
+  ret = mm_jpegdec_session_config_main(p_session);
+  if (OMX_ErrorNone != ret) {
+    LOGE("config main img failed");
+    goto error;
+  }
+
+  /* TODO: common config (if needed) */
+
+  /* Loaded->Idle allocates/registers buffers via the send-buffers
+   * callback, as required by the OMX IL state model. */
+  ret = mm_jpeg_session_change_state(p_session, OMX_StateIdle,
+    mm_jpegdec_session_send_buffers);
+  if (ret) {
+    LOGE("change state to idle failed %d", ret);
+    goto error;
+  }
+
+  ret = mm_jpeg_session_change_state(p_session, OMX_StateExecuting,
+    NULL);
+  if (ret) {
+    LOGE("change state to executing failed %d", ret);
+    goto error;
+  }
+
+/* Note: the success path intentionally falls through this label; it
+ * only logs and returns ret (OMX_ErrorNone on success). */
+error:
+  LOGD("X ret %d", ret);
+  return ret;
+}
+
+/* Send OMX_CommandPortEnable for nPortIndex; if wait is OMX_TRUE, block
+ * until the event handler signals and verify it was CmdComplete.
+ * NOTE(review): the cond wait below is guarded by 'if', not 'while', so a
+ * spurious wakeup would proceed early. A plain 'while (event_pending)' is
+ * NOT a drop-in fix: on OMX_EventError the handler signals without
+ * clearing event_pending, which would deadlock the loop — confirm handler
+ * semantics before changing. */
+static OMX_ERRORTYPE mm_jpeg_session_port_enable(
+    mm_jpeg_job_session_t *p_session,
+    OMX_U32 nPortIndex,
+    OMX_BOOL wait)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  OMX_EVENTTYPE lEvent;
+
+  /* Arm the event flag before issuing the command to avoid missing a
+   * fast completion. */
+  pthread_mutex_lock(&p_session->lock);
+  p_session->event_pending = OMX_TRUE;
+  pthread_mutex_unlock(&p_session->lock);
+
+  ret = OMX_SendCommand(p_session->omx_handle, OMX_CommandPortEnable,
+      nPortIndex, NULL);
+
+  if (ret) {
+    LOGE("failed");
+    return ret;
+  }
+
+  if (wait == OMX_TRUE) {
+    // Wait for cmd complete
+    pthread_mutex_lock(&p_session->lock);
+    if (p_session->event_pending == OMX_TRUE) {
+      LOGD("before wait");
+      pthread_cond_wait(&p_session->cond, &p_session->lock);
+      lEvent = p_session->omxEvent;
+      LOGD("after wait");
+    }
+    /* Re-read covers the already-completed (no-wait) case. */
+    lEvent = p_session->omxEvent;
+    pthread_mutex_unlock(&p_session->lock);
+
+    if (lEvent != OMX_EventCmdComplete) {
+      LOGD("Unexpected event %d",lEvent);
+      return OMX_ErrorUndefined;
+    }
+  }
+  return OMX_ErrorNone;
+}
+
+/* Send OMX_CommandPortDisable for nPortIndex; if wait is OMX_TRUE, block
+ * until the event handler signals and verify it was CmdComplete.
+ * NOTE(review): same spurious-wakeup caveat as mm_jpeg_session_port_enable
+ * — the 'if' guard cannot be changed to 'while (event_pending)' without
+ * also clearing event_pending on OMX_EventError in the handler. */
+static OMX_ERRORTYPE mm_jpeg_session_port_disable(
+    mm_jpeg_job_session_t *p_session,
+    OMX_U32 nPortIndex,
+    OMX_BOOL wait)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  OMX_EVENTTYPE lEvent;
+
+  /* Arm the event flag before issuing the command to avoid missing a
+   * fast completion. */
+  pthread_mutex_lock(&p_session->lock);
+  p_session->event_pending = OMX_TRUE;
+  pthread_mutex_unlock(&p_session->lock);
+
+  ret = OMX_SendCommand(p_session->omx_handle, OMX_CommandPortDisable,
+      nPortIndex, NULL);
+
+  if (ret) {
+    LOGE("failed");
+    return ret;
+  }
+  if (wait == OMX_TRUE) {
+    // Wait for cmd complete
+    pthread_mutex_lock(&p_session->lock);
+    if (p_session->event_pending == OMX_TRUE) {
+      LOGD("before wait");
+      pthread_cond_wait(&p_session->cond, &p_session->lock);
+
+      LOGD("after wait");
+    }
+    lEvent = p_session->omxEvent;
+    pthread_mutex_unlock(&p_session->lock);
+
+    if (lEvent != OMX_EventCmdComplete) {
+      LOGD("Unexpected event %d",lEvent);
+      return OMX_ErrorUndefined;
+    }
+  }
+  return OMX_ErrorNone;
+}
+
+
+/** mm_jpegdec_session_decode:
+ *
+ *  Arguments:
+ *    @p_session: encode session
+ *
+ *  Return:
+ *       OMX_ERRORTYPE
+ *
+ *  Description:
+ *       Start the encoding
+ *
+ **/
+/* Drive one decode: configure (once per session), submit the input
+ * buffer, then handle the mandatory OMX port-reconfiguration sequence
+ * (PortSettingsChanged -> disable output port -> update port definition
+ * -> enable port -> OMX_UseBuffer -> FillThisBuffer). Completion is
+ * reported asynchronously via mm_jpegdec_fbd.
+ * NOTE(review): the cond waits below use 'if' guards (spurious-wakeup
+ * prone; see mm_jpeg_session_port_enable), and the failure returns after
+ * port-disable exit directly instead of via the error label, leaving the
+ * output port disabled — confirm intent before restructuring. */
+static OMX_ERRORTYPE mm_jpegdec_session_decode(mm_jpeg_job_session_t *p_session)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_decode_params_t *p_params = &p_session->dec_params;
+  mm_jpeg_decode_job_t *p_jobparams = &p_session->decode_job;
+  OMX_EVENTTYPE lEvent;
+  uint32_t i;
+  QOMX_BUFFER_INFO lbuffer_info;
+
+  pthread_mutex_lock(&p_session->lock);
+  p_session->abort_state = MM_JPEG_ABORT_NONE;
+  p_session->encoding = OMX_FALSE;
+  pthread_mutex_unlock(&p_session->lock);
+
+  /* One-time session configuration (ports + Idle/Executing states). */
+  if (OMX_FALSE == p_session->config) {
+    ret = mm_jpegdec_session_configure(p_session);
+    if (ret) {
+      LOGE("Error");
+      goto error;
+    }
+    p_session->config = OMX_TRUE;
+  }
+
+  pthread_mutex_lock(&p_session->lock);
+  p_session->encoding = OMX_TRUE;
+  pthread_mutex_unlock(&p_session->lock);
+
+  MM_JPEG_CHK_ABORT(p_session, ret, error);
+
+  p_session->event_pending = OMX_TRUE;
+
+  /* Hand the compressed input buffer to the component. */
+  ret = OMX_EmptyThisBuffer(p_session->omx_handle,
+    p_session->p_in_omx_buf[p_jobparams->src_index]);
+  if (ret) {
+    LOGE("Error");
+    goto error;
+  }
+
+  // Wait for port settings changed
+  pthread_mutex_lock(&p_session->lock);
+  if (p_session->event_pending == OMX_TRUE) {
+    LOGD("before wait");
+    pthread_cond_wait(&p_session->cond, &p_session->lock);
+  }
+  lEvent = p_session->omxEvent;
+  LOGD("after wait");
+  pthread_mutex_unlock(&p_session->lock);
+
+  if (lEvent != OMX_EventPortSettingsChanged) {
+    LOGD("Unexpected event %d",lEvent);
+    goto error;
+  }
+
+  // Disable output port (wait)
+  mm_jpeg_session_port_disable(p_session,
+      p_session->outputPort.nPortIndex,
+      OMX_TRUE);
+
+  // Get port definition
+  ret = OMX_GetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+      &p_session->outputPort);
+  if (ret) {
+    LOGE("failed");
+    return ret;
+  }
+
+  // Set port definition
+  p_session->outputPort.format.image.nFrameWidth =
+    (OMX_U32)p_jobparams->main_dim.dst_dim.width;
+  p_session->outputPort.format.image.nFrameHeight =
+    (OMX_U32)p_jobparams->main_dim.dst_dim.height;
+  p_session->outputPort.format.image.eColorFormat =
+    map_jpeg_format(p_params->color_format);
+
+  p_session->outputPort.nBufferSize =
+     p_params->dest_buf[p_jobparams->dst_index].buf_size;
+   p_session->outputPort.nBufferCountActual = (OMX_U32)p_params->num_dst_bufs;
+
+   p_session->outputPort.format.image.nSliceHeight =
+       (OMX_U32)
+       p_params->dest_buf[p_jobparams->dst_index].offset.mp[0].scanline;
+   p_session->outputPort.format.image.nStride =
+       p_params->dest_buf[p_jobparams->dst_index].offset.mp[0].stride;
+
+   ret = OMX_SetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+     &p_session->outputPort);
+   if (ret) {
+     LOGE("failed");
+     return ret;
+   }
+
+  // Enable port (no wait)
+  mm_jpeg_session_port_enable(p_session,
+      p_session->outputPort.nPortIndex,
+      OMX_FALSE);
+
+  memset(&lbuffer_info, 0x0, sizeof(QOMX_BUFFER_INFO));
+  /* Register the client-owned destination buffers with the component;
+   * the enable command completes only once all buffers are supplied. */
+  // Use buffers
+  for (i = 0; i < p_params->num_dst_bufs; i++) {
+    lbuffer_info.fd = (OMX_U32)p_params->dest_buf[i].fd;
+    LOGD("Dest buffer %d", (unsigned int)i);
+    ret = OMX_UseBuffer(p_session->omx_handle, &(p_session->p_out_omx_buf[i]),
+        1, &lbuffer_info, p_params->dest_buf[i].buf_size,
+        p_params->dest_buf[i].buf_vaddr);
+    if (ret) {
+      LOGE("Error");
+      return ret;
+    }
+  }
+
+  // Wait for port enable completion
+  pthread_mutex_lock(&p_session->lock);
+  if (p_session->event_pending == OMX_TRUE) {
+    LOGD("before wait");
+    pthread_cond_wait(&p_session->cond, &p_session->lock);
+    lEvent = p_session->omxEvent;
+    LOGD("after wait");
+  }
+  lEvent = p_session->omxEvent;
+  pthread_mutex_unlock(&p_session->lock);
+
+  if (lEvent != OMX_EventCmdComplete) {
+    LOGD("Unexpected event %d",lEvent);
+    goto error;
+  }
+
+  /* Kick off the actual decode into the destination buffer; completion
+   * arrives asynchronously in mm_jpegdec_fbd. */
+  ret = OMX_FillThisBuffer(p_session->omx_handle,
+    p_session->p_out_omx_buf[p_jobparams->dst_index]);
+  if (ret) {
+    LOGE("Error");
+    goto error;
+  }
+
+  MM_JPEG_CHK_ABORT(p_session, ret, error);
+
+/* Success path also falls through here; the label only logs. */
+error:
+
+  LOGD("X ");
+  return ret;
+}
+
+/** mm_jpegdec_process_decoding_job:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg client
+ *    @job_node: job node
+ *
+ *  Return:
+ *       0 for success -1 otherwise
+ *
+ *  Description:
+ *       Start the encoding job
+ *
+ **/
+int32_t mm_jpegdec_process_decoding_job(mm_jpeg_obj *my_obj, mm_jpeg_job_q_node_t* job_node)
+{
+  mm_jpeg_q_data_t qdata;
+  int32_t rc = 0;
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_job_session_t *p_session = NULL;
+
+  /* check if valid session */
+  p_session = mm_jpeg_get_session(my_obj, job_node->dec_info.job_id);
+  if (NULL == p_session) {
+    LOGE("invalid job id %x",
+      job_node->dec_info.job_id);
+    return -1;
+  }
+
+  /* sent encode cmd to OMX, queue job into ongoing queue */
+  qdata.p = job_node;
+  rc = mm_jpeg_queue_enq(&my_obj->ongoing_job_q, qdata);
+  if (rc) {
+    LOGE("jpeg enqueue failed %d", ret);
+    goto error;
+  }
+
+  p_session->decode_job = job_node->dec_info.decode_job;
+  p_session->jobId = job_node->dec_info.job_id;
+  ret = mm_jpegdec_session_decode(p_session);
+  if (ret) {
+    LOGE("encode session failed");
+    goto error;
+  }
+
+  LOGD("Success X ");
+  return rc;
+
+/* NOTE(review): when the decode fails (ret != 0) but the enqueue
+ * succeeded (rc == 0), this path still returns rc == 0; the failure is
+ * reported only through the jpeg_cb error callback below — confirm
+ * callers rely on the callback rather than the return value. */
+error:
+
+  if ((OMX_ErrorNone != ret) &&
+    (NULL != p_session->dec_params.jpeg_cb)) {
+    p_session->job_status = JPEG_JOB_STATUS_ERROR;
+    LOGD("send jpeg error callback %d",
+      p_session->job_status);
+    p_session->dec_params.jpeg_cb(p_session->job_status,
+      p_session->client_hdl,
+      p_session->jobId,
+      NULL,
+      p_session->dec_params.userdata);
+  }
+
+  /*remove the job*/
+  mm_jpegdec_job_done(p_session);
+  LOGD("Error X ");
+
+  return rc;
+}
+
+/** mm_jpeg_start_decode_job:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @client_hdl: client handle
+ *    @job: pointer to encode job
+ *    @jobId: job id
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Start the encoding job
+ *
+ **/
+int32_t mm_jpegdec_start_decode_job(mm_jpeg_obj *my_obj,
+  mm_jpeg_job_t *job,
+  uint32_t *job_id)
+{
+  mm_jpeg_q_data_t qdata;
+  int32_t rc = -1;
+  uint8_t session_idx = 0;
+  uint8_t client_idx = 0;
+  mm_jpeg_job_q_node_t* node = NULL;
+  mm_jpeg_job_session_t *p_session = NULL;
+  mm_jpeg_decode_job_t *p_jobparams  = &job->decode_job;
+
+  *job_id = 0;
+
+  /* check if valid session */
+  session_idx = GET_SESSION_IDX(p_jobparams->session_id);
+  client_idx = GET_CLIENT_IDX(p_jobparams->session_id);
+  LOGD("session_idx %d client idx %d",
+    session_idx, client_idx);
+
+  if ((session_idx >= MM_JPEG_MAX_SESSION) ||
+    (client_idx >= MAX_JPEG_CLIENT_NUM)) {
+    LOGE("invalid session id %x",
+      job->decode_job.session_id);
+    return rc;
+  }
+
+  p_session = &my_obj->clnt_mgr[client_idx].session[session_idx];
+  if (OMX_FALSE == p_session->active) {
+    LOGE("session not active %x",
+      job->decode_job.session_id);
+    return rc;
+  }
+
+  /* Fix: also reject negative indices; the '>=' comparisons alone let a
+   * negative int32_t index slip through to the buffer arrays. */
+  if ((p_jobparams->src_index < 0) || (p_jobparams->dst_index < 0) ||
+    (p_jobparams->src_index >= (int32_t)p_session->dec_params.num_src_bufs) ||
+    (p_jobparams->dst_index >= (int32_t)p_session->dec_params.num_dst_bufs)) {
+    LOGE("invalid buffer indices");
+    return rc;
+  }
+
+  /* enqueue new job into todo job queue */
+  node = (mm_jpeg_job_q_node_t *)malloc(sizeof(mm_jpeg_job_q_node_t));
+  if (NULL == node) {
+    LOGE("No memory for mm_jpeg_job_q_node_t");
+    return -1;
+  }
+
+  /* Job id = session id with a rolling per-session sequence in bits 16+. */
+  *job_id = job->decode_job.session_id |
+    ((p_session->job_hist++ % JOB_HIST_MAX) << 16);
+
+  memset(node, 0, sizeof(mm_jpeg_job_q_node_t));
+  node->dec_info.decode_job = job->decode_job;
+  node->dec_info.job_id = *job_id;
+  node->dec_info.client_handle = p_session->client_hdl;
+  node->type = MM_JPEG_CMD_TYPE_DECODE_JOB;
+
+  qdata.p = node;
+  rc = mm_jpeg_queue_enq(&my_obj->job_mgr.job_queue, qdata);
+  if (0 == rc) {
+    cam_sem_post(&my_obj->job_mgr.job_sem);
+  } else {
+    /* Fix: on enqueue failure, release the node (it was leaked) and
+     * invalidate the job id so the caller cannot track/abort a job that
+     * was never queued. */
+    free(node);
+    *job_id = 0;
+  }
+
+  return rc;
+}
+
+/** mm_jpegdec_create_session:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @client_hdl: client handle
+ *    @p_params: pointer to encode params
+ *    @p_session_id: session id
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Start the encoding session
+ *
+ **/
+int32_t mm_jpegdec_create_session(mm_jpeg_obj *my_obj,
+  uint32_t client_hdl,
+  mm_jpeg_decode_params_t *p_params,
+  uint32_t* p_session_id)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  uint8_t clnt_idx = 0;
+  int session_idx = -1;
+  mm_jpeg_job_session_t *p_session = NULL;
+  *p_session_id = 0;
+
+  /* validate the parameters */
+  if ((p_params->num_src_bufs > MM_JPEG_MAX_BUF)
+    || (p_params->num_dst_bufs > MM_JPEG_MAX_BUF)) {
+    LOGE("invalid num buffers");
+    /* Fix: all failure paths previously returned 0 (success) with
+     * *p_session_id left 0; report failure explicitly. */
+    return -1;
+  }
+
+  /* check if valid client */
+  clnt_idx = mm_jpeg_util_get_index_by_handler(client_hdl);
+  if (clnt_idx >= MAX_JPEG_CLIENT_NUM) {
+    LOGE("invalid client with handler (%d)", client_hdl);
+    return -1;
+  }
+
+  /* Reserve a free session slot for this client. */
+  session_idx = mm_jpeg_get_new_session_idx(my_obj, clnt_idx, &p_session);
+  if (session_idx < 0) {
+    LOGE("invalid session id (%d)", session_idx);
+    return -1;
+  }
+
+  ret = mm_jpegdec_session_create(p_session);
+  if (OMX_ErrorNone != ret) {
+    p_session->active = OMX_FALSE;
+    LOGE("jpeg session create failed");
+    return -1;
+  }
+
+  /* Session id encodes a magic byte, the session slot, and the client. */
+  *p_session_id = (JOB_ID_MAGICVAL << 24) |
+    ((unsigned)session_idx << 8) | clnt_idx;
+
+  /*copy the params*/
+  p_session->dec_params = *p_params;
+  p_session->client_hdl = client_hdl;
+  p_session->sessionId = *p_session_id;
+  p_session->jpeg_obj = (void*)my_obj; /* save a ptr to jpeg_obj */
+  LOGD("session id %x", *p_session_id);
+
+  return 0;
+}
+
+/** mm_jpegdec_destroy_session:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @session_id: session index
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Destroy the encoding session
+ *
+ **/
+int32_t mm_jpegdec_destroy_session(mm_jpeg_obj *my_obj,
+  mm_jpeg_job_session_t *p_session)
+{
+  int32_t rc = 0;
+  mm_jpeg_job_q_node_t *node = NULL;
+
+  if (NULL == p_session) {
+    LOGE("invalid session");
+    /* NOTE(review): returns 0 (success) for a NULL session — confirm
+     * callers treat this as a benign no-op. */
+    return rc;
+  }
+  uint32_t session_id = p_session->sessionId;
+  /* Hold job_lock across queue purges and session teardown so the job
+   * manager thread cannot pick up a job for the dying session. */
+  pthread_mutex_lock(&my_obj->job_lock);
+
+  /* Drain every pending (not yet started) job of this session. */
+  /* abort job if in todo queue */
+  LOGD("abort todo jobs");
+  node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->job_mgr.job_queue, session_id);
+  while (NULL != node) {
+    free(node);
+    node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->job_mgr.job_queue, session_id);
+  }
+
+  /* Drain jobs already submitted to OMX for this session. */
+  /* abort job if in ongoing queue */
+  LOGD("abort ongoing jobs");
+  node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->ongoing_job_q, session_id);
+  while (NULL != node) {
+    free(node);
+    node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->ongoing_job_q, session_id);
+  }
+
+  /* abort the current session */
+  mm_jpeg_session_abort(p_session);
+  mm_jpegdec_session_destroy(p_session);
+  mm_jpeg_remove_session_idx(my_obj, session_id);
+  pthread_mutex_unlock(&my_obj->job_lock);
+
+  /* wake up jobMgr thread to work on new job if there is any */
+  cam_sem_post(&my_obj->job_mgr.job_sem);
+  LOGD("X");
+
+  return rc;
+}
+
+/** mm_jpegdec_destroy_session_by_id:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @session_id: session index
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Destroy the encoding session
+ *
+ **/
+int32_t mm_jpegdec_destroy_session_by_id(mm_jpeg_obj *my_obj, uint32_t session_id)
+{
+  int32_t rc = 0;
+  mm_jpeg_job_session_t *p_session = mm_jpeg_get_session(my_obj, session_id);
+
+  if (NULL == p_session) {
+    LOGE("session is not valid");
+    /* NOTE(review): returns 0 (success) for an unknown session id —
+     * matches mm_jpegdec_destroy_session's NULL handling; confirm
+     * callers expect a no-op rather than an error here. */
+    return rc;
+  }
+
+  return mm_jpegdec_destroy_session(my_obj, p_session);
+}
+
+
+
+/* EmptyBufferDone callback: the component has consumed an input buffer.
+ * Only bookkeeping is done here; ownership returns to the client. */
+OMX_ERRORTYPE mm_jpegdec_ebd(OMX_HANDLETYPE hComponent,
+  OMX_PTR pAppData,
+  OMX_BUFFERHEADERTYPE *pBuffer)
+{
+  mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *) pAppData;
+
+  LOGD("count %d ", p_session->ebd_count);
+  pthread_mutex_lock(&p_session->lock);
+  ++p_session->ebd_count;
+  pthread_mutex_unlock(&p_session->lock);
+  return OMX_ErrorNone;
+}
+
+/* FillBufferDone callback: a decode completed into an output buffer.
+ * Delivers the result to the client via jpeg_cb and removes the job. */
+OMX_ERRORTYPE mm_jpegdec_fbd(OMX_HANDLETYPE hComponent,
+  OMX_PTR pAppData,
+  OMX_BUFFERHEADERTYPE *pBuffer)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *) pAppData;
+  mm_jpeg_output_t output_buf;
+
+  LOGD("count %d ", p_session->fbd_count);
+
+  pthread_mutex_lock(&p_session->lock);
+
+  /* If the session is being aborted, drop the result silently. */
+  if (MM_JPEG_ABORT_NONE != p_session->abort_state) {
+    pthread_mutex_unlock(&p_session->lock);
+    return ret;
+  }
+
+  p_session->fbd_count++;
+  if (NULL != p_session->dec_params.jpeg_cb) {
+    p_session->job_status = JPEG_JOB_STATUS_DONE;
+    output_buf.buf_filled_len = (uint32_t)pBuffer->nFilledLen;
+    output_buf.buf_vaddr = pBuffer->pBuffer;
+    /* fd = -1: the client already owns this buffer (OMX_UseBuffer). */
+    output_buf.fd = -1;
+    LOGD("send jpeg callback %d",
+      p_session->job_status);
+    /* NOTE(review): the client callback runs while p_session->lock is
+     * held; a callback that re-enters this session can deadlock. */
+    p_session->dec_params.jpeg_cb(p_session->job_status,
+      p_session->client_hdl,
+      p_session->jobId,
+      &output_buf,
+      p_session->dec_params.userdata);
+
+    /* remove from ready queue */
+    mm_jpegdec_job_done(p_session);
+  }
+  pthread_mutex_unlock(&p_session->lock);
+  LOGD("Exit");
+
+  return ret;
+}
+
+/* OMX event callback: records the event in p_session->omxEvent and wakes
+ * any thread blocked on p_session->cond. Note that only CmdComplete and
+ * PortSettingsChanged clear event_pending; an abort or error only
+ * signals — the waiters' guards depend on this (see port_enable). */
+OMX_ERRORTYPE mm_jpegdec_event_handler(OMX_HANDLETYPE hComponent,
+  OMX_PTR pAppData,
+  OMX_EVENTTYPE eEvent,
+  OMX_U32 nData1,
+  OMX_U32 nData2,
+  OMX_PTR pEventData)
+{
+  mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *) pAppData;
+
+  LOGD("%d %d %d state %d", eEvent, (int)nData1,
+    (int)nData2, p_session->abort_state);
+
+  LOGD("AppData=%p ", pAppData);
+
+  pthread_mutex_lock(&p_session->lock);
+  p_session->omxEvent = eEvent;
+  /* An in-flight abort takes priority: acknowledge it and return. */
+  if (MM_JPEG_ABORT_INIT == p_session->abort_state) {
+    p_session->abort_state = MM_JPEG_ABORT_DONE;
+    pthread_cond_signal(&p_session->cond);
+    pthread_mutex_unlock(&p_session->lock);
+    return OMX_ErrorNone;
+  }
+
+  if (eEvent == OMX_EventError) {
+    if (p_session->encoding == OMX_TRUE) {
+      LOGD("Error during encoding");
+
+      /* send jpeg callback */
+      if (NULL != p_session->dec_params.jpeg_cb) {
+        p_session->job_status = JPEG_JOB_STATUS_ERROR;
+        LOGD("send jpeg error callback %d",
+          p_session->job_status);
+        p_session->dec_params.jpeg_cb(p_session->job_status,
+          p_session->client_hdl,
+          p_session->jobId,
+          NULL,
+          p_session->dec_params.userdata);
+      }
+
+      /* remove from ready queue */
+      mm_jpegdec_job_done(p_session);
+    }
+    /* Error path signals WITHOUT clearing event_pending. */
+    pthread_cond_signal(&p_session->cond);
+  } else if (eEvent == OMX_EventCmdComplete) {
+    p_session->state_change_pending = OMX_FALSE;
+    p_session->event_pending = OMX_FALSE;
+    pthread_cond_signal(&p_session->cond);
+  }  else if (eEvent == OMX_EventPortSettingsChanged) {
+    p_session->event_pending = OMX_FALSE;
+    pthread_cond_signal(&p_session->cond);
+  }
+
+  pthread_mutex_unlock(&p_session->lock);
+  LOGD("Exit");
+  return OMX_ErrorNone;
+}
+
+/** mm_jpegdec_abort_job:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @client_hdl: client handle
+ *    @jobId: job id
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Abort the encoding session
+ *
+ **/
+int32_t mm_jpegdec_abort_job(mm_jpeg_obj *my_obj,
+  uint32_t jobId)
+{
+  int32_t rc = -1;
+  mm_jpeg_job_q_node_t *node = NULL;
+  mm_jpeg_job_session_t *p_session = NULL;
+
+  LOGD("Enter");
+  pthread_mutex_lock(&my_obj->job_lock);
+
+  /* abort job if in todo queue */
+  node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->job_mgr.job_queue, jobId);
+  if (NULL != node) {
+    free(node);
+    /* Fix: report success — rc previously stayed -1 on every path, so a
+     * successful abort was indistinguishable from job-not-found. */
+    rc = 0;
+    goto abort_done;
+  }
+
+  /* abort job if in ongoing queue */
+  node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->ongoing_job_q, jobId);
+  if (NULL != node) {
+    /* find job that is OMX ongoing, ask OMX to abort the job */
+    p_session = mm_jpeg_get_session(my_obj, node->dec_info.job_id);
+    if (p_session) {
+      mm_jpeg_session_abort(p_session);
+    } else {
+      LOGE("Invalid job id 0x%x",
+        node->dec_info.job_id);
+    }
+    free(node);
+    rc = 0;
+    goto abort_done;
+  }
+
+/* Reached with rc == -1 when the job was in neither queue. */
+abort_done:
+  pthread_mutex_unlock(&my_obj->job_lock);
+
+  return rc;
+}
+/** mm_jpegdec_init:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Initializes the jpeg client
+ *
+ **/
+int32_t mm_jpegdec_init(mm_jpeg_obj *my_obj)
+{
+  int32_t rc = 0;
+
+  /* init locks */
+  pthread_mutex_init(&my_obj->job_lock, NULL);
+
+  /* init ongoing job queue */
+  rc = mm_jpeg_queue_init(&my_obj->ongoing_job_q);
+  if (0 != rc) {
+    LOGE("Error");
+    return -1;
+  }
+
+  /* init job semaphore and launch jobmgr thread */
+  LOGD("Launch jobmgr thread rc %d", rc);
+  rc = mm_jpeg_jobmgr_thread_launch(my_obj);
+  if (0 != rc) {
+    LOGE("Error");
+    return -1;
+  }
+
+  /* load OMX */
+  if (OMX_ErrorNone != OMX_Init()) {
+    /* roll back in error case */
+    /* Fix: previously logged rc (still 0) and fell through to return 0,
+     * reporting success even though everything was torn down. */
+    LOGE("OMX_Init failed");
+    mm_jpeg_jobmgr_thread_release(my_obj);
+    mm_jpeg_queue_deinit(&my_obj->ongoing_job_q);
+    pthread_mutex_destroy(&my_obj->job_lock);
+    return -1;
+  }
+
+  return rc;
+}
+
+/** mm_jpegdec_deinit:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Deinits the jpeg client
+ *
+ **/
+int32_t mm_jpegdec_deinit(mm_jpeg_obj *my_obj)
+{
+  /* Tear down in reverse order of mm_jpegdec_init: job manager thread,
+   * OMX core, ongoing-job queue, then the job lock. */
+  if (0 != mm_jpeg_jobmgr_thread_release(my_obj)) {
+    LOGE("Error");
+  }
+
+  /* unload OMX engine */
+  OMX_Deinit();
+
+  /* The queue deinit status is the function result (as before, a thread
+   * release failure alone does not change the return value). */
+  int32_t rc = mm_jpeg_queue_deinit(&my_obj->ongoing_job_q);
+  if (0 != rc) {
+    LOGE("Error");
+  }
+
+  /* destroy locks */
+  pthread_mutex_destroy(&my_obj->job_lock);
+
+  return rc;
+}
diff --git a/msmcobalt/QCamera2/stack/mm-jpeg-interface/src/mm_jpegdec_interface.c b/msmcobalt/QCamera2/stack/mm-jpeg-interface/src/mm_jpegdec_interface.c
new file mode 100644
index 0000000..df6656b
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-jpeg-interface/src/mm_jpegdec_interface.c
@@ -0,0 +1,301 @@
+/* Copyright (c) 2013-2014, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <pthread.h>
+
+// JPEG dependencies
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg_interface.h"
+#include "mm_jpeg.h"
+
+static pthread_mutex_t g_dec_intf_lock = PTHREAD_MUTEX_INITIALIZER;
+
+static mm_jpeg_obj* g_jpegdec_obj = NULL;
+
+/** mm_jpeg_intf_start_job:
+ *
+ *  Arguments:
+ *    @client_hdl: client handle
+ *    @job: jpeg job object
+ *    @jobId: job id
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       start the jpeg job
+ *
+ **/
+/* Validate arguments, then forward the decode job to the global decoder
+ * object under the interface lock. Returns 0 on success, -1 otherwise. */
+static int32_t mm_jpegdec_intf_start_job(mm_jpeg_job_t* job, uint32_t* job_id)
+{
+  int32_t rc = -1;
+
+  if ((NULL == job) || (NULL == job_id)) {
+    LOGE("invalid parameters for job or jobId");
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_dec_intf_lock);
+  if (NULL != g_jpegdec_obj) {
+    rc = mm_jpegdec_start_decode_job(g_jpegdec_obj, job, job_id);
+  } else {
+    /* mm_jpeg obj not exists, return error */
+    LOGE("mm_jpeg is not opened yet");
+  }
+  pthread_mutex_unlock(&g_dec_intf_lock);
+  return rc;
+}
+
+/** mm_jpeg_intf_create_session:
+ *
+ *  Arguments:
+ *    @client_hdl: client handle
+ *    @p_params: encode parameters
+ *    @p_session_id: session id
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       Create new jpeg session
+ *
+ **/
+/* Validate arguments, then create a decoder session on the global
+ * decoder object under the interface lock. */
+static int32_t mm_jpegdec_intf_create_session(uint32_t client_hdl,
+    mm_jpeg_decode_params_t *p_params,
+    uint32_t *p_session_id)
+{
+  int32_t rc = -1;
+
+  if ((0 == client_hdl) || (NULL == p_params) || (NULL == p_session_id)) {
+    LOGE("invalid client_hdl or jobId");
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_dec_intf_lock);
+  if (NULL != g_jpegdec_obj) {
+    rc = mm_jpegdec_create_session(g_jpegdec_obj, client_hdl, p_params,
+      p_session_id);
+  } else {
+    /* mm_jpeg obj not exists, return error */
+    LOGE("mm_jpeg is not opened yet");
+  }
+  pthread_mutex_unlock(&g_dec_intf_lock);
+  return rc;
+}
+
+/** mm_jpeg_intf_destroy_session:
+ *
+ *  Arguments:
+ *    @session_id: session id
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       Destroy jpeg session
+ *
+ **/
+/* Validate the session id, then destroy the session on the global
+ * decoder object under the interface lock. */
+static int32_t mm_jpegdec_intf_destroy_session(uint32_t session_id)
+{
+  int32_t rc = -1;
+
+  if (0 == session_id) {
+    LOGE("invalid client_hdl or jobId");
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_dec_intf_lock);
+  if (NULL != g_jpegdec_obj) {
+    rc = mm_jpegdec_destroy_session_by_id(g_jpegdec_obj, session_id);
+  } else {
+    /* mm_jpeg obj not exists, return error */
+    LOGE("mm_jpeg is not opened yet");
+  }
+  pthread_mutex_unlock(&g_dec_intf_lock);
+  return rc;
+}
+
+/** mm_jpegdec_intf_abort_job:
+ *
+ *  Arguments:
+ *    @jobId: job id
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       Abort the jpeg job
+ *
+ **/
+/* Validate the job id, then abort the job on the global decoder object
+ * under the interface lock. */
+static int32_t mm_jpegdec_intf_abort_job(uint32_t job_id)
+{
+  int32_t rc = -1;
+
+  if (0 == job_id) {
+    LOGE("invalid jobId");
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_dec_intf_lock);
+  if (NULL != g_jpegdec_obj) {
+    rc = mm_jpegdec_abort_job(g_jpegdec_obj, job_id);
+  } else {
+    /* mm_jpeg obj not exists, return error */
+    LOGE("mm_jpeg is not opened yet");
+  }
+  pthread_mutex_unlock(&g_dec_intf_lock);
+  return rc;
+}
+
+/** mm_jpeg_intf_close:
+ *
+ *  Arguments:
+ *    @client_hdl: client handle
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       Close the jpeg job
+ *
+ **/
+static int32_t mm_jpegdec_intf_close(uint32_t client_hdl)
+{
+  int32_t rc = -1;
+
+  if (0 == client_hdl) {
+    LOGE("invalid client_hdl");
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_dec_intf_lock);
+  if (NULL == g_jpegdec_obj) {
+    /* mm_jpeg obj not exists, return error */
+    LOGE("mm_jpeg is not opened yet");
+    pthread_mutex_unlock(&g_dec_intf_lock);
+    return rc;
+  }
+
+  rc = mm_jpeg_close(g_jpegdec_obj, client_hdl);
+  if (0 == rc) {
+    /* Fix: only drop the client count when the close succeeded;
+     * previously num_clients was decremented unconditionally, so a
+     * failed close corrupted the refcount and could free g_jpegdec_obj
+     * while clients were still registered. */
+    g_jpegdec_obj->num_clients--;
+    if (0 == g_jpegdec_obj->num_clients) {
+      /* No client, close jpeg internally */
+      rc = mm_jpegdec_deinit(g_jpegdec_obj);
+      free(g_jpegdec_obj);
+      g_jpegdec_obj = NULL;
+    }
+  }
+
+  pthread_mutex_unlock(&g_dec_intf_lock);
+  return rc;
+}
+
+
+
+/** jpegdec_open:
+ *
+ *  Arguments:
+ *    @ops: ops table pointer
+ *
+ *  Return:
+ *       0 failure, success otherwise
+ *
+ *  Description:
+ *       Open a jpeg client
+ *
+ **/
+uint32_t jpegdec_open(mm_jpegdec_ops_t *ops)
+{
+  int32_t rc = 0;
+  /* 0 is the failure sentinel: valid client handles are non-zero. */
+  uint32_t clnt_hdl = 0;
+  mm_jpeg_obj* jpeg_obj = NULL;
+
+  pthread_mutex_lock(&g_dec_intf_lock);
+  /* first time open */
+  if(NULL == g_jpegdec_obj) {
+    jpeg_obj = (mm_jpeg_obj *)malloc(sizeof(mm_jpeg_obj));
+    if(NULL == jpeg_obj) {
+      LOGE("no mem");
+      pthread_mutex_unlock(&g_dec_intf_lock);
+      return clnt_hdl;
+    }
+
+    /* initialize jpeg obj */
+    memset(jpeg_obj, 0, sizeof(mm_jpeg_obj));
+    rc = mm_jpegdec_init(jpeg_obj);
+    if(0 != rc) {
+      LOGE("mm_jpeg_init err = %d", rc);
+      free(jpeg_obj);
+      pthread_mutex_unlock(&g_dec_intf_lock);
+      return clnt_hdl;
+    }
+
+    /* remember in global variable */
+    g_jpegdec_obj = jpeg_obj;
+  }
+
+  /* open new client */
+  clnt_hdl = mm_jpeg_new_client(g_jpegdec_obj);
+  if (clnt_hdl > 0) {
+    /* valid client */
+    if (NULL != ops) {
+      /* fill in ops tbl if ptr not NULL */
+      ops->start_job = mm_jpegdec_intf_start_job;
+      ops->abort_job = mm_jpegdec_intf_abort_job;
+      ops->create_session = mm_jpegdec_intf_create_session;
+      ops->destroy_session = mm_jpegdec_intf_destroy_session;
+      ops->close = mm_jpegdec_intf_close;
+    }
+  } else {
+    /* failed new client */
+    LOGE("mm_jpeg_new_client failed");
+
+    /* If this open created the global object and no other client holds
+     * it, roll the creation back so state returns to "never opened". */
+    if (0 == g_jpegdec_obj->num_clients) {
+      /* no client, close jpeg */
+      mm_jpegdec_deinit(g_jpegdec_obj);
+      free(g_jpegdec_obj);
+      g_jpegdec_obj = NULL;
+    }
+  }
+
+  pthread_mutex_unlock(&g_dec_intf_lock);
+  return clnt_hdl;
+}
+
+
+
diff --git a/msmcobalt/QCamera2/stack/mm-jpeg-interface/test/Android.mk b/msmcobalt/QCamera2/stack/mm-jpeg-interface/test/Android.mk
new file mode 100644
index 0000000..a15c158
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-jpeg-interface/test/Android.mk
@@ -0,0 +1,87 @@
+#encoder int test
+# Save and restore LOCAL_PATH around both test modules so this makefile
+# does not clobber the including makefile's state.
+OLD_LOCAL_PATH := $(LOCAL_PATH)
+MM_JPEG_TEST_PATH := $(call my-dir)
+
+include $(LOCAL_PATH)/../../common.mk
+include $(CLEAR_VARS)
+LOCAL_PATH := $(MM_JPEG_TEST_PATH)
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_CFLAGS := -DCAMERA_ION_HEAP_ID=ION_IOMMU_HEAP_ID
+LOCAL_CFLAGS += -Wall -Wextra -Werror -Wno-unused-parameter
+LOCAL_CFLAGS += -D_ANDROID_
+
+ifeq ($(strip $(TARGET_USES_ION)),true)
+LOCAL_CFLAGS += -DUSE_ION
+endif
+
+# System header file path prefix
+LOCAL_CFLAGS += -DSYSTEM_HEADER_PREFIX=sys
+
+OMX_HEADER_DIR := frameworks/native/include/media/openmax
+OMX_CORE_DIR := $(MM_JPEG_TEST_PATH)/../../../../mm-image-codec
+
+LOCAL_C_INCLUDES := $(MM_JPEG_TEST_PATH)
+LOCAL_C_INCLUDES += $(MM_JPEG_TEST_PATH)/../inc
+LOCAL_C_INCLUDES += $(MM_JPEG_TEST_PATH)/../../common
+LOCAL_C_INCLUDES += $(MM_JPEG_TEST_PATH)/../../mm-camera-interface/inc
+LOCAL_C_INCLUDES += $(OMX_HEADER_DIR)
+LOCAL_C_INCLUDES += $(OMX_CORE_DIR)/qexif
+LOCAL_C_INCLUDES += $(OMX_CORE_DIR)/qomx_core
+
+LOCAL_C_INCLUDES+= $(kernel_includes)
+LOCAL_ADDITIONAL_DEPENDENCIES := $(common_deps)
+
+LOCAL_SRC_FILES := mm_jpeg_test.c
+
+LOCAL_32_BIT_ONLY := $(BOARD_QTI_CAMERA_32BIT_ONLY)
+LOCAL_MODULE           := mm-jpeg-interface-test
+LOCAL_PRELINK_MODULE   := false
+LOCAL_SHARED_LIBRARIES := libcutils libdl libmmjpeg_interface
+
+include $(BUILD_EXECUTABLE)
+
+
+
+#decoder int test
+# Second module: same flags/includes as the encoder test, different source.
+include $(CLEAR_VARS)
+LOCAL_PATH := $(MM_JPEG_TEST_PATH)
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_CFLAGS := -DCAMERA_ION_HEAP_ID=ION_IOMMU_HEAP_ID
+LOCAL_CFLAGS += -Wall -Wextra -Werror -Wno-unused-parameter
+
+LOCAL_CFLAGS += -D_ANDROID_
+
+ifeq ($(strip $(TARGET_USES_ION)),true)
+LOCAL_CFLAGS += -DUSE_ION
+endif
+
+# System header file path prefix
+LOCAL_CFLAGS += -DSYSTEM_HEADER_PREFIX=sys
+
+OMX_HEADER_DIR := frameworks/native/include/media/openmax
+OMX_CORE_DIR := $(MM_JPEG_TEST_PATH)/../../../../mm-image-codec
+
+LOCAL_C_INCLUDES := $(MM_JPEG_TEST_PATH)
+LOCAL_C_INCLUDES += $(MM_JPEG_TEST_PATH)/../inc
+LOCAL_C_INCLUDES += $(MM_JPEG_TEST_PATH)/../../common
+LOCAL_C_INCLUDES += $(MM_JPEG_TEST_PATH)/../../mm-camera-interface/inc
+LOCAL_C_INCLUDES += $(OMX_HEADER_DIR)
+LOCAL_C_INCLUDES += $(OMX_CORE_DIR)/qexif
+LOCAL_C_INCLUDES += $(OMX_CORE_DIR)/qomx_core
+
+LOCAL_C_INCLUDES+= $(kernel_includes)
+LOCAL_ADDITIONAL_DEPENDENCIES := $(common_deps)
+
+LOCAL_SRC_FILES := mm_jpegdec_test.c
+
+LOCAL_32_BIT_ONLY := $(BOARD_QTI_CAMERA_32BIT_ONLY)
+LOCAL_MODULE           := mm-jpegdec-interface-test
+LOCAL_PRELINK_MODULE   := false
+LOCAL_SHARED_LIBRARIES := libcutils libdl libmmjpeg_interface
+
+include $(BUILD_EXECUTABLE)
+
+LOCAL_PATH := $(OLD_LOCAL_PATH)
\ No newline at end of file
diff --git a/msmcobalt/QCamera2/stack/mm-jpeg-interface/test/mm_jpeg_test.c b/msmcobalt/QCamera2/stack/mm-jpeg-interface/test/mm_jpeg_test.c
new file mode 100644
index 0000000..b1ddafc
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-jpeg-interface/test/mm_jpeg_test.c
@@ -0,0 +1,776 @@
+/* Copyright (c) 2013-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <pthread.h>
+#include <stdlib.h>
+#define TIME_H <SYSTEM_HEADER_PREFIX/time.h>
+#include TIME_H
+
+// JPEG dependencies
+#include "mm_jpeg_interface.h"
+#include "mm_jpeg_ionbuf.h"
+
+// Camera dependencies
+#include "mm_camera_dbg.h"
+
+#define MAX_NUM_BUFS (12)
+#define MAX_NUM_CLIENT (8)
+
+/** DUMP_TO_FILE:
+ *  @filename: file name
+ *  @p_addr: address of the buffer
+ *  @len: buffer length
+ *
+ *  dump the image to the file
+ **/
+#define DUMP_TO_FILE(filename, p_addr, len) ({ \
+  FILE *fp = fopen(filename, "w+"); \
+  if (fp) { \
+    fwrite(p_addr, 1, len, fp); \
+    fclose(fp); \
+  } else { \
+    LOGE("cannot dump image"); \
+  } \
+})
+
+static uint32_t g_count = 1U, g_i;
+
+typedef struct {
+  mm_jpeg_color_format fmt;
+  cam_rational_type_t mult;
+  const char *str;
+} mm_jpeg_intf_test_colfmt_t;
+
+typedef struct {
+  char *filename;
+  int width;
+  int height;
+  char *out_filename;
+  uint32_t burst_mode;
+  uint32_t min_out_bufs;
+  mm_jpeg_intf_test_colfmt_t col_fmt;
+  uint32_t encode_thumbnail;
+  int tmb_width;
+  int tmb_height;
+  int main_quality;
+  int thumb_quality;
+  char *qtable_luma_file;
+  char *qtable_chroma_file;
+  int client_cnt;
+} jpeg_test_input_t;
+
+/* Static constants */
+/*  default Luma Qtable */
+uint8_t DEFAULT_QTABLE_0[QUANT_SIZE] = {
+  16, 11, 10, 16, 24, 40, 51, 61,
+  12, 12, 14, 19, 26, 58, 60, 55,
+  14, 13, 16, 24, 40, 57, 69, 56,
+  14, 17, 22, 29, 51, 87, 80, 62,
+  18, 22, 37, 56, 68, 109, 103, 77,
+  24, 35, 55, 64, 81, 104, 113, 92,
+  49, 64, 78, 87, 103, 121, 120, 101,
+  72, 92, 95, 98, 112, 100, 103, 99
+};
+
+/*  default Chroma Qtable */
+uint8_t DEFAULT_QTABLE_1[QUANT_SIZE] = {
+  17, 18, 24, 47, 99, 99, 99, 99,
+  18, 21, 26, 66, 99, 99, 99, 99,
+  24, 26, 56, 99, 99, 99, 99, 99,
+  47, 66, 99, 99, 99, 99, 99, 99,
+  99, 99, 99, 99, 99, 99, 99, 99,
+  99, 99, 99, 99, 99, 99, 99, 99,
+  99, 99, 99, 99, 99, 99, 99, 99,
+  99, 99, 99, 99, 99, 99, 99, 99
+};
+
+typedef struct {
+  char *filename[MAX_NUM_BUFS];
+  int width;
+  int height;
+  char *out_filename[MAX_NUM_BUFS];
+  pthread_mutex_t lock;
+  pthread_cond_t cond;
+  pthread_t thread_id;
+  buffer_t input[MAX_NUM_BUFS];
+  buffer_t output[MAX_NUM_BUFS];
+  int use_ion;
+  uint32_t handle;
+  mm_jpeg_ops_t ops;
+  uint32_t job_id[MAX_NUM_BUFS];
+  mm_jpeg_encode_params_t params;
+  mm_jpeg_job_t job;
+  uint32_t session_id;
+  uint32_t num_bufs;
+  uint32_t min_out_bufs;
+  size_t buf_filled_len[MAX_NUM_BUFS];
+  mm_dimension pic_size;
+  int ret;
+  int clinet_id;
+} mm_jpeg_intf_test_t;
+
+
+
+static const mm_jpeg_intf_test_colfmt_t color_formats[] =
+{
+  { MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2, {3, 2}, "YCRCBLP_H2V2" },
+  { MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2, {3, 2}, "YCBCRLP_H2V2" },
+  { MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1, {2, 1}, "YCRCBLP_H2V1" },
+  { MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1, {2, 1}, "YCBCRLP_H2V1" },
+  { MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V2, {2, 1}, "YCRCBLP_H1V2" },
+  { MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V2, {2, 1}, "YCBCRLP_H1V2" },
+  { MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V1, {3, 1}, "YCRCBLP_H1V1" },
+  { MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V1, {3, 1}, "YCBCRLP_H1V1" }
+};
+
+static jpeg_test_input_t jpeg_input[] = {
+  { QCAMERA_DUMP_FRM_LOCATION"test_1.yuv", 4000, 3008, QCAMERA_DUMP_FRM_LOCATION"test_1.jpg", 0, 0,
+    { MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2, {3, 2}, "YCRCBLP_H2V2" },
+      0, 320, 240, 80, 80, NULL, NULL, 1}
+};
+
+/** mm_jpeg_encode_callback:
+ *
+ *  Per-job completion callback. Locates the finished job by scanning
+ *  job_id[], records the output length, optionally dumps the JPEG, and
+ *  signals the waiting encode_test thread once all g_count jobs are done.
+ *  The global g_i/g_count pair is shared by all clients and is only
+ *  touched while p_obj->lock is held.
+ *
+ *  NOTE(review): the scan relies on encode_test writing a 0 terminator
+ *  after the last job id; if num_bufs reaches MAX_NUM_BUFS that
+ *  terminator write would be out of bounds -- confirm the input count
+ *  stays below MAX_NUM_BUFS.
+ **/
+static void mm_jpeg_encode_callback(jpeg_job_status_t status,
+  uint32_t client_hdl,
+  uint32_t jobId,
+  mm_jpeg_output_t *p_output,
+  void *userData)
+{
+  mm_jpeg_intf_test_t *p_obj = (mm_jpeg_intf_test_t *)userData;
+
+  pthread_mutex_lock(&p_obj->lock);
+
+  if (status == JPEG_JOB_STATUS_ERROR) {
+    LOGE("Encode error");
+  } else {
+    int i = 0;
+    /* walk job_id[] until the matching id or the 0 terminator */
+    for (i = 0; p_obj->job_id[i] && (jobId != p_obj->job_id[i]); i++)
+      ;
+    if (!p_obj->job_id[i]) {
+      LOGE("Cannot find job ID!!!");
+      goto error;
+    }
+    LOGE("Encode success addr %p len %zu idx %d",
+       p_output->buf_vaddr, p_output->buf_filled_len, i);
+
+    p_obj->buf_filled_len[i] = p_output->buf_filled_len;
+    /* with minimal output buffers the data must be saved now, before the
+     * buffer is recycled for the next job */
+    if (p_obj->min_out_bufs) {
+      LOGE("Saving file%s addr %p len %zu",
+           p_obj->out_filename[i],
+          p_output->buf_vaddr, p_output->buf_filled_len);
+      DUMP_TO_FILE(p_obj->out_filename[i], p_output->buf_vaddr,
+        p_output->buf_filled_len);
+    }
+  }
+  g_i++;
+
+error:
+
+  if (g_i >= g_count) {
+    LOGE("Signal the thread");
+    pthread_cond_signal(&p_obj->cond);
+  }
+  pthread_mutex_unlock(&p_obj->lock);
+}
+
+/** mm_jpeg_test_alloc:
+ *
+ *  Back p_buffer (of p_buffer->size bytes) either with ION/pmem memory
+ *  or with plain heap memory.
+ *
+ *  Return: 0 on success, -1 when the allocation fails.
+ **/
+int mm_jpeg_test_alloc(buffer_t *p_buffer, int use_pmem)
+{
+  if (use_pmem) {
+    p_buffer->addr = (uint8_t *)buffer_allocate(p_buffer, 0);
+  } else {
+    /* plain heap memory */
+    p_buffer->addr = (uint8_t *)malloc(p_buffer->size);
+  }
+
+  if (NULL == p_buffer->addr) {
+    LOGE("Error");
+    return -1;
+  }
+
+  return 0;
+}
+
+/** mm_jpeg_test_free:
+ *
+ *  Release a buffer obtained from mm_jpeg_test_alloc and scrub the
+ *  descriptor. A valid pmem fd (>= 0) selects the ION deallocation
+ *  path; otherwise the memory came from malloc. Safe to call on an
+ *  already-freed (zeroed) descriptor because addr is NULL then.
+ **/
+void mm_jpeg_test_free(buffer_t *p_buffer)
+{
+  if (p_buffer->addr == NULL)
+    return;
+
+  if (p_buffer->p_pmem_fd >= 0)
+    buffer_deallocate(p_buffer);
+  else
+    free(p_buffer->addr);
+
+  /* zero the descriptor so a double free becomes a harmless no-op */
+  memset(p_buffer, 0x0, sizeof(buffer_t));
+}
+
+/** mm_jpeg_test_read:
+ *
+ *  Fill the pre-allocated input buffer at index idx from the
+ *  corresponding input file. The file must be at least as large as the
+ *  buffer; extra file bytes are ignored.
+ *
+ *  Return: 0 on success, -1 on open/size/read failure.
+ **/
+int mm_jpeg_test_read(mm_jpeg_intf_test_t *p_obj, uint32_t idx)
+{
+  FILE *fp = NULL;
+  size_t file_size = 0;
+  size_t read_len = 0;
+  fp = fopen(p_obj->filename[idx], "rb");
+  if (!fp) {
+    LOGE("error");
+    return -1;
+  }
+  fseek(fp, 0, SEEK_END);
+  file_size = (size_t)ftell(fp);
+  fseek(fp, 0, SEEK_SET);
+  LOGE("input file size is %zu buf_size %zu",
+     file_size, p_obj->input[idx].size);
+
+  if (p_obj->input[idx].size > file_size) {
+    LOGE("error");
+    fclose(fp);
+    return -1;
+  }
+  /* fread can return short on I/O error; treat a partial read as a
+   * failure instead of silently encoding garbage */
+  read_len = fread(p_obj->input[idx].addr, 1, p_obj->input[idx].size, fp);
+  if (read_len != p_obj->input[idx].size) {
+    LOGE("error");
+    fclose(fp);
+    return -1;
+  }
+  fclose(fp);
+  return 0;
+}
+
+/** mm_jpeg_test_read_qtable:
+ *
+ *  Arguments:
+ *    @filename: Qtable filename
+ *    @chroma_flag: Flag indicating chroma qtable
+ *
+ *  Return:
+ *    0 success, failure otherwise
+ *
+ *  Description:
+ *    Reads qtable from file and sets it in appropriate qtable
+ *    based on flag.
+ **/
+int mm_jpeg_test_read_qtable(const char *filename, bool chroma_flag)
+{
+  FILE *fp = NULL;
+  /* chroma_flag selects which global default table gets overwritten */
+  uint8_t *p_qtable = chroma_flag ? DEFAULT_QTABLE_1 : DEFAULT_QTABLE_0;
+  int i;
+
+  /* no file given: keep the built-in defaults, not an error */
+  if (filename == NULL)
+    return 0;
+
+  fp = fopen(filename, "r");
+  if (!fp) {
+    LOGE("error cannot open file");
+    return -1;
+  }
+
+  for (i = 0; i < QUANT_SIZE; i++) {
+    /* fail on a malformed or truncated table instead of silently
+     * leaving the remaining entries at their previous values */
+    if (fscanf(fp, "%hhu,", &p_qtable[i]) != 1) {
+      LOGE("error invalid qtable file");
+      fclose(fp);
+      return -1;
+    }
+  }
+
+  fclose(fp);
+  return 0;
+}
+
+/** encode_init:
+ *
+ *  Prepare one client's test state from the parsed input list: read the
+ *  source images, optionally load custom quantization tables, allocate
+ *  source/destination buffers and fill in the encode parameters and job
+ *  template. p_input points at a NULL-filename-terminated array.
+ *
+ *  Return: 0 on success, -1 on failure.
+ **/
+static int encode_init(jpeg_test_input_t *p_input, mm_jpeg_intf_test_t *p_obj,
+  int client_id)
+{
+  int rc = -1;
+  size_t size = (size_t)(p_input->width * p_input->height);
+  mm_jpeg_encode_params_t *p_params = &p_obj->params;
+  mm_jpeg_encode_job_t *p_job_params = &p_obj->job.encode_job;
+  uint32_t i = 0;
+  uint32_t burst_mode = p_input->burst_mode;
+  jpeg_test_input_t *p_in = p_input;
+
+  do {
+    /* bound the per-buffer arrays; one slot is reserved for the zero
+     * terminator encode_test writes into job_id[] */
+    if ((i + 1) >= MAX_NUM_BUFS) {
+      LOGE("Error: too many input files");
+      return -1;
+    }
+
+    p_obj->filename[i] = p_in->filename;
+    p_obj->width = p_input->width;
+    p_obj->height = p_input->height;
+    p_obj->out_filename[i] = p_in->out_filename;
+    p_obj->use_ion = 1;
+    p_obj->min_out_bufs = p_input->min_out_bufs;
+
+    /* allocate buffers; the color format multiplier scales luma size to
+     * total plane size (e.g. 3/2 for 4:2:0) */
+    p_obj->input[i].size = size * (size_t)p_input->col_fmt.mult.numerator /
+        (size_t)p_input->col_fmt.mult.denominator;
+    rc = mm_jpeg_test_alloc(&p_obj->input[i], p_obj->use_ion);
+    if (rc) {
+      LOGE("Error");
+      return -1;
+    }
+
+
+    rc = mm_jpeg_test_read(p_obj, i);
+    if (rc) {
+      LOGE("Error, unable to read input image");
+      return -1;
+    }
+
+    /* bug fix: the return codes below were previously discarded, so a
+     * bad qtable file was never detected (stale rc was checked) */
+    rc = mm_jpeg_test_read_qtable(p_input->qtable_luma_file, false);
+    if (rc) {
+      LOGE("Error, unable to read luma qtable");
+      return -1;
+    }
+
+    rc = mm_jpeg_test_read_qtable(p_input->qtable_chroma_file, true);
+    if (rc) {
+      LOGE("Error, unable to read chrome qtable");
+      return -1;
+    }
+
+    /* src buffer config*/
+    p_params->src_main_buf[i].buf_size = p_obj->input[i].size;
+    p_params->src_main_buf[i].buf_vaddr = p_obj->input[i].addr;
+    p_params->src_main_buf[i].fd = p_obj->input[i].p_pmem_fd;
+    p_params->src_main_buf[i].index = i;
+    p_params->src_main_buf[i].format = MM_JPEG_FMT_YUV;
+    p_params->src_main_buf[i].offset.mp[0].len = (uint32_t)size;
+    p_params->src_main_buf[i].offset.mp[0].stride = p_input->width;
+    p_params->src_main_buf[i].offset.mp[0].scanline = p_input->height;
+    p_params->src_main_buf[i].offset.mp[1].len = (uint32_t)(size >> 1);
+
+    /* thumbnail shares the main image source buffer */
+    p_params->src_thumb_buf[i].buf_size = p_obj->input[i].size;
+    p_params->src_thumb_buf[i].buf_vaddr = p_obj->input[i].addr;
+    p_params->src_thumb_buf[i].fd = p_obj->input[i].p_pmem_fd;
+    p_params->src_thumb_buf[i].index = i;
+    p_params->src_thumb_buf[i].format = MM_JPEG_FMT_YUV;
+    p_params->src_thumb_buf[i].offset.mp[0].len = (uint32_t)size;
+    p_params->src_thumb_buf[i].offset.mp[0].stride = p_input->width;
+    p_params->src_thumb_buf[i].offset.mp[0].scanline = p_input->height;
+    p_params->src_thumb_buf[i].offset.mp[1].len = (uint32_t)(size >> 1);
+
+
+    i++;
+  } while((++p_in)->filename);
+
+  p_obj->num_bufs = i;
+
+  pthread_mutex_init(&p_obj->lock, NULL);
+  pthread_cond_init(&p_obj->cond, NULL);
+
+
+  /* set encode parameters */
+  p_params->jpeg_cb = mm_jpeg_encode_callback;
+  p_params->userdata = p_obj;
+  p_params->color_format = p_input->col_fmt.fmt;
+  p_params->thumb_color_format = p_input->col_fmt.fmt;
+
+  /* with min_out_bufs only two output buffers are ping-ponged; otherwise
+   * one output buffer per input */
+  if (p_obj->min_out_bufs) {
+    p_params->num_dst_bufs = 2;
+  } else {
+    p_params->num_dst_bufs = p_obj->num_bufs;
+  }
+
+  for (i = 0; i < (uint32_t)p_params->num_dst_bufs; i++) {
+    /* worst-case JPEG output assumed <= 4:2:0 frame size */
+    p_obj->output[i].size = size * 3/2;
+    rc = mm_jpeg_test_alloc(&p_obj->output[i], 0);
+    if (rc) {
+      LOGE("Error");
+      return -1;
+    }
+    /* dest buffer config */
+    p_params->dest_buf[i].buf_size = p_obj->output[i].size;
+    p_params->dest_buf[i].buf_vaddr = p_obj->output[i].addr;
+    p_params->dest_buf[i].fd = p_obj->output[i].p_pmem_fd;
+    p_params->dest_buf[i].index = i;
+  }
+
+
+  p_params->num_src_bufs = p_obj->num_bufs;
+  p_params->num_tmb_bufs = 0;
+  /* global completion target used by the encode callback */
+  g_count = p_params->num_src_bufs;
+
+  p_params->encode_thumbnail = p_input->encode_thumbnail;
+  if (p_params->encode_thumbnail) {
+      p_params->num_tmb_bufs = p_obj->num_bufs;
+  }
+  p_params->quality = (uint32_t)p_input->main_quality;
+  p_params->thumb_quality = (uint32_t)p_input->thumb_quality;
+
+  p_job_params->dst_index = 0;
+  p_job_params->src_index = 0;
+  p_job_params->rotation = 0;
+
+  /* main dimension: no scaling, no crop */
+  p_job_params->main_dim.src_dim.width = p_obj->width;
+  p_job_params->main_dim.src_dim.height = p_obj->height;
+  p_job_params->main_dim.dst_dim.width = p_obj->width;
+  p_job_params->main_dim.dst_dim.height = p_obj->height;
+  p_job_params->main_dim.crop.top = 0;
+  p_job_params->main_dim.crop.left = 0;
+  p_job_params->main_dim.crop.width = p_obj->width;
+  p_job_params->main_dim.crop.height = p_obj->height;
+
+  p_params->main_dim  = p_job_params->main_dim;
+
+  /* thumb dimension: scale down to the requested thumbnail size */
+  p_job_params->thumb_dim.src_dim.width = p_obj->width;
+  p_job_params->thumb_dim.src_dim.height = p_obj->height;
+  p_job_params->thumb_dim.dst_dim.width = p_input->tmb_width;
+  p_job_params->thumb_dim.dst_dim.height = p_input->tmb_height;
+  p_job_params->thumb_dim.crop.top = 0;
+  p_job_params->thumb_dim.crop.left = 0;
+  p_job_params->thumb_dim.crop.width = 0;
+  p_job_params->thumb_dim.crop.height = 0;
+
+  p_params->thumb_dim  = p_job_params->thumb_dim;
+
+  p_job_params->exif_info.numOfEntries = 0;
+  p_params->burst_mode = burst_mode;
+
+  /* Qtable: install the (possibly file-overridden) default tables */
+  p_job_params->qtable[0].eQuantizationTable =
+    OMX_IMAGE_QuantizationTableLuma;
+  p_job_params->qtable[1].eQuantizationTable =
+    OMX_IMAGE_QuantizationTableChroma;
+  p_job_params->qtable_set[0] = 1;
+  p_job_params->qtable_set[1] = 1;
+
+  for (i = 0; i < QUANT_SIZE; i++) {
+    p_job_params->qtable[0].nQuantizationMatrix[i] = DEFAULT_QTABLE_0[i];
+    p_job_params->qtable[1].nQuantizationMatrix[i] = DEFAULT_QTABLE_1[i];
+  }
+
+  p_obj->pic_size.w = (uint32_t)p_input->width;
+  p_obj->pic_size.h = (uint32_t)p_input->height;
+
+  /* NB: field is spelled "clinet_id" in the struct definition */
+  p_obj->clinet_id = client_id;
+
+  return 0;
+}
+
+/** encode_test:
+ *
+ *  Per-client worker thread: open a jpeg client, create a session,
+ *  queue one encode job per input buffer, wait for the callback to
+ *  signal completion, then save outputs and free all buffers.
+ *  data is the client's mm_jpeg_intf_test_t; the overall result is
+ *  reported through jpeg_obj->ret.
+ **/
+static void *encode_test(void *data)
+{
+  int rc = 0;
+  mm_jpeg_intf_test_t *jpeg_obj = (mm_jpeg_intf_test_t *)data;
+  char file_name[64];
+
+  uint32_t i = 0;
+  jpeg_obj->handle = jpeg_open(&jpeg_obj->ops, NULL, jpeg_obj->pic_size, NULL);
+  if (jpeg_obj->handle == 0) {
+    LOGE("Error");
+    jpeg_obj->ret = -1;
+    goto end;
+  }
+
+  rc = jpeg_obj->ops.create_session(jpeg_obj->handle, &jpeg_obj->params,
+    &jpeg_obj->job.encode_job.session_id);
+  /* bug fix: also check the return code -- a failing create_session may
+   * leave session_id untouched, so checking session_id alone can miss
+   * the failure */
+  if (rc || jpeg_obj->job.encode_job.session_id == 0) {
+    LOGE("Error");
+    jpeg_obj->ret = -1;
+    goto end;
+  }
+
+  for (i = 0; i < jpeg_obj->num_bufs; i++) {
+    jpeg_obj->job.job_type = JPEG_JOB_TYPE_ENCODE;
+    jpeg_obj->job.encode_job.src_index = (int32_t) i;
+    jpeg_obj->job.encode_job.dst_index = (int32_t) i;
+    jpeg_obj->job.encode_job.thumb_index = (uint32_t) i;
+
+    /* in burst mode with minimal buffers the encoder picks the output
+     * buffer itself */
+    if (jpeg_obj->params.burst_mode && jpeg_obj->min_out_bufs) {
+      jpeg_obj->job.encode_job.dst_index = -1;
+    }
+
+    rc = jpeg_obj->ops.start_job(&jpeg_obj->job, &jpeg_obj->job_id[i]);
+    if (rc) {
+      LOGE("Error");
+      jpeg_obj->ret = rc;
+      goto end;
+    }
+  }
+  /* zero terminator consumed by mm_jpeg_encode_callback's job scan */
+  jpeg_obj->job_id[i] = 0;
+
+  /*
+  usleep(5);
+  jpeg_obj->ops.abort_job(jpeg_obj->job_id[0]);
+  */
+  /* NOTE(review): waiting without a predicate loop can miss a signal
+   * sent before the lock is taken and is vulnerable to spurious
+   * wakeups -- confirm jobs cannot complete before this point */
+  pthread_mutex_lock(&jpeg_obj->lock);
+  pthread_cond_wait(&jpeg_obj->cond, &jpeg_obj->lock);
+  pthread_mutex_unlock(&jpeg_obj->lock);
+
+  jpeg_obj->ops.destroy_session(jpeg_obj->job.encode_job.session_id);
+  jpeg_obj->ops.close(jpeg_obj->handle);
+
+end:
+  for (i = 0; i < jpeg_obj->num_bufs; i++) {
+    if (!jpeg_obj->min_out_bufs) {
+      // Save output files
+      LOGE("Saving file%s addr %p len %zu",
+          jpeg_obj->out_filename[i],
+          jpeg_obj->output[i].addr, jpeg_obj->buf_filled_len[i]);
+
+      snprintf(file_name, sizeof(file_name), "%s_%d.jpg",
+        jpeg_obj->out_filename[i], jpeg_obj->clinet_id);
+      fprintf(stderr, "Output file for client %d = %s\n",
+        jpeg_obj->clinet_id, file_name);
+
+      DUMP_TO_FILE(file_name, jpeg_obj->output[i].addr,
+        jpeg_obj->buf_filled_len[i]);
+    }
+    mm_jpeg_test_free(&jpeg_obj->input[i]);
+    mm_jpeg_test_free(&jpeg_obj->output[i]);
+  }
+  return NULL;
+}
+
+#define MAX_FILE_CNT (20)
+/** mm_jpeg_test_get_input:
+ *
+ *  Parse command-line options into *p_test, then expand the allocation
+ *  (via realloc) into one entry per input file plus a zero-filled
+ *  terminator entry consumed by encode_init's do/while loop.
+ *
+ *  Return: 0 on success, 1 on invalid input.
+ *
+ *  NOTE(review): realloc can move the block, leaving the CALLER's
+ *  pointer dangling; fixing that properly needs a jpeg_test_input_t **
+ *  interface -- confirm against main() before relying on it.
+ **/
+static int mm_jpeg_test_get_input(int argc, char *argv[],
+    jpeg_test_input_t *p_test)
+{
+  int c;
+  size_t in_file_cnt = 0, out_file_cnt = 0, i;
+  int idx = 0;
+  jpeg_test_input_t base_cfg;
+
+  char *in_files[MAX_FILE_CNT];
+  char *out_files[MAX_FILE_CNT];
+
+  /* bug fix: 'M' added to the optstring -- the case 'M' handler was
+   * unreachable because -M was never declared as an option */
+  while ((c = getopt(argc, argv, "-I:O:W:H:F:BTMx:y:Q:J:K:C:q:")) != -1) {
+    switch (c) {
+    case 'B':
+      fprintf(stderr, "%-25s\n", "Using burst mode");
+      p_test->burst_mode = 1;
+      break;
+    case 'I':
+      for (idx = optind - 1; idx < argc; idx++) {
+        if (argv[idx][0] == '-') {
+          break;
+        }
+        /* bug fix: bound the collection -- previously more than
+         * MAX_FILE_CNT files overflowed the stack array */
+        if (in_file_cnt >= MAX_FILE_CNT) {
+          fprintf(stderr, "Too many input files, max %d\n", MAX_FILE_CNT);
+          return 1;
+        }
+        in_files[in_file_cnt++] = argv[idx];
+      }
+      optind = idx -1;
+
+      break;
+    case 'O':
+      for (idx = optind - 1; idx < argc; idx++) {
+        if (argv[idx][0] == '-') {
+          break;
+        }
+        if (out_file_cnt >= MAX_FILE_CNT) {
+          fprintf(stderr, "Too many output files, max %d\n", MAX_FILE_CNT);
+          return 1;
+        }
+        out_files[out_file_cnt++] = argv[idx];
+      }
+      optind = idx -1;
+
+      break;
+    case 'W':
+      p_test->width = atoi(optarg);
+      fprintf(stderr, "%-25s%d\n", "Width: ", p_test->width);
+      break;
+    case 'H':
+      p_test->height = atoi(optarg);
+      fprintf(stderr, "%-25s%d\n", "Height: ", p_test->height);
+      break;
+    case 'F':
+      p_test->col_fmt = color_formats[atoi(optarg)];
+      fprintf(stderr, "%-25s%s\n", "Format: ", p_test->col_fmt.str);
+      break;
+    case 'M':
+      p_test->min_out_bufs = 1;
+      fprintf(stderr, "%-25s\n", "Using minimum number of output buffers");
+      break;
+    case 'T':
+      p_test->encode_thumbnail = 1;
+      fprintf(stderr, "%-25s\n", "Encode thumbnail");
+      break;
+    case 'x':
+      p_test->tmb_width = atoi(optarg);
+      fprintf(stderr, "%-25s%d\n", "Tmb Width: ", p_test->tmb_width);
+      break;
+    case 'y':
+      p_test->tmb_height = atoi(optarg);
+      fprintf(stderr, "%-25s%d\n", "Tmb Height: ", p_test->tmb_height);
+      break;
+    case 'Q':
+      p_test->main_quality = atoi(optarg);
+      fprintf(stderr, "%-25s%d\n", "Main quality: ", p_test->main_quality);
+      break;
+    case 'q':
+      p_test->thumb_quality = atoi(optarg);
+      fprintf(stderr, "%-25s%d\n", "Thumb quality: ", p_test->thumb_quality);
+      break;
+    case 'J':
+      p_test->qtable_luma_file = optarg;
+      fprintf(stderr, "%-25s%s\n", "Qtable luma path",
+        p_test->qtable_luma_file);
+      break;
+    case 'K':
+      p_test->qtable_chroma_file = optarg;
+      fprintf(stderr, "%-25s%s\n", "Qtable chroma path",
+        p_test->qtable_chroma_file);
+      break;
+    case 'C':
+      p_test->client_cnt = atoi(optarg);
+      fprintf(stderr, "%-25s%d\n", "Number of clients ",
+        p_test->client_cnt);
+      break; /* bug fix: previously fell through to default */
+    default:;
+    }
+  }
+  fprintf(stderr, "Infiles: %zu Outfiles: %zu\n", in_file_cnt, out_file_cnt);
+
+  if (p_test->client_cnt > MAX_NUM_CLIENT) {
+    fprintf(stderr, "Clients requested exceeds max limit %d\n",
+      MAX_NUM_CLIENT);
+    return 1;
+  }
+  if (in_file_cnt > out_file_cnt) {
+    fprintf(stderr, "%-25s\n", "Insufficient number of output files!");
+    return 1;
+  }
+
+  // Discard the extra out files
+  out_file_cnt = in_file_cnt;
+
+  /* bug fix: snapshot the parsed configuration by value before realloc.
+   * The old code kept a pointer to the pre-realloc block and copied from
+   * it afterwards, which is a use-after-free when realloc moves. */
+  base_cfg = *p_test;
+
+  p_test = realloc(p_test, (in_file_cnt + 1) * sizeof(*p_test));
+  if (!p_test) {
+    LOGE("Error");
+    return 1;
+  }
+  /* zero-fill entries 1..in_file_cnt; the last stays zero as terminator */
+  memset(p_test+1, 0, (in_file_cnt) * sizeof(*p_test));
+
+  for (i = 0; i < in_file_cnt; i++, p_test++) {
+    memcpy(p_test, &base_cfg, sizeof(*p_test));
+    p_test->filename = in_files[i];
+    p_test->out_filename = out_files[i];
+    fprintf(stderr, "Inf: %s Outf: %s\n", in_files[i], out_files[i]);
+  }
+
+  return 0;
+}
+
+/** mm_jpeg_test_print_usage:
+ *
+ *  Print the command-line help text to stderr.
+ **/
+static void mm_jpeg_test_print_usage()
+{
+  fprintf(stderr, "Usage: program_name [options]\n");
+  fprintf(stderr, "Mandatory options:\n");
+  fprintf(stderr, "  -I FILE1 [FILE2] [FILEN]\tList of input files\n");
+  fprintf(stderr, "  -O FILE1 [FILE2] [FILEN]\tList of output files\n");
+  fprintf(stderr, "  -W WIDTH\t\tOutput image width\n");
+  fprintf(stderr, "  -H HEIGHT\t\tOutput image height\n");
+  fprintf(stderr, "  -F \t\tColor format: \n");
+  fprintf(stderr, "\t\t\t\t%s (0), %s (1), %s (2) %s (3)\n"
+      "\t\t\t\t%s (4), %s (5), %s (6) %s (7)\n ",
+      color_formats[0].str, color_formats[1].str,
+      color_formats[2].str, color_formats[3].str,
+      color_formats[4].str, color_formats[5].str,
+      color_formats[6].str, color_formats[7].str);
+  fprintf(stderr, "Optional:\n");
+  /* bug fix: "\t\Encode" contained the invalid escape sequence \E */
+  fprintf(stderr, "  -T \t\tEncode thumbnail\n");
+  fprintf(stderr, "  -x TMB_WIDTH\t\tThumbnail width\n");
+  fprintf(stderr, "  -y TMB_HEIGHT\t\tThumbnail height\n");
+  fprintf(stderr, "  -Q MAIN_QUALITY\t\tMain image quality\n");
+  fprintf(stderr, "  -q TMB_QUALITY\t\tThumbnail image quality\n");
+  fprintf(stderr, "  -B \t\tBurst mode. Utilize both encoder engines on"
+          "supported targets\n");
+  fprintf(stderr, "  -M \t\tUse minimum number of output buffers \n");
+  fprintf(stderr, "  -J \t\tLuma QTable filename. Comma separated 8x8"
+    " matrix\n");
+  fprintf(stderr, "  -K \t\tChroma QTable filename. Comma separated"
+    " 8x8 matrix\n");
+  fprintf(stderr, "  -C \t\tNumber of clients to run in parallel\n");
+  fprintf(stderr, "\n");
+}
+
+/** main:
+ *
+ *  Arguments:
+ *    @argc
+ *    @argv
+ *
+ *  Return:
+ *       0 or -ve values
+ *
+ *  Description:
+ *       main function
+ *
+ **/
+int main(int argc, char* argv[])
+{
+  jpeg_test_input_t *p_test_input;
+  mm_jpeg_intf_test_t client[MAX_NUM_CLIENT];
+  int ret = 0;
+  int i = 0;
+  int thread_cnt = 0;   /* number of successfully created worker threads */
+
+  if (argc > 1) {
+    /* two entries: the base config plus a zero terminator; get_input
+     * reallocs to one entry per input file */
+    p_test_input = calloc(2, sizeof(*p_test_input));
+    if (!p_test_input) {
+      LOGE("Error");
+      goto exit;
+    }
+    memcpy(p_test_input, &jpeg_input[0], sizeof(*p_test_input));
+    /* NOTE(review): get_input grows p_test_input with realloc; if the
+     * block moves, this pointer dangles and the dereference/free below
+     * are use-after-free -- confirm and consider passing &p_test_input */
+    ret = mm_jpeg_test_get_input(argc, argv, p_test_input);
+    if (ret) {
+      LOGE("Error");
+      goto exit;
+    }
+  } else {
+    mm_jpeg_test_print_usage();
+    return 1;
+  }
+
+  /* one worker thread per requested client, all sharing the same input */
+  for (i = 0; i < p_test_input->client_cnt; i++) {
+    memset(&client[i], 0x0, sizeof(mm_jpeg_intf_test_t));
+    ret = encode_init(p_test_input, &client[i], i);
+    if (ret) {
+      LOGE("Error");
+      return -1;
+    }
+
+    ret = pthread_create(&client[i].thread_id, NULL, encode_test,
+      &client[i]);
+    if (ret != 0) {
+       fprintf(stderr, "Error in thread creation\n");
+       break;
+    }
+  }
+
+  thread_cnt = i;
+  for (i = 0; i < thread_cnt; i++) {
+    pthread_join(client[i].thread_id, NULL);
+  }
+
+exit:
+  /* per-client result was reported through client[i].ret by encode_test */
+  for (i = 0; i < thread_cnt; i++) {
+    if (!client[i].ret) {
+      fprintf(stderr, "%-25s %d %s\n", "Client", i, "Success!");
+    } else {
+      fprintf(stderr, "%-25s %d %s\n", "Client", i, "Fail!");
+    }
+  }
+
+  if (argc > 1) {
+    if (p_test_input) {
+      free(p_test_input);
+      p_test_input = NULL;
+    }
+  }
+
+  return ret;
+}
+
+
diff --git a/msmcobalt/QCamera2/stack/mm-jpeg-interface/test/mm_jpegdec_test.c b/msmcobalt/QCamera2/stack/mm-jpeg-interface/test/mm_jpegdec_test.c
new file mode 100644
index 0000000..beb62f5
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-jpeg-interface/test/mm_jpegdec_test.c
@@ -0,0 +1,479 @@
+/* Copyright (c) 2013-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <pthread.h>
+#include <stdlib.h>
+#define TIME_H <SYSTEM_HEADER_PREFIX/time.h>
+#include TIME_H
+
+// JPEG dependencies
+#include "mm_jpeg_interface.h"
+#include "mm_jpeg_ionbuf.h"
+
+// Camera dependencies
+#include "mm_camera_dbg.h"
+
+#define MIN(a,b)  (((a) < (b)) ? (a) : (b))
+#define MAX(a,b)  (((a) > (b)) ? (a) : (b))
+#define CLAMP(x, min, max) MIN(MAX((x), (min)), (max))
+
+#define TIME_IN_US(r) ((uint64_t)r.tv_sec * 1000000LL + (uint64_t)r.tv_usec)
+struct timeval dtime[2];
+
+
+/** DUMP_TO_FILE:
+ *  @filename: file name
+ *  @p_addr: address of the buffer
+ *  @len: buffer length
+ *
+ *  dump the image to the file
+ **/
+#define DUMP_TO_FILE(filename, p_addr, len) ({ \
+  size_t rc = 0; \
+  FILE *fp = fopen(filename, "w+"); \
+  if (fp) { \
+    rc = fwrite(p_addr, 1, len, fp); \
+    fclose(fp); \
+  } else { \
+    LOGE("cannot dump image"); \
+  } \
+})
+
+static int g_count = 1, g_i;
+
+typedef struct {
+  char *filename;
+  int width;
+  int height;
+  char *out_filename;
+  int format;
+} jpeg_test_input_t;
+
+typedef struct {
+  char *filename;
+  int width;
+  int height;
+  char *out_filename;
+  pthread_mutex_t lock;
+  pthread_cond_t cond;
+  buffer_t input;
+  buffer_t output;
+  int use_ion;
+  uint32_t handle;
+  mm_jpegdec_ops_t ops;
+  uint32_t job_id[5];
+  mm_jpeg_decode_params_t params;
+  mm_jpeg_job_t job;
+  uint32_t session_id;
+} mm_jpegdec_intf_test_t;
+
+typedef struct {
+  char *format_str;
+  int eColorFormat;
+} mm_jpegdec_col_fmt_t;
+
+#define ARR_SZ(a) (sizeof(a)/sizeof(a[0]))
+
+static const mm_jpegdec_col_fmt_t col_formats[] =
+{
+  { "YCRCBLP_H2V2",      (int)MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2 },
+  { "YCBCRLP_H2V2",      (int)MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2 },
+  { "YCRCBLP_H2V1",      (int)MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1 },
+  { "YCBCRLP_H2V1",      (int)MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1 },
+  { "YCRCBLP_H1V2",      (int)MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V2 },
+  { "YCBCRLP_H1V2",      (int)MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V2 },
+  { "YCRCBLP_H1V1",      (int)MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V1 },
+  { "YCBCRLP_H1V1",      (int)MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V1 }
+};
+
+/** mm_jpegdec_decode_callback:
+ *
+ *  Decode completion callback: log the decode time, dump the decoded
+ *  frame, and signal the waiting thread once g_count jobs are done.
+ **/
+static void mm_jpegdec_decode_callback(jpeg_job_status_t status,
+  uint32_t client_hdl,
+  uint32_t jobId,
+  mm_jpeg_output_t *p_output,
+  void *userData)
+{
+  mm_jpegdec_intf_test_t *p_obj = (mm_jpegdec_intf_test_t *)userData;
+
+  if (status == JPEG_JOB_STATUS_ERROR) {
+    LOGE("Decode error");
+  } else {
+    gettimeofday(&dtime[1], NULL);
+    LOGE("Decode time %llu ms",
+      ((TIME_IN_US(dtime[1]) - TIME_IN_US(dtime[0]))/1000));
+
+    LOGE("Decode success file%s addr %p len %zu",
+       p_obj->out_filename,
+      p_output->buf_vaddr, p_output->buf_filled_len);
+    DUMP_TO_FILE(p_obj->out_filename, p_output->buf_vaddr, p_output->buf_filled_len);
+  }
+  /* bug fix: update the completion count and signal under the mutex so
+   * the waiting thread cannot miss the wakeup (matches the encoder test
+   * callback, which already held p_obj->lock here) */
+  pthread_mutex_lock(&p_obj->lock);
+  g_i++;
+  if (g_i >= g_count) {
+    LOGE("Signal the thread");
+    pthread_cond_signal(&p_obj->cond);
+  }
+  pthread_mutex_unlock(&p_obj->lock);
+}
+
+/** mm_jpegdec_test_alloc:
+ *
+ *  Back p_buffer (of p_buffer->size bytes) either with ION/pmem memory
+ *  or with plain heap memory. Returns 0 on success, -1 on failure.
+ **/
+int mm_jpegdec_test_alloc(buffer_t *p_buffer, int use_pmem)
+{
+  int ret = 0;
+  /*Allocate buffers*/
+  if (use_pmem) {
+    p_buffer->addr = (uint8_t *)buffer_allocate(p_buffer, 0);
+    if (NULL == p_buffer->addr) {
+      LOGE("Error");
+      return -1;
+    }
+  } else {
+    /* Allocate heap memory */
+    p_buffer->addr = (uint8_t *)malloc(p_buffer->size);
+    if (NULL == p_buffer->addr) {
+      LOGE("Error");
+      return -1;
+    }
+  }
+  return ret;
+}
+
+/** mm_jpegdec_test_free:
+ *
+ *  Release a buffer from mm_jpegdec_test_alloc and scrub the
+ *  descriptor. A valid pmem fd (>= 0) selects the ION path; otherwise
+ *  the memory came from malloc. No-op on an already-zeroed descriptor.
+ **/
+void mm_jpegdec_test_free(buffer_t *p_buffer)
+{
+  if (p_buffer->addr == NULL)
+    return;
+
+  if (p_buffer->p_pmem_fd >= 0)
+    buffer_deallocate(p_buffer);
+  else
+    free(p_buffer->addr);
+
+  /* zero the descriptor so a double free becomes a harmless no-op */
+  memset(p_buffer, 0x0, sizeof(buffer_t));
+}
+
+/** mm_jpegdec_test_read:
+ *
+ *  Size the input buffer from the JPEG file length, allocate it, and
+ *  read the whole file into it.
+ *
+ *  Return: 0 on success, -1 on open/alloc/read failure.
+ **/
+int mm_jpegdec_test_read(mm_jpegdec_intf_test_t *p_obj)
+{
+  int rc = 0;
+  FILE *fp = NULL;
+  size_t file_size = 0;
+  size_t read_len = 0;
+  fp = fopen(p_obj->filename, "rb");
+  if (!fp) {
+    LOGE("error");
+    return -1;
+  }
+  fseek(fp, 0, SEEK_END);
+  file_size = (size_t)ftell(fp);
+  fseek(fp, 0, SEEK_SET);
+
+  LOGE("input file size is %zu",
+     file_size);
+
+  p_obj->input.size = file_size;
+
+  /* allocate buffers */
+  rc = mm_jpegdec_test_alloc(&p_obj->input, p_obj->use_ion);
+  if (rc) {
+    LOGE("Error");
+    fclose(fp);  /* bug fix: fp previously leaked on this path */
+    return -1;
+  }
+
+  /* bug fix: fread can return short on I/O error; treat a partial read
+   * as failure instead of decoding a truncated bitstream */
+  read_len = fread(p_obj->input.addr, 1, p_obj->input.size, fp);
+  if (read_len != p_obj->input.size) {
+    LOGE("error");
+    fclose(fp);
+    return -1;
+  }
+  fclose(fp);
+  return 0;
+}
+
+void chromaScale(mm_jpeg_color_format format, double *cScale)
+{
+  double scale;
+
+  switch(format) {
+    case MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2:
+    case MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2:
+      scale = 1.5;
+      break;
+    case MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1:
+    case MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1:
+    case MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V2:
+    case MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V2:
+      scale = 2.0;
+      break;
+    case MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V1:
+    case MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V1:
+      scale = 3.0;
+      break;
+    case MM_JPEG_COLOR_FORMAT_MONOCHROME:
+      scale = 1.0;
+      break;
+    default:
+      scale = 0;
+      LOGE("color format Error");
+    }
+
+  *cScale = scale;
+}
+
+static int decode_init(jpeg_test_input_t *p_input, mm_jpegdec_intf_test_t *p_obj)
+{
+  int rc = -1;
+  size_t size = (size_t)(CEILING16(p_input->width) * CEILING16(p_input->height));
+  double cScale;
+  mm_jpeg_decode_params_t *p_params = &p_obj->params;
+  mm_jpeg_decode_job_t *p_job_params = &p_obj->job.decode_job;
+
+  p_obj->filename = p_input->filename;
+  p_obj->width = p_input->width;
+  p_obj->height = p_input->height;
+  p_obj->out_filename = p_input->out_filename;
+  p_obj->use_ion = 1;
+
+  pthread_mutex_init(&p_obj->lock, NULL);
+  pthread_cond_init(&p_obj->cond, NULL);
+
+  chromaScale(p_input->format, &cScale);
+  p_obj->output.size = (size_t)((double)size * cScale);
+  rc = mm_jpegdec_test_alloc(&p_obj->output, p_obj->use_ion);
+  if (rc) {
+    LOGE("Error");
+    return -1;
+  }
+
+  rc = mm_jpegdec_test_read(p_obj);
+  if (rc) {
+    LOGE("Error");
+    return -1;
+  }
+
+  /* set encode parameters */
+  p_params->jpeg_cb = mm_jpegdec_decode_callback;
+  p_params->userdata = p_obj;
+  p_params->color_format = p_input->format;
+
+  /* dest buffer config */
+  p_params->dest_buf[0].buf_size = p_obj->output.size;
+  p_params->dest_buf[0].buf_vaddr = p_obj->output.addr;
+  p_params->dest_buf[0].fd = p_obj->output.p_pmem_fd;
+  p_params->dest_buf[0].format = MM_JPEG_FMT_YUV;
+  p_params->dest_buf[0].offset.mp[0].len = (uint32_t)size;
+  p_params->dest_buf[0].offset.mp[1].len =
+    (uint32_t)((double)size * (cScale - 1.0));
+  p_params->dest_buf[0].offset.mp[0].stride = CEILING16(p_input->width);
+  p_params->dest_buf[0].offset.mp[0].scanline = CEILING16(p_input->height);
+  p_params->dest_buf[0].offset.mp[1].stride = CEILING16(p_input->width);
+  p_params->dest_buf[0].offset.mp[1].scanline = CEILING16(p_input->height);
+  p_params->dest_buf[0].index = 0;
+  p_params->num_dst_bufs = 1;
+
+  /* src buffer config*/
+  p_params->src_main_buf[0].buf_size = p_obj->input.size;
+  p_params->src_main_buf[0].buf_vaddr = p_obj->input.addr;
+  p_params->src_main_buf[0].fd = p_obj->input.p_pmem_fd;
+  p_params->src_main_buf[0].index = 0;
+  p_params->src_main_buf[0].format = MM_JPEG_FMT_BITSTREAM;
+  /*
+  p_params->src_main_buf[0].offset.mp[0].len = size;
+  p_params->src_main_buf[0].offset.mp[1].len = size >> 1;
+  */
+  p_params->num_src_bufs = 1;
+
+  p_job_params->dst_index = 0;
+  p_job_params->src_index = 0;
+  p_job_params->rotation = 0;
+
+  /* main dimension */
+  p_job_params->main_dim.src_dim.width = p_obj->width;
+  p_job_params->main_dim.src_dim.height = p_obj->height;
+  p_job_params->main_dim.dst_dim.width = p_obj->width;
+  p_job_params->main_dim.dst_dim.height = p_obj->height;
+  p_job_params->main_dim.crop.top = 0;
+  p_job_params->main_dim.crop.left = 0;
+  p_job_params->main_dim.crop.width = p_obj->width;
+  p_job_params->main_dim.crop.height = p_obj->height;
+
+
+  return 0;
+}
+
/* Print command-line usage for the decoder test, including the list of
 * selectable color-format indices taken from col_formats[]. */
void omx_test_dec_print_usage()
{
  fprintf(stderr, "Usage: program_name [options]\n");
  fprintf(stderr, "Mandatory options:\n");
  fprintf(stderr, "  -I FILE\t\tPath to the input file.\n");
  fprintf(stderr, "  -O FILE\t\tPath for the output file.\n");
  fprintf(stderr, "  -W WIDTH\t\tOutput image width\n");
  fprintf(stderr, "  -H HEIGHT\t\tOutput image height\n");
  fprintf(stderr, "Optional:\n");
  fprintf(stderr, "  -F FORMAT\t\tDefault image format:\n");
  /* enumerate the first eight entries of col_formats with their -F index */
  fprintf(stderr, "\t\t\t\t%s (0), %s (1), %s (2) %s (3)\n"
    "%s (4), %s (5), %s (6) %s (7)\n",
    col_formats[0].format_str, col_formats[1].format_str,
    col_formats[2].format_str, col_formats[3].format_str,
    col_formats[4].format_str, col_formats[5].format_str,
    col_formats[6].format_str, col_formats[7].format_str
    );

  fprintf(stderr, "\n");
}
+
+static int mm_jpegdec_test_get_input(int argc, char *argv[],
+    jpeg_test_input_t *p_test)
+{
+  int c;
+
+  while ((c = getopt(argc, argv, "I:O:W:H:F:")) != -1) {
+    switch (c) {
+    case 'O':
+      p_test->out_filename = optarg;
+      fprintf(stderr, "%-25s%s\n", "Output image path",
+        p_test->out_filename);
+      break;
+    case 'I':
+      p_test->filename = optarg;
+      fprintf(stderr, "%-25s%s\n", "Input image path", p_test->filename);
+      break;
+    case 'W':
+      p_test->width = atoi(optarg);
+      fprintf(stderr, "%-25s%d\n", "Default width", p_test->width);
+      break;
+    case 'H':
+      p_test->height = atoi(optarg);
+      fprintf(stderr, "%-25s%d\n", "Default height", p_test->height);
+      break;
+    case 'F': {
+      int format = 0;
+      format = atoi(optarg);
+      int num_formats = ARR_SZ(col_formats);
+      format = CLAMP(format, 0, num_formats);
+      p_test->format = col_formats[format].eColorFormat;
+      fprintf(stderr, "%-25s%s\n", "Default image format",
+        col_formats[format].format_str);
+      break;
+    }
+    default:;
+    }
+  }
+  if (!p_test->filename || !p_test->filename || !p_test->width ||
+      !p_test->height) {
+    fprintf(stderr, "Missing required arguments.\n");
+    omx_test_dec_print_usage();
+    return -1;
+  }
+  return 0;
+}
+
+static int decode_test(jpeg_test_input_t *p_input)
+{
+  int rc = 0;
+  mm_jpegdec_intf_test_t jpeg_obj;
+  int i = 0;
+
+  memset(&jpeg_obj, 0x0, sizeof(jpeg_obj));
+  rc = decode_init(p_input, &jpeg_obj);
+  if (rc) {
+    LOGE("Error");
+    return -1;
+  }
+
+  jpeg_obj.handle = jpegdec_open(&jpeg_obj.ops);
+  if (jpeg_obj.handle == 0) {
+    LOGE("Error");
+    goto end;
+  }
+
+  rc = jpeg_obj.ops.create_session(jpeg_obj.handle, &jpeg_obj.params,
+    &jpeg_obj.job.decode_job.session_id);
+  if (jpeg_obj.job.decode_job.session_id == 0) {
+    LOGE("Error");
+    goto end;
+  }
+
+  for (i = 0; i < g_count; i++) {
+    jpeg_obj.job.job_type = JPEG_JOB_TYPE_DECODE;
+
+    LOGE("Starting decode job");
+    gettimeofday(&dtime[0], NULL);
+
+    fprintf(stderr, "Starting decode of %s into %s outw %d outh %d\n\n",
+        p_input->filename, p_input->out_filename,
+        p_input->width, p_input->height);
+    rc = jpeg_obj.ops.start_job(&jpeg_obj.job, &jpeg_obj.job_id[i]);
+    if (rc) {
+      LOGE("Error");
+      goto end;
+    }
+  }
+
+  /*
+  usleep(5);
+  jpeg_obj.ops.abort_job(jpeg_obj.job_id[0]);
+  */
+  pthread_mutex_lock(&jpeg_obj.lock);
+  pthread_cond_wait(&jpeg_obj.cond, &jpeg_obj.lock);
+  pthread_mutex_unlock(&jpeg_obj.lock);
+
+  fprintf(stderr, "Decode time %llu ms\n",
+      ((TIME_IN_US(dtime[1]) - TIME_IN_US(dtime[0]))/1000));
+
+
+  jpeg_obj.ops.destroy_session(jpeg_obj.job.decode_job.session_id);
+
+  jpeg_obj.ops.close(jpeg_obj.handle);
+
+
+end:
+  mm_jpegdec_test_free(&jpeg_obj.input);
+  mm_jpegdec_test_free(&jpeg_obj.output);
+  return 0;
+}
+
+/** main:
+ *
+ *  Arguments:
+ *    @argc
+ *    @argv
+ *
+ *  Return:
+ *       0 or -ve values
+ *
+ *  Description:
+ *       main function
+ *
+ **/
+int main(int argc, char* argv[])
+{
+  jpeg_test_input_t dec_test_input;
+  int ret;
+
+  memset(&dec_test_input, 0, sizeof(dec_test_input));
+  ret = mm_jpegdec_test_get_input(argc, argv, &dec_test_input);
+
+  if (ret) {
+    return -1;
+  }
+
+  return decode_test(&dec_test_input);
+}
+
+
diff --git a/msmcobalt/QCamera2/stack/mm-lib2d-interface/Android.mk b/msmcobalt/QCamera2/stack/mm-lib2d-interface/Android.mk
new file mode 100644
index 0000000..696f04a
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-lib2d-interface/Android.mk
@@ -0,0 +1,39 @@
# Build libmmlib2d_interface: shared-library wrapper around the
# imglib lib2d component (src/mm_lib2d.c).
OLD_LOCAL_PATH := $(LOCAL_PATH)
LOCAL_PATH := $(call my-dir)

include $(LOCAL_PATH)/../../../common.mk
include $(CLEAR_VARS)

LOCAL_32_BIT_ONLY := $(BOARD_QTI_CAMERA_32BIT_ONLY)
LOCAL_CFLAGS+= -D_ANDROID_ -DQCAMERA_REDEFINE_LOG

LOCAL_CFLAGS += -Wall -Wextra -Werror -Wno-unused-parameter

LOCAL_C_INCLUDES+= $(kernel_includes)
LOCAL_ADDITIONAL_DEPENDENCIES := $(common_deps)

IMGLIB_HEADER_PATH := $(TARGET_OUT_INTERMEDIATES)/include/mm-camera/imglib

LOCAL_C_INCLUDES += \
    $(IMGLIB_HEADER_PATH) \
    $(LOCAL_PATH)/inc \
    $(LOCAL_PATH)/../common \
    $(LOCAL_PATH)/../mm-camera-interface/inc \

ifeq ($(strip $(TARGET_USES_ION)),true)
    LOCAL_CFLAGS += -DUSE_ION
endif


LOCAL_SRC_FILES := \
    src/mm_lib2d.c

LOCAL_MODULE           := libmmlib2d_interface
LOCAL_PRELINK_MODULE   := false
LOCAL_SHARED_LIBRARIES := libdl libcutils liblog libmmcamera_interface
LOCAL_MODULE_TAGS := optional

# NOTE(review): LOCAL_32_BIT_ONLY is already assigned the same value above;
# this second assignment is redundant (harmless, but one copy could go).
LOCAL_32_BIT_ONLY := $(BOARD_QTI_CAMERA_32BIT_ONLY)
include $(BUILD_SHARED_LIBRARY)

LOCAL_PATH := $(OLD_LOCAL_PATH)
diff --git a/msmcobalt/QCamera2/stack/mm-lib2d-interface/inc/mm_lib2d.h b/msmcobalt/QCamera2/stack/mm-lib2d-interface/inc/mm_lib2d.h
new file mode 100644
index 0000000..d1e69b4
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-lib2d-interface/inc/mm_lib2d.h
@@ -0,0 +1,209 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef MM_LIB2D_H_
+#define MM_LIB2D_H_
+
+#include "cam_types.h"
+#ifdef QCAMERA_REDEFINE_LOG
+#ifndef CAM_MODULE
+#define CAM_MODULE CAM_NO_MODULE
+#endif
+// Camera dependencies
+#include "mm_camera_dbg.h"
+#endif
+
/** lib2d_error
 * @MM_LIB2D_SUCCESS: Success
 * @MM_LIB2D_ERR_GENERAL: General Error
 * @MM_LIB2D_ERR_MEMORY: Insufficient memory error
 * @MM_LIB2D_ERR_BAD_PARAM: Bad params error
 *
 * Return codes shared by every mm_lib2d entry point.
**/
typedef enum lib2d_error_t {
  MM_LIB2D_SUCCESS,
  MM_LIB2D_ERR_GENERAL,
  MM_LIB2D_ERR_MEMORY,
  MM_LIB2D_ERR_BAD_PARAM,
} lib2d_error;
+
/** lib2d_mode
 * @MM_LIB2D_SYNC_MODE: Synchronous mode
 * @MM_LIB2D_ASYNC_MODE: Asynchronous mode
 *
 * Execution mode requested by the client in mm_lib2d_init().
**/
typedef enum mm_lib2d_mode_t {
  MM_LIB2D_SYNC_MODE,
  MM_LIB2D_ASYNC_MODE,
} lib2d_mode;
+
/** mm_lib2d_buffer_type
 * @MM_LIB2D_BUFFER_TYPE_RGB: RGB Buffer type
 * @MM_LIB2D_BUFFER_TYPE_YUV: YUV buffer type
 *
 * Selects which member of the mm_lib2d_buffer union is valid.
**/
typedef enum mm_lib2d_buffer_type_t {
  MM_LIB2D_BUFFER_TYPE_RGB,
  MM_LIB2D_BUFFER_TYPE_YUV,
} mm_lib2d_buffer_type;
+
/** mm_lib2d_rgb_buffer
 * @fd: handle to the buffer memory
 * @format: RGB color format
 * @width: defines width in pixels
 * @height: defines height in pixels
 * @buffer: pointer to the RGB buffer
 * @phys: gpu mapped physical address
 * @stride: defines stride in bytes
 *
 * Describes a single interleaved RGB surface.
**/
typedef struct mm_lib2d_rgb_buffer_t {
  int32_t      fd;
  cam_format_t format;
  uint32_t     width;
  uint32_t     height;
  void        *buffer;
  void        *phys;
  int32_t      stride;
} mm_lib2d_rgb_buffer;
+
/** mm_lib2d_yuv_buffer
 * @fd: handle to the buffer memory
 * @format: YUV color format
 * @width: defines width in pixels
 * @height: defines height in pixels
 * @plane0: holds the whole buffer if YUV format is not planar
 * @phys0: gpu mapped physical address
 * @stride0: stride in bytes
 * @plane1: holds UV or VU plane for planar interleaved
 * @phys1: gpu mapped physical address
 * @stride1: stride in bytes
 * @plane2: holds the 3rd plane, ignored if YUV format is not planar
 * @phys2: gpu mapped physical address
 * @stride2: stride in bytes
**/
typedef struct mm_lib2d_yuv_buffer_t {
  int32_t      fd;
  cam_format_t format;
  uint32_t     width;
  uint32_t     height;
  void        *plane0;
  void        *phys0;
  int32_t      stride0;
  void        *plane1;
  void        *phys1;
  int32_t      stride1;
  void        *plane2;
  void        *phys2;
  int32_t      stride2;
} mm_lib2d_yuv_buffer;
+
/** mm_lib2d_buffer
 * @buffer_type: Buffer type. whether RGB or YUV
 * @rgb_buffer: RGB buffer handle
 * @yuv_buffer: YUV buffer handle
 *
 * Tagged union: buffer_type selects which anonymous-union member
 * (rgb_buffer or yuv_buffer) is populated.
**/
typedef struct mm_lib2d_buffer_t {
  mm_lib2d_buffer_type buffer_type;
  union {
    mm_lib2d_rgb_buffer rgb_buffer;
    mm_lib2d_yuv_buffer yuv_buffer;
  };
} mm_lib2d_buffer;
+
/** lib2d_client_cb
 * @userdata: App userdata
 * @jobid: job id
 *
 * Client completion callback invoked when a submitted job finishes.
**/
typedef lib2d_error (*lib2d_client_cb) (void *userdata, int jobid);
+
+/**
+ * Function: mm_lib2d_init
+ *
+ * Description: Initialization function for Lib2D. src_format, dst_format
+ *     are hints to the underlying component to initialize.
+ *
+ * Input parameters:
+ *   mode - Mode (sync/async) in which App wants lib2d to run.
+ *   src_format - source surface format
+ *   dst_format - Destination surface format
 * my_obj - handle that will be returned on successful Init. App has to
+ *       call other lib2d functions by passing this handle.
+ *
+ * Return values:
+ *   MM_LIB2D_SUCCESS
+ *   MM_LIB2D_ERR_MEMORY
+ *   MM_LIB2D_ERR_BAD_PARAM
+ *   MM_LIB2D_ERR_GENERAL
+ *
+ * Notes: none
+ **/
+lib2d_error mm_lib2d_init(lib2d_mode mode, cam_format_t src_format,
+  cam_format_t dst_format, void **lib2d_obj_handle);
+
+/**
+ * Function: mm_lib2d_deinit
+ *
+ * Description: De-Initialization function for Lib2D
+ *
+ * Input parameters:
 *   lib2d_obj_handle - handle to the lib2d object
+ *
+ * Return values:
+ *   MM_LIB2D_SUCCESS
+ *   MM_LIB2D_ERR_GENERAL
+ *
+ * Notes: none
+ **/
+lib2d_error mm_lib2d_deinit(void *lib2d_obj_handle);
+
+/**
+ * Function: mm_lib2d_start_job
+ *
+ * Description: Start executing the job
+ *
+ * Input parameters:
 *   lib2d_obj_handle - handle to the lib2d object
+ *   src_buffer - pointer to the source buffer
+ *   dst_buffer - pointer to the destination buffer
+ *   jobid - job id of this request
+ *   userdata - userdata that will be pass through callback function
+ *   cb - callback function that will be called on completion of this job
+ *   rotation - rotation to be applied
+ *
+ * Return values:
+ *   MM_LIB2D_SUCCESS
+ *   MM_LIB2D_ERR_MEMORY
+ *   MM_LIB2D_ERR_GENERAL
+ *
+ * Notes: none
+ **/
+lib2d_error mm_lib2d_start_job(void *lib2d_obj_handle,
+    mm_lib2d_buffer* src_buffer, mm_lib2d_buffer* dst_buffer,
+    int jobid, void *userdata, lib2d_client_cb cb, uint32_t rotation);
+
+#endif /* MM_LIB2D_H_ */
+
+
diff --git a/msmcobalt/QCamera2/stack/mm-lib2d-interface/src/mm_lib2d.c b/msmcobalt/QCamera2/stack/mm-lib2d-interface/src/mm_lib2d.c
new file mode 100644
index 0000000..bd322d2
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-lib2d-interface/src/mm_lib2d.c
@@ -0,0 +1,606 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// To remove
+#include <utils/Log.h>
+
+// System dependencies
+#include <errno.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <pthread.h>
+#include <sys/ioctl.h>
+#include <sys/prctl.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+
+// Camera dependencies
+#include "img_common.h"
+#include "img_comp.h"
+#include "img_comp_factory.h"
+#include "img_buffer.h"
+#include "lib2d.h"
+#include "mm_lib2d.h"
+#include "img_meta.h"
+
/** lib2d_job_private_info
 * @jobid: Job id of this process request
 * @userdata: Client userdata that will be passed on callback
 * @lib2d_client_cb: Application's callback function pointer
 *     which will be called upon completion of current job.
 *
 * Allocated per job in mm_lib2d_start_job() and attached to both the
 * input and output frames' private_data; freed once in
 * lib2d_callback_handler().
**/
typedef struct lib2d_job_private_info_t {
  int   jobid;
  void *userdata;
  lib2d_error (*lib2d_client_cb) (void *userdata, int jobid);
} lib2d_job_private_info;
+
/** img_lib_t
 * @ptr: handle to imglib library (from dlopen)
 * @img_core_get_comp: function pointer for img_core_get_comp
 * @img_wait_for_completion: function pointer for img_wait_for_completion
 *
 * Symbols are resolved with dlsym in mm_lib2d_init().
**/
typedef struct {
  void *ptr;
  int (*img_core_get_comp) (img_comp_role_t role, char *name,
    img_core_ops_t *p_ops);
  int (*img_wait_for_completion) (pthread_cond_t *p_cond,
    pthread_mutex_t *p_mutex, int32_t ms);
} img_lib_t;
+
/** mm_lib2d_obj
 * @core_ops: image core ops structure handle
 * @comp: component structure handle
 * @comp_mode: underlying component mode
 * @lib2d_mode: lib2d mode requested by client
 * @img_lib: imglib library, function ptrs handle
 * @mutex: lib2d mutex used for synchronization
 * @cond: lib2d cond used for synchronization
**/
typedef struct mm_lib2d_obj_t {
  img_core_ops_t      core_ops;
  img_component_ops_t comp;
  img_comp_mode_t     comp_mode;
  lib2d_mode          lib2d_mode;
  img_lib_t           img_lib;
  pthread_mutex_t     mutex;
  pthread_cond_t      cond;
} mm_lib2d_obj;
+
+
+/**
+ * Function: lib2d_event_handler
+ *
+ * Description: Event handler. All the component events
+ *     are received here.
+ *
+ * Input parameters:
+ *   p_appdata - lib2d test object
+ *   p_event - pointer to the event
+ *
+ * Return values:
+ *   IMG_SUCCESS
+ *   IMG_ERR_INVALID_INPUT
+ *
+ * Notes: none
+ **/
+int lib2d_event_handler(void* p_appdata, img_event_t *p_event)
+{
+  mm_lib2d_obj *lib2d_obj = (mm_lib2d_obj *)p_appdata;
+
+  if ((NULL == p_event) || (NULL == p_appdata)) {
+    LOGE("invalid event");
+    return IMG_ERR_INVALID_INPUT;
+  }
+
+  LOGD("type %d", p_event->type);
+
+  switch (p_event->type) {
+    case QIMG_EVT_DONE:
+      pthread_cond_signal(&lib2d_obj->cond);
+      break;
+    default:;
+  }
+  return IMG_SUCCESS;
+}
+
+/**
+ * Function: lib2d_callback_handler
+ *
+ * Description: Callback handler. Registered with Component
+ *     on IMG_COMP_INIT. Will be called when processing
+ *     of current request is completed. If component running in
+ *     async mode, this is where client will know the execution
+ *     is finished for in, out frames.
+ *
+ * Input parameters:
+ *   p_appdata - lib2d test object
+ *   p_in_frame - pointer to input frame
+ *   p_out_frame - pointer to output frame
+ *   p_meta - pointer to meta data
+ *
+ * Return values:
+ *   IMG_SUCCESS
+ *   IMG_ERR_GENERAL
+ *
+ * Notes: none
+ **/
+int lib2d_callback_handler(void *userdata, img_frame_t *p_in_frame,
+  img_frame_t *p_out_frame, img_meta_t *p_meta)
+{
+  mm_lib2d_obj *lib2d_obj = (mm_lib2d_obj *)userdata;
+  lib2d_job_private_info *job_info = NULL;
+
+  if (NULL == userdata) {
+    LOGE("invalid event");
+    return IMG_ERR_INVALID_INPUT;
+  }
+
+  // assert(p_in_frame->private_data == p_out_frame->private_data);
+
+  job_info = (lib2d_job_private_info *)p_in_frame->private_data;
+  if (job_info->lib2d_client_cb != NULL) {
+    job_info->lib2d_client_cb(job_info->userdata, job_info->jobid);
+  }
+
+  free(p_in_frame->private_data);
+  free(p_in_frame);
+  free(p_out_frame);
+  free(p_meta);
+
+  return IMG_SUCCESS;
+}
+
/**
 * Function: lib2d_fill_img_frame
 *
 * Description: Setup img_frame_t for given buffer
 *
 * Input parameters:
 *   p_frame - pointer to img_frame_t that needs to be setup
 *   lib2d_buffer - pointer to input buffer
 *   jobid - job id
 *
 * Return values:
 *   MM_LIB2D_SUCCESS
 *   MM_LIB2D_ERR_GENERAL
 *
 * Notes: none
 **/
lib2d_error lib2d_fill_img_frame(img_frame_t *p_frame,
  mm_lib2d_buffer* lib2d_buffer, int jobid)
{
  // use job id for now
  p_frame->frame_cnt = jobid;
  p_frame->idx       = jobid;
  p_frame->frame_id  = jobid;

  if (lib2d_buffer->buffer_type == MM_LIB2D_BUFFER_TYPE_RGB) {
    mm_lib2d_rgb_buffer *rgb_buffer = &lib2d_buffer->rgb_buffer;

    // RGB is described as one interleaved plane; length is stride*height
    p_frame->info.num_planes = 1;
    p_frame->info.width      = rgb_buffer->width;
    p_frame->info.height     = rgb_buffer->height;

    p_frame->frame[0].plane_cnt = 1;
    p_frame->frame[0].plane[0].plane_type = PLANE_ARGB;
    p_frame->frame[0].plane[0].addr       = rgb_buffer->buffer;
    p_frame->frame[0].plane[0].stride     = rgb_buffer->stride;
    p_frame->frame[0].plane[0].length     = (rgb_buffer->stride *
                                             rgb_buffer->height);
    p_frame->frame[0].plane[0].fd         = rgb_buffer->fd;
    p_frame->frame[0].plane[0].height     = rgb_buffer->height;
    p_frame->frame[0].plane[0].width      = rgb_buffer->width;
    p_frame->frame[0].plane[0].offset     = 0;
    p_frame->frame[0].plane[0].scanline   = rgb_buffer->height;
  } else if (lib2d_buffer->buffer_type == MM_LIB2D_BUFFER_TYPE_YUV) {
    mm_lib2d_yuv_buffer *yuv_buffer = &lib2d_buffer->yuv_buffer;

    // Semi-planar layout: plane 0 is Y, plane 1 the interleaved chroma
    p_frame->info.num_planes = 2;
    p_frame->info.width      = yuv_buffer->width;
    p_frame->info.height     = yuv_buffer->height;

    p_frame->frame[0].plane_cnt = 2;
    p_frame->frame[0].plane[0].plane_type = PLANE_Y;
    p_frame->frame[0].plane[0].addr       = yuv_buffer->plane0;
    p_frame->frame[0].plane[0].stride     = yuv_buffer->stride0;
    p_frame->frame[0].plane[0].length     = (yuv_buffer->stride0 *
                                             yuv_buffer->height);
    p_frame->frame[0].plane[0].fd         = yuv_buffer->fd;
    p_frame->frame[0].plane[0].height     = yuv_buffer->height;
    p_frame->frame[0].plane[0].width      = yuv_buffer->width;
    p_frame->frame[0].plane[0].offset     = 0;
    p_frame->frame[0].plane[0].scanline   = yuv_buffer->height;

    // NOTE(review): for YUV formats other than NV12/NV21 plane[1].plane_type
    // is left unset here — confirm callers only ever pass NV12 or NV21.
    if (yuv_buffer->format == CAM_FORMAT_YUV_420_NV12) {
      p_frame->frame[0].plane[1].plane_type = PLANE_CB_CR;
    } else if(yuv_buffer->format == CAM_FORMAT_YUV_420_NV21) {
      p_frame->frame[0].plane[1].plane_type = PLANE_CR_CB;
    }
    // chroma plane covers half the rows, hence stride1 * height / 2
    p_frame->frame[0].plane[1].addr       = yuv_buffer->plane1;
    p_frame->frame[0].plane[1].stride     = yuv_buffer->stride1;
    p_frame->frame[0].plane[1].length     = (yuv_buffer->stride1 *
                                             yuv_buffer->height / 2);
    p_frame->frame[0].plane[1].fd         = yuv_buffer->fd;
    p_frame->frame[0].plane[1].height     = yuv_buffer->height;
    p_frame->frame[0].plane[1].width      = yuv_buffer->width;
    p_frame->frame[0].plane[1].offset     = 0;
    p_frame->frame[0].plane[1].scanline   = yuv_buffer->height;
  } else {
    return MM_LIB2D_ERR_GENERAL;
  }

  return MM_LIB2D_SUCCESS;
}
+
+/**
+ * Function: mm_lib2d_init
+ *
+ * Description: Initialization function for Lib2D. src_format, dst_format
+ *     are hints to the underlying component to initialize.
+ *
+ * Input parameters:
+ *   mode - Mode (sync/async) in which App wants lib2d to run.
+ *   src_format - source surface format
+ *   dst_format - Destination surface format
 *   my_obj - handle that will be returned on successful Init. App has to
+ *       call other lib2d functions by passing this handle.
+ *
+ * Return values:
+ *   MM_LIB2D_SUCCESS
+ *   MM_LIB2D_ERR_MEMORY
+ *   MM_LIB2D_ERR_BAD_PARAM
+ *   MM_LIB2D_ERR_GENERAL
+ *
+ * Notes: none
+ **/
+
+lib2d_error mm_lib2d_init(lib2d_mode mode, cam_format_t src_format,
+  cam_format_t dst_format, void **my_obj)
+{
+  int32_t              rc         = IMG_SUCCESS;
+  mm_lib2d_obj        *lib2d_obj  = NULL;
+  img_core_ops_t      *p_core_ops = NULL;
+  img_component_ops_t *p_comp     = NULL;
+
+  if (my_obj == NULL) {
+    return MM_LIB2D_ERR_BAD_PARAM;
+  }
+
+  // validate src_format, dst_format to check whether we support these.
+  // Currently support NV21 to ARGB conversions only. Others not tested.
+  if ((src_format != CAM_FORMAT_YUV_420_NV21) ||
+    (dst_format != CAM_FORMAT_8888_ARGB)) {
+    LOGE("Formats conversion from %d to %d not supported",
+        src_format, dst_format);
+  }
+
+  lib2d_obj = malloc(sizeof(mm_lib2d_obj));
+  if (lib2d_obj == NULL) {
+    return MM_LIB2D_ERR_MEMORY;
+  }
+
+  // Open libmmcamera_imglib
+  lib2d_obj->img_lib.ptr = dlopen("libmmcamera_imglib.so", RTLD_NOW);
+  if (!lib2d_obj->img_lib.ptr) {
+    LOGE("ERROR: couldn't dlopen libmmcamera_imglib.so: %s",
+       dlerror());
+    goto FREE_LIB2D_OBJ;
+  }
+
+  /* Get function pointer for functions supported by C2D */
+  *(void **)&lib2d_obj->img_lib.img_core_get_comp =
+      dlsym(lib2d_obj->img_lib.ptr, "img_core_get_comp");
+  *(void **)&lib2d_obj->img_lib.img_wait_for_completion =
+      dlsym(lib2d_obj->img_lib.ptr, "img_wait_for_completion");
+
+  /* Validate function pointers */
+  if ((lib2d_obj->img_lib.img_core_get_comp == NULL) ||
+    (lib2d_obj->img_lib.img_wait_for_completion == NULL)) {
+    LOGE(" ERROR mapping symbols from libc2d2.so");
+    goto FREE_LIB2D_OBJ;
+  }
+
+  p_core_ops = &lib2d_obj->core_ops;
+  p_comp     = &lib2d_obj->comp;
+
+  pthread_mutex_init(&lib2d_obj->mutex, NULL);
+  pthread_cond_init(&lib2d_obj->cond, NULL);
+
+  rc = lib2d_obj->img_lib.img_core_get_comp(IMG_COMP_LIB2D,
+    "qti.lib2d", p_core_ops);
+  if (rc != IMG_SUCCESS) {
+    LOGE("rc %d", rc);
+    goto FREE_LIB2D_OBJ;
+  }
+
+  rc = IMG_COMP_LOAD(p_core_ops, NULL);
+  if (rc != IMG_SUCCESS) {
+    LOGE("rc %d", rc);
+    goto FREE_LIB2D_OBJ;
+  }
+
+  rc = IMG_COMP_CREATE(p_core_ops, p_comp);
+  if (rc != IMG_SUCCESS) {
+    LOGE("rc %d", rc);
+    goto COMP_UNLOAD;
+  }
+
+  rc = IMG_COMP_INIT(p_comp, (void *)lib2d_obj, lib2d_callback_handler);
+  if (rc != IMG_SUCCESS) {
+    LOGE("rc %d", rc);
+    goto COMP_UNLOAD;
+  }
+
+  rc = IMG_COMP_SET_CB(p_comp, lib2d_event_handler);
+  if (rc != IMG_SUCCESS) {
+    LOGE("rc %d", rc);
+    goto COMP_DEINIT;
+  }
+
+  lib2d_obj->lib2d_mode = mode;
+  img_comp_mode_t comp_mode;
+  if (lib2d_obj->lib2d_mode == MM_LIB2D_SYNC_MODE) {
+    comp_mode = IMG_SYNC_MODE;
+  } else {
+    comp_mode = IMG_ASYNC_MODE;
+  }
+
+  // Set source format
+  rc = IMG_COMP_SET_PARAM(p_comp, QLIB2D_SOURCE_FORMAT, (void *)&src_format);
+  if (rc != IMG_SUCCESS) {
+    LOGE("rc %d", rc);
+    goto COMP_DEINIT;
+  }
+
+  // Set destination format
+  rc = IMG_COMP_SET_PARAM(p_comp, QLIB2D_DESTINATION_FORMAT,
+    (void *)&dst_format);
+  if (rc != IMG_SUCCESS) {
+    LOGE("rc %d", rc);
+    goto COMP_DEINIT;
+  }
+
+  // Try setting the required mode.
+  rc = IMG_COMP_SET_PARAM(p_comp, QIMG_PARAM_MODE, (void *)&comp_mode);
+  if (rc != IMG_SUCCESS) {
+    LOGE("rc %d", rc);
+    goto COMP_DEINIT;
+  }
+
+  // Get the mode to make sure whether the component is really running
+  // in the mode what we set.
+  rc = IMG_COMP_GET_PARAM(p_comp, QIMG_PARAM_MODE,
+    (void *)&lib2d_obj->comp_mode);
+  if (rc != IMG_SUCCESS) {
+    LOGE("rc %d", rc);
+    goto COMP_DEINIT;
+  }
+
+  if (comp_mode != lib2d_obj->comp_mode) {
+    LOGD("Component is running in %d mode",
+      lib2d_obj->comp_mode);
+  }
+
+  *my_obj = (void *)lib2d_obj;
+
+  return MM_LIB2D_SUCCESS;
+
+COMP_DEINIT :
+  rc = IMG_COMP_DEINIT(p_comp);
+  if (rc != IMG_SUCCESS) {
+    LOGE("rc %d", rc);
+    return MM_LIB2D_ERR_GENERAL;
+  }
+
+COMP_UNLOAD :
+  rc = IMG_COMP_UNLOAD(p_core_ops);
+  if (rc != IMG_SUCCESS) {
+    LOGE("rc %d", rc);
+    return MM_LIB2D_ERR_GENERAL;
+  }
+
+FREE_LIB2D_OBJ :
+  free(lib2d_obj);
+  return MM_LIB2D_ERR_GENERAL;
+}
+
+/**
+ * Function: mm_lib2d_deinit
+ *
+ * Description: De-Initialization function for Lib2D
+ *
+ * Input parameters:
 *   lib2d_obj_handle - handle to the lib2d object
+ *
+ * Return values:
+ *   MM_LIB2D_SUCCESS
+ *   MM_LIB2D_ERR_GENERAL
+ *
+ * Notes: none
+ **/
+lib2d_error mm_lib2d_deinit(void *lib2d_obj_handle)
+{
+  mm_lib2d_obj        *lib2d_obj  = (mm_lib2d_obj *)lib2d_obj_handle;
+  int                  rc         = IMG_SUCCESS;
+  img_core_ops_t      *p_core_ops = &lib2d_obj->core_ops;
+  img_component_ops_t *p_comp     = &lib2d_obj->comp;
+
+  rc = IMG_COMP_DEINIT(p_comp);
+  if (rc != IMG_SUCCESS) {
+    LOGE("rc %d", rc);
+    return MM_LIB2D_ERR_GENERAL;
+  }
+
+  rc = IMG_COMP_UNLOAD(p_core_ops);
+  if (rc != IMG_SUCCESS) {
+    LOGE("rc %d", rc);
+    return MM_LIB2D_ERR_GENERAL;
+  }
+
+  dlclose(lib2d_obj->img_lib.ptr);
+  free(lib2d_obj);
+
+  return MM_LIB2D_SUCCESS;
+}
+
+/**
+ * Function: mm_lib2d_start_job
+ *
+ * Description: Start executing the job
+ *
+ * Input parameters:
 *   lib2d_obj_handle - handle to the lib2d object
+ *   src_buffer - pointer to the source buffer
+ *   dst_buffer - pointer to the destination buffer
+ *   jobid - job id of this request
+ *   userdata - userdata that will be pass through callback function
+ *   cb - callback function that will be called on completion of this job
+ *   rotation - rotation to be applied
+ *
+ * Return values:
+ *   MM_LIB2D_SUCCESS
+ *   MM_LIB2D_ERR_MEMORY
+ *   MM_LIB2D_ERR_GENERAL
+ *
+ * Notes: none
+ **/
+lib2d_error mm_lib2d_start_job(void *lib2d_obj_handle,
+  mm_lib2d_buffer* src_buffer, mm_lib2d_buffer* dst_buffer,
+  int jobid, void *userdata, lib2d_client_cb cb, uint32_t rotation)
+{
+  mm_lib2d_obj        *lib2d_obj  = (mm_lib2d_obj *)lib2d_obj_handle;
+  int                  rc         = IMG_SUCCESS;
+  img_core_ops_t      *p_core_ops = &lib2d_obj->core_ops;
+  img_component_ops_t *p_comp     = &lib2d_obj->comp;
+
+  img_frame_t *p_in_frame = malloc(sizeof(img_frame_t));
+  if (p_in_frame == NULL) {
+    return MM_LIB2D_ERR_MEMORY;
+  }
+
+  img_frame_t *p_out_frame = malloc(sizeof(img_frame_t));
+  if (p_out_frame == NULL) {
+    free(p_in_frame);
+    return MM_LIB2D_ERR_MEMORY;
+  }
+
+  img_meta_t *p_meta = malloc(sizeof(img_meta_t));
+  if (p_meta == NULL) {
+    free(p_in_frame);
+    free(p_out_frame);
+    return MM_LIB2D_ERR_MEMORY;
+  }
+
+  lib2d_job_private_info *p_job_info = malloc(sizeof(lib2d_job_private_info));
+  if (p_out_frame == NULL) {
+    free(p_in_frame);
+    free(p_out_frame);
+    free(p_meta);
+    return MM_LIB2D_ERR_MEMORY;
+  }
+
+  memset(p_in_frame,  0x0, sizeof(img_frame_t));
+  memset(p_out_frame, 0x0, sizeof(img_frame_t));
+  memset(p_meta, 0x0, sizeof(img_meta_t));
+  memset(p_job_info,  0x0, sizeof(lib2d_job_private_info));
+
+  // Fill up job info private data structure that can be used in callback to
+  // inform back to the client.
+  p_job_info->jobid           = jobid;
+  p_job_info->userdata        = userdata;
+  p_job_info->lib2d_client_cb = cb;
+
+  p_in_frame->private_data  = (void *)p_job_info;
+  p_out_frame->private_data = (void *)p_job_info;
+
+  // convert the input info into component understandble data structures
+
+  // Prepare Input, output frames
+  lib2d_fill_img_frame(p_in_frame, src_buffer, jobid);
+  lib2d_fill_img_frame(p_out_frame, dst_buffer, jobid);
+
+  p_meta->frame_id = jobid;
+  p_meta->rotation.device_rotation = (int32_t)rotation;
+  p_meta->rotation.frame_rotation = (int32_t)rotation;
+
+  // call set_param to set the source, destination formats
+
+  rc = IMG_COMP_Q_BUF(p_comp, p_in_frame, IMG_IN);
+  if (rc != IMG_SUCCESS) {
+    LOGE("rc %d", rc);
+    goto ERROR;
+  }
+
+  rc = IMG_COMP_Q_BUF(p_comp, p_out_frame, IMG_OUT);
+  if (rc != IMG_SUCCESS) {
+    LOGE("rc %d", rc);
+    goto ERROR;
+  }
+
+  rc = IMG_COMP_Q_META_BUF(p_comp, p_meta);
+  if (rc != IMG_SUCCESS) {
+    LOGE("rc %d", rc);
+    goto ERROR;
+  }
+
+  rc = IMG_COMP_START(p_comp, NULL);
+  if (rc != IMG_SUCCESS) {
+    LOGE("rc %d", rc);
+    goto ERROR;
+  }
+
+  if (lib2d_obj->lib2d_mode == MM_LIB2D_SYNC_MODE) {
+    if (lib2d_obj->comp_mode == IMG_ASYNC_MODE) {
+      LOGD("before wait rc %d", rc);
+      rc = lib2d_obj->img_lib.img_wait_for_completion(&lib2d_obj->cond,
+        &lib2d_obj->mutex, 10000);
+      if (rc != IMG_SUCCESS) {
+        LOGE("rc %d", rc);
+        goto ERROR;
+      }
+    }
+  }
+
+  rc = IMG_COMP_ABORT(p_comp, NULL);
+  if (IMG_ERROR(rc)) {
+    LOGE("comp abort failed %d", rc);
+    return rc;
+  }
+
+  return MM_LIB2D_SUCCESS;
+ERROR:
+  free(p_in_frame);
+  free(p_out_frame);
+  free(p_meta);
+  free(p_job_info);
+
+  return MM_LIB2D_ERR_GENERAL;
+}
+
diff --git a/msmcobalt/QCamera2/stack/mm-lib2d-interface/test/Android.mk b/msmcobalt/QCamera2/stack/mm-lib2d-interface/test/Android.mk
new file mode 100644
index 0000000..63cd5b7
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-lib2d-interface/test/Android.mk
@@ -0,0 +1,37 @@
+#lib2d sample test
+OLD_LOCAL_PATH := $(LOCAL_PATH)
+MM_LIB2D_TEST_PATH := $(call my-dir)
+
+include $(LOCAL_PATH)/../../common.mk
+include $(CLEAR_VARS)
+LOCAL_PATH := $(MM_LIB2D_TEST_PATH)
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_CFLAGS := -DCAMERA_ION_HEAP_ID=ION_IOMMU_HEAP_ID
+LOCAL_CFLAGS += -Wall -Wextra -Werror -Wno-unused-parameter
+LOCAL_CFLAGS += -D_ANDROID_
+
+ifeq ($(strip $(TARGET_USES_ION)),true)
+LOCAL_CFLAGS += -DUSE_ION
+endif
+
+IMGLIB_HEADER_PATH := $(TARGET_OUT_INTERMEDIATES)/include/mm-camera/imglib
+
+LOCAL_C_INCLUDES += \
+    $(IMGLIB_HEADER_PATH) \
+    $(LOCAL_PATH)/../../common \
+    $(LOCAL_PATH)/../inc
+
+LOCAL_C_INCLUDES+= $(kernel_includes)
+LOCAL_ADDITIONAL_DEPENDENCIES := $(common_deps)
+
+LOCAL_SRC_FILES := mm_lib2d_test.c
+
+LOCAL_32_BIT_ONLY := $(BOARD_QTI_CAMERA_32BIT_ONLY)
+LOCAL_MODULE           := mm-lib2d-interface-test
+LOCAL_PRELINK_MODULE   := false
+LOCAL_SHARED_LIBRARIES := libcutils libdl libmmlib2d_interface
+
+include $(BUILD_EXECUTABLE)
+
+LOCAL_PATH := $(OLD_LOCAL_PATH)
diff --git a/msmcobalt/QCamera2/stack/mm-lib2d-interface/test/mm_lib2d_test.c b/msmcobalt/QCamera2/stack/mm-lib2d-interface/test/mm_lib2d_test.c
new file mode 100644
index 0000000..908a4a6
--- /dev/null
+++ b/msmcobalt/QCamera2/stack/mm-lib2d-interface/test/mm_lib2d_test.c
@@ -0,0 +1,543 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+// System dependencies
+#include <dlfcn.h>
+#include <stdbool.h>
+#include <stdlib.h>
+#include <sys/time.h>
+
+// Camera dependencies
+#include "img_buffer.h"
+#include "mm_lib2d.h"
+
+
+#define ENABLE_OUTPUT_DUMP 1
+#define ALIGN4K 4032
+#define ALIGN(a, b) (((a) + (b)) & ~(b))
+
+
+/** DUMP_TO_FILE:
+ *  @filename: file name
+ *  @p_addr: address of the buffer
+ *  @len: buffer length
+ *
+ *  dump the image to the file
+ **/
+#define DUMP_TO_FILE(filename, p_addr, len) ({ \
+  size_t rc = 0; \
+  FILE *fp = fopen(filename, "w+"); \
+  if (fp) { \
+    rc = fwrite(p_addr, 1, len, fp); \
+    printf(" %d] written size %zu \n",  __LINE__, len); \
+    fclose(fp); \
+  } else { \
+    printf(" %d] open %s failed \n",  __LINE__, filename); \
+  } \
+})
+
+/** DUMP_TO_FILE2:
+ *  @filename: file name
+ *  @p_addr: address of the buffer
+ *  @len: buffer length
+ *
+ *  dump the image to the file if the memory is non-contiguous
+ **/
+#define DUMP_TO_FILE2(filename, p_addr1, len1, p_addr2, len2) ({ \
+  size_t rc = 0; \
+  FILE *fp = fopen(filename, "w+"); \
+  if (fp) { \
+    rc = fwrite(p_addr1, 1, len1, fp); \
+    rc = fwrite(p_addr2, 1, len2, fp); \
+    printf(" %d] written %zu %zu \n",  __LINE__, len1, len2); \
+    fclose(fp); \
+  } else { \
+    printf(" %d] open %s failed \n",  __LINE__, filename); \
+  } \
+})
+
+/** img_lib_buffert
+ * @ptr: handle to the imglib library
+ * @img_buffer_get: function pointer to img_buffer_get
+ * @img_buffer_release: function pointer to img_buffer_release
+ * @img_buffer_cacheops: function pointer to img_buffer_cacheops
+**/
+typedef struct {
+  void *ptr;
+  int (*img_buffer_get)(img_buf_type_t type, int heapid, int8_t cached, int length,
+    img_mem_handle_t *p_handle);
+  int (*img_buffer_release)(img_mem_handle_t *p_handle);
+  int (*img_buffer_cacheops)(img_mem_handle_t *p_handle, img_cache_ops_t ops,
+  img_mem_alloc_type_t mem_alloc_type);
+} img_lib_buffert;
+
+/** input_yuv_data
+ * @filename: input test filename
+ * @format: format of the input yuv frame
+ * @wdith: width of the input yuv frame
+ * @height: height of the input yuv frame
+ * @stride: stride of the input yuv frame
+ * @offset: offset to the yuv data in the input file
+**/
+typedef struct input_yuv_data_t {
+  char filename[512];
+  cam_format_t format;
+  int32_t wdith;
+  int32_t height;
+  int32_t stride;
+  int32_t offset;
+} input_yuv_data;
+
+input_yuv_data input_nv21[] = {
+  {"sample0_768x512.yuv",                             CAM_FORMAT_YUV_420_NV21, 768,  512,  768,  0},
+  {"sample1_3200x2400.yuv",                           CAM_FORMAT_YUV_420_NV21, 3200, 2400, 3200, 0},
+  {"sample2_1920x1080.yuv",                           CAM_FORMAT_YUV_420_NV21, 1920, 1080, 1920, 0},
+  {"sample3_3200x2400.yuv",                           CAM_FORMAT_YUV_420_NV21, 3200, 2400, 3200, 0},
+  {"sample4_4208x3120.yuv",                           CAM_FORMAT_YUV_420_NV21, 4208, 3120, 4208, 0},
+  {"sample5_1984x2592.yuv",                           CAM_FORMAT_YUV_420_NV21, 1984, 2592, 1984, 0},
+  {"sample6_4000_3000.yuv",                           CAM_FORMAT_YUV_420_NV21, 4000, 3000, 4000, 0},
+  {"sample7_3200_2400.yuv",                           CAM_FORMAT_YUV_420_NV21, 3200, 2400, 3200, 0},
+  {"sample8_3008_4000.yuv",                           CAM_FORMAT_YUV_420_NV21, 3008, 4000, 3008, 0},
+  {"sample9_5312x2988.yuv",                           CAM_FORMAT_YUV_420_NV21, 5312, 2988, 5312, 0},
+  {"sample10_4128x3096.yuv",                          CAM_FORMAT_YUV_420_NV21, 4128, 3096, 4128, 0},
+  {"sample11_4208x3120.yuv",                          CAM_FORMAT_YUV_420_NV21, 4208, 3120, 4208, 0},
+  {"sample12_3200x2400.yuv",                          CAM_FORMAT_YUV_420_NV21, 3200, 2400, 3200, 0},
+  {"sample13_width_1080_height_1440_stride_1088.yuv", CAM_FORMAT_YUV_420_NV21, 1080, 1440, 1088, 0},
+  {"sample14_width_1080_height_1920_stride_1088.yuv", CAM_FORMAT_YUV_420_NV21, 1080, 1920, 1088, 0},
+  {"sample15_width_1944_height_2592_stride_1984.yuv", CAM_FORMAT_YUV_420_NV21, 1944, 2592, 1984, 0},
+  {"sample16_width_3000_height_4000_stride_3008.yuv", CAM_FORMAT_YUV_420_NV21, 3000, 4000, 3008, 0},
+  {"sample17_width_3120_height_4208_stride_3136.yuv", CAM_FORMAT_YUV_420_NV21, 3120, 4208, 3136, 0},
+  {"sample18_width_3200_height_2400_stride_3200.yuv", CAM_FORMAT_YUV_420_NV21, 3200, 2400, 3200, 0},
+  {"sample19_width_1944_height_2592_stride_1984.yuv", CAM_FORMAT_YUV_420_NV21, 1944, 2592, 1984, 0},
+};
+
+// assuming buffer format is always ARGB
+void lib2d_dump_tga(void *addr, cam_format_t format, int width,
+  int height, int stride, char *fname)
+{
+  int i, j;
+  FILE *f;
+  unsigned char *pb = (unsigned char *)addr;
+  uint32_t *pd = (uint32_t *)addr;
+  int bpp = 32;
+
+  f = fopen(fname, "wb");
+  if (f) {
+    // header
+    fprintf(f, "%c%c%c%c", 0, 0, 2, 0);
+    fprintf(f, "%c%c%c%c", 0, 0, 0, 0);
+    fprintf(f, "%c%c%c%c", 0, 0, 0, 0);
+    fprintf(f, "%c%c%c%c", width & 0xff, width >> 8, height & 0xff, height >> 8);
+    fprintf(f, "%c%c", bpp, 32);
+
+    for (i = 0; i < height; i++) {
+      for (j = 0; j < width; j++) {
+        fprintf(f, "%c%c%c%c",
+          pd[(i*stride>>2)+j] & 0xff,           // b
+          (pd[(i*stride>>2)+j] >> 8) & 0xff,    // g
+          (pd[(i*stride>>2)+j] >> 16) & 0xff,   // r
+          (pd[(i*stride>>2)+j] >> 24) & 0xff);  // a
+      }
+    }
+    fclose(f);
+  }
+}
+
+/**
+ * Function: lib2d_test_client_cb
+ *
+ * Description: Callback that is called on completion of requested job.
+ *
+ * Input parameters:
+ *   userdata - App userdata
+ *   jobid - job id that is finished execution
+ *
+ * Return values:
+ *   MM_LIB2D_SUCCESS
+ *   MM_LIB2D_ERR_GENERAL
+ *
+ * Notes: none
+ **/
+lib2d_error lib2d_test_client_cb(void *userdata, int jobid)
+{
+  printf("%d, jobid=%d \n",  __LINE__, jobid);
+  return MM_LIB2D_SUCCESS;
+}
+
+/**
+ * Function: lib2d_test_load_input_yuv_data
+ *
+ * Description: Loads yuv data from input file.
+ *
+ * Input parameters:
+ *   fileName - input yuv filename
+ *   offset - offset to the yuv data in the input file
+ *   y_size - y plane size in input yuv file
+ *   crcb_size - crcb plane size in input yuv file
+ *   crcb_offset - crcb offset in the memory at
+ *       which crcb data need to be loaded
+ *   addr - y plane memory address where y plane
+ *       data need to be loaded.
+ *
+ * Return values:
+ *   MM_LIB2D_SUCCESS
+ *   MM_LIB2D_ERR_GENERAL
+ *
+ * Notes: none
+ **/
+lib2d_error lib2d_test_load_input_yuv_data(char *fileName, int offset,
+    int32_t y_size, int32_t crcb_size, int32_t crcb_offset,
+    void *addr)
+{
+  size_t i;
+  FILE  *fp       = 0;
+  void  *y_ptr    = addr;
+  void  *crcb_ptr = (uint8_t *)addr + crcb_offset;
+
+  printf("y_ptr=%p, crcb_ptr=%p \n", y_ptr, crcb_ptr);
+
+  fp = fopen(fileName, "rb");
+  if(fp) {
+    if(offset) {
+      fseek(fp, offset, SEEK_SET);
+    }
+    i = fread(y_ptr, 1, y_size, fp);
+    i = fread(crcb_ptr, 1, crcb_size, fp);
+
+    fclose( fp );
+  } else {
+    printf("failed to open file %s \n", fileName);
+    return MM_LIB2D_ERR_GENERAL;
+  }
+
+  return MM_LIB2D_SUCCESS;
+}
+
+/**
+ * Function: lib2d_test_load_input_yuv_data_linebyline
+ *
+ * Description: Loads yuv data from input file.
+ *
+ * Input parameters:
+ *   fileName - input yuv filename
+ *   offset - offset to the yuv data in the input file
+ *   input_yuv_stride - y plane stride in input yuv file
+ *   y_plane_stride - y plane stride in buffer memory
+ *   height - height of yuv image
+ *   crcb_offset - crcb offset in the memory at
+ *       which crcb data need to be loaded
+ *   addr - y plane memory address where y plane
+ *       data need to be loaded.
+ *
+ * Return values:
+ *   MM_LIB2D_SUCCESS
+ *   MM_LIB2D_ERR_GENERAL
+ *
+ * Notes: none
+ **/
+lib2d_error lib2d_test_load_input_yuv_data_linebyline(char *fileName,
+    int offset, int32_t input_yuv_stride, int32_t y_plane_stride,
+    int32_t height, int32_t crcb_offset, void *addr)
+{
+  size_t i;
+  FILE  *fp       = 0;
+  void  *y_ptr    = addr;
+  void  *crcb_ptr = (uint8_t *)addr + crcb_offset;
+
+  printf("y_ptr=%p, crcb_ptr=%p \n", y_ptr, crcb_ptr);
+
+  fp = fopen(fileName, "rb");
+  if(fp) {
+    if(offset) {
+      fseek(fp, offset, SEEK_SET);
+    }
+    if (input_yuv_stride == y_plane_stride) {
+      //load y plane
+      i = fread(y_ptr, 1, (input_yuv_stride * height), fp);
+      // load UV plane
+      i = fread(crcb_ptr, 1, (input_yuv_stride * height / 2), fp);
+    } else {
+      int line = 0;
+      // load Y plane
+      for (line = 0;line < height; line++) {
+        i = fread(y_ptr, 1, input_yuv_stride, fp);
+        y_ptr = (void *)((uint8_t *)y_ptr + y_plane_stride);
+      }
+      for (line = 0;line < height; line++) {
+        i = fread(crcb_ptr, 1, input_yuv_stride, fp);
+        crcb_ptr = (void *)((uint8_t *)crcb_ptr + y_plane_stride);
+      }
+    }
+
+    fclose( fp );
+  } else {
+    printf("failed to open file %s \n", fileName);
+    return MM_LIB2D_ERR_GENERAL;
+  }
+
+  return MM_LIB2D_SUCCESS;
+}
+
+/**
+ * Function: main
+ *
+ * Description: main function for execution
+ *
+ * Input parameters:
+ *   argc - no.of input arguments
+ *   argv - list of arguments
+ *
+ * Return values:
+ *   0 on success
+ *   -1 on failure
+ *
+ * Notes: none
+ **/
+int main(int32_t argc, const char * argv[])
+{
+  void            *lib2d_handle       = NULL;
+  lib2d_error      lib2d_err          = MM_LIB2D_SUCCESS;
+  mm_lib2d_buffer  src_buffer         = {0};
+  mm_lib2d_buffer  dst_buffer         = {0};
+  int8_t           ret                = IMG_SUCCESS;
+  int32_t          width              = 0;
+  int32_t          height             = 0;
+  int32_t          input_yuv_stride   = 0;
+  int32_t          stride             = 0;
+  int32_t          y_plane_stride     = 0;
+  int32_t          crcb_plane_stride  = 0;
+  int32_t          y_plane_size       = 0;
+  int32_t          y_plane_size_align = 0;
+  int32_t          crcb_plane_size    = 0;
+  int32_t          yuv_size           = 0;
+  int32_t          rgb_size           = 0;
+  img_mem_handle_t m_yuv_memHandle    = { 0 };
+  img_mem_handle_t m_rgb_memHandle    = { 0 };
+  char             filename_in[512]   = { 0 };
+  char             filename_out[512]  = { 0 };
+  char             filename_raw[512]  = { 0 };
+  int32_t          offset             = 0;
+  unsigned int     total_tests        = 1;
+  cam_format_t     format             = CAM_FORMAT_YUV_420_NV21;
+  unsigned int     index;
+  const char      *filename;
+
+  // Open Imglib library and get the function pointers for
+  // buffer allocation, free, cacheops
+  img_lib_buffert  img_lib;
+  img_lib.ptr = dlopen("libmmcamera_imglib.so", RTLD_NOW);
+  if (!img_lib.ptr) {
+    printf("ERROR: couldn't dlopen libmmcamera_imglib.so: %s",
+       dlerror());
+    return -1;
+  }
+
+  /* Get function pointer for functions to allocate ion memory */
+  *(void **)&img_lib.img_buffer_get =
+      dlsym(img_lib.ptr, "img_buffer_get");
+  *(void **)&img_lib.img_buffer_release =
+      dlsym(img_lib.ptr, "img_buffer_release");
+  *(void **)&img_lib.img_buffer_cacheops =
+      dlsym(img_lib.ptr, "img_buffer_cacheops");
+
+  /* Validate function pointers */
+  if ((img_lib.img_buffer_get == NULL) ||
+    (img_lib.img_buffer_release == NULL) ||
+    (img_lib.img_buffer_cacheops == NULL)) {
+    printf(" ERROR mapping symbols from libmmcamera_imglib.so");
+    dlclose(img_lib.ptr);
+    return -1;
+  }
+
+  lib2d_err = mm_lib2d_init(MM_LIB2D_SYNC_MODE, CAM_FORMAT_YUV_420_NV21,
+    CAM_FORMAT_8888_ARGB, &lib2d_handle);
+  if ((lib2d_err != MM_LIB2D_SUCCESS) || (lib2d_handle == NULL)) {
+    return -1;
+  }
+
+  bool run_default = false;
+
+  if ( argc == 7) {
+    filename         = argv[1];
+    width            = (uint32_t)atoi(argv[2]);
+    height           = (uint32_t)atoi(argv[3]);
+    input_yuv_stride = (uint32_t)atoi(argv[4]);
+    offset           = (uint32_t)atoi(argv[5]);
+    format           = (uint32_t)atoi(argv[6]);
+    run_default      = true;
+    printf("Running user provided conversion \n");
+  }
+  else {
+    total_tests = sizeof(input_nv21)/sizeof(input_yuv_data);
+    printf("usage: <binary> <filname> <width> <height> "
+      "<stride> <offset> <format> \n");
+  }
+
+  for (index = 0; index < total_tests; index++)
+  {
+    if(run_default == false) {
+      filename         = input_nv21[index].filename;
+      width            = input_nv21[index].wdith;
+      height           = input_nv21[index].height;
+      input_yuv_stride = input_nv21[index].stride;
+      offset           = input_nv21[index].offset;
+      format           = input_nv21[index].format;
+    }
+
+    snprintf(filename_in, 512, "/data/lib2d/input/%s", filename);
+    snprintf(filename_out, 512, "/data/lib2d/output/%s.tga", filename);
+    snprintf(filename_raw, 512, "/data/lib2d/output/%s.rgba", filename);
+
+    printf("-----------------Running test=%d/%d------------------------- \n",
+      index+1, total_tests);
+    printf("filename=%s, full path=%s, width=%d, height=%d, stride=%d \n",
+      filename, filename_in, width, height, stride);
+
+    // Allocate NV12 buffer
+    y_plane_stride     = ALIGN(width, 32);
+    y_plane_size       = y_plane_stride * height;
+    y_plane_size_align = ALIGN(y_plane_size, ALIGN4K);
+    crcb_plane_stride  = y_plane_stride;
+    crcb_plane_size    = crcb_plane_stride * height / 2;
+    yuv_size           = y_plane_size_align + crcb_plane_size;
+    ret = img_lib.img_buffer_get(IMG_BUFFER_ION_IOMMU, -1, true,
+          yuv_size, &m_yuv_memHandle);
+    if (ret != IMG_SUCCESS) {
+      printf(" ] Error, img buf get failed \n");
+      goto deinit;
+    }
+
+    printf("%d yuv buffer properties : w=%d, h=%d, y_stride=%d, "
+      "crcb_stride=%d, y_size=%d, crcb_size=%d, yuv_size=%d, "
+      "crcb_offset=%d \n",
+       __LINE__,
+      width, height, y_plane_stride, crcb_plane_stride, y_plane_size,
+      crcb_plane_size, yuv_size, y_plane_size_align);
+    printf("%d yuv buffer properties : fd=%d, ptr=%p, size=%d \n",
+       __LINE__, m_yuv_memHandle.fd, m_yuv_memHandle.vaddr,
+      m_yuv_memHandle.length);
+
+    // Allocate ARGB buffer
+    stride   = width * 4;
+    stride   = ALIGN(stride, 32);
+    rgb_size = stride * height;
+    ret = img_lib.img_buffer_get(IMG_BUFFER_ION_IOMMU, -1, true,
+          rgb_size, &m_rgb_memHandle);
+    if (ret != IMG_SUCCESS) {
+      printf(" ] Error, img buf get failed");
+      img_lib.img_buffer_release(&m_yuv_memHandle);
+      goto deinit;
+    }
+
+    printf("%d rgb buffer properties : w=%d, h=%d, stride=%d, size=%d \n",
+       __LINE__, width, height, stride, rgb_size);
+    printf("%d rgb buffer properties : fd=%d, ptr=%p, size=%d \n",
+       __LINE__, m_rgb_memHandle.fd, m_rgb_memHandle.vaddr,
+      m_rgb_memHandle.length);
+
+#if 0
+    lib2d_err = lib2d_test_load_input_yuv_data(filename_in, offset,
+      (input_yuv_stride * height), (input_yuv_stride * height / 2), y_plane_size_align,
+      m_yuv_memHandle.vaddr);
+    if (lib2d_err != MM_LIB2D_SUCCESS) {
+      printf(" ] Error loading the input buffer \n");
+      goto release;
+    }
+#else
+    lib2d_err = lib2d_test_load_input_yuv_data_linebyline(filename_in, offset,
+      input_yuv_stride, y_plane_stride,height, y_plane_size_align,
+      m_yuv_memHandle.vaddr);
+    if (lib2d_err != MM_LIB2D_SUCCESS) {
+      printf(" ] Error loading the input buffer \n");
+      goto release;
+    }
+#endif
+    // Setup source buffer
+    src_buffer.buffer_type = MM_LIB2D_BUFFER_TYPE_YUV;
+    src_buffer.yuv_buffer.fd      = m_yuv_memHandle.fd;
+    src_buffer.yuv_buffer.format  = format;
+    src_buffer.yuv_buffer.width   = width;
+    src_buffer.yuv_buffer.height  = height;
+    src_buffer.yuv_buffer.plane0  = m_yuv_memHandle.vaddr;
+    src_buffer.yuv_buffer.stride0 = y_plane_stride;
+    src_buffer.yuv_buffer.plane1  = (int8_t *)m_yuv_memHandle.vaddr +
+                                    y_plane_size_align;
+    src_buffer.yuv_buffer.stride1 = crcb_plane_stride;
+
+    // Setup dst buffer
+    dst_buffer.buffer_type = MM_LIB2D_BUFFER_TYPE_RGB;
+    dst_buffer.rgb_buffer.fd     = m_rgb_memHandle.fd;
+    dst_buffer.rgb_buffer.format = CAM_FORMAT_8888_ARGB;
+    dst_buffer.rgb_buffer.width  = width;
+    dst_buffer.rgb_buffer.height = height;
+    dst_buffer.rgb_buffer.buffer = m_rgb_memHandle.vaddr;
+    dst_buffer.rgb_buffer.stride = stride;
+
+    img_lib.img_buffer_cacheops(&m_yuv_memHandle,
+      IMG_CACHE_CLEAN_INV, IMG_INTERNAL);
+
+    lib2d_err = mm_lib2d_start_job(lib2d_handle, &src_buffer, &dst_buffer,
+      index, NULL, lib2d_test_client_cb, 0);
+    if (lib2d_err != MM_LIB2D_SUCCESS) {
+      printf(" ] Error in mm_lib2d_start_job \n");
+      goto release;
+    }
+
+    img_lib.img_buffer_cacheops(&m_rgb_memHandle,
+      IMG_CACHE_CLEAN_INV, IMG_INTERNAL);
+
+#ifdef ENABLE_OUTPUT_DUMP
+    // Dump output files
+    // snprintf(filename_in, 512, "/data/lib2d/output/%s", filename);
+    // DUMP_TO_FILE2(filename_in, src_buffer.yuv_buffer.plane0, y_plane_size, src_buffer.yuv_buffer.plane1, crcb_plane_size);
+    // DUMP_TO_FILE(filename_raw, dst_buffer.rgb_buffer.buffer, rgb_size);
+    printf("Dumping output file %s \n", filename_out);
+    lib2d_dump_tga(dst_buffer.rgb_buffer.buffer, 1,
+      width, height, stride, filename_out);
+#endif
+
+    img_lib.img_buffer_release(&m_rgb_memHandle);
+    img_lib.img_buffer_release(&m_yuv_memHandle);
+  }
+
+  mm_lib2d_deinit(lib2d_handle);
+
+  return 0;
+
+release:
+  img_lib.img_buffer_release(&m_rgb_memHandle);
+  img_lib.img_buffer_release(&m_yuv_memHandle);
+deinit:
+  mm_lib2d_deinit(lib2d_handle);
+  printf("%d some error happened, tests completed = %u/%u \n",
+     __LINE__, index - 1, total_tests);
+  return -1;
+}
+
+
diff --git a/msmcobalt/QCamera2/util/QCameraBufferMaps.cpp b/msmcobalt/QCamera2/util/QCameraBufferMaps.cpp
new file mode 100644
index 0000000..5cd8159
--- /dev/null
+++ b/msmcobalt/QCamera2/util/QCameraBufferMaps.cpp
@@ -0,0 +1,251 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCameraBufferMaps"
+
+// System dependencies
+#include <utils/Errors.h>
+#include <stdlib.h>
+#include <string.h>
+
+// Camera dependencies
+#include "QCameraBufferMaps.h"
+
+using namespace android;
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : QCameraBufferMaps
+ *
+ * DESCRIPTION: default constructor of QCameraBufferMaps
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraBufferMaps::QCameraBufferMaps()
+{
+    memset(&mBufMapList, 0, sizeof(mBufMapList));
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraBufferMaps
+ *
+ * DESCRIPTION: copy constructor of QCameraBufferMaps
+ *
+ * PARAMETERS :
+ *   @pBufferMaps : object to be copied
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraBufferMaps::QCameraBufferMaps(const QCameraBufferMaps& pBufferMaps)
+{
+    memcpy(&mBufMapList, &pBufferMaps.mBufMapList, sizeof(mBufMapList));
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraBufferMaps
+ *
+ * DESCRIPTION: constructor of QCameraBufferMaps
+ *
+ * PARAMETERS :
+ *   @pBufMapList : list of buffer maps
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraBufferMaps::QCameraBufferMaps(const cam_buf_map_type_list& pBufMapList)
+{
+    memcpy(&mBufMapList, &pBufMapList, sizeof(mBufMapList));
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraBufferMaps
+ *
+ * DESCRIPTION: constructor of QCameraBufferMaps
+ *
+ * PARAMETERS :
+ *   @pType   : Type of buffer
+ *   @pStreamId : Stream id
+ *   @pFrameIndex : Frame index
+ *   @pPlaneIndex : Plane index
+ *   @pCookie   : Could be job_id to identify mapping job
+ *   @pFd   : Origin file descriptor
+ *   @pSize   : Size of the buffer
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraBufferMaps::QCameraBufferMaps(cam_mapping_buf_type pType,
+        uint32_t pStreamId,
+        uint32_t pFrameIndex,
+        int32_t pPlaneIndex,
+        uint32_t pCookie,
+        int32_t pFd,
+        size_t pSize,
+        void *buffer)
+{
+    memset(&mBufMapList, 0, sizeof(mBufMapList));
+    enqueue(pType, pStreamId, pFrameIndex, pPlaneIndex, pCookie, pFd, pSize, buffer);
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraBufferMaps
+ *
+ * DESCRIPTION: destructor of QCameraBufferMaps
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraBufferMaps::~QCameraBufferMaps()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : operator=
+ *
+ * DESCRIPTION: assignment operator of QCameraBufferMaps
+ *
+ * PARAMETERS :
+ *   @pBufferMaps : object to be copied
+ *
+ * RETURN     : *this, with updated contents
+ *==========================================================================*/
+QCameraBufferMaps& QCameraBufferMaps::operator=(const QCameraBufferMaps& pBufferMaps)
+{
+    if (&pBufferMaps != this) {
+        memcpy(&mBufMapList, &pBufferMaps.mBufMapList, sizeof(mBufMapList));
+    }
+    return *this;
+}
+
+/*===========================================================================
+ * FUNCTION   : enqueue
+ *
+ * DESCRIPTION: Add a buffer map
+ *
+ * PARAMETERS :
+ *   @pType   : Type of buffer
+ *   @pStreamId : Stream id
+ *   @pFrameIndex : Frame index
+ *   @pPlaneIndex : Plane index
+ *   @pCookie   : Could be job_id to identify mapping job
+ *   @pFd   : Origin file descriptor
+ *   @pSize   : Size of the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+uint32_t QCameraBufferMaps::enqueue(cam_mapping_buf_type pType,
+        uint32_t pStreamId,
+        uint32_t pFrameIndex,
+        int32_t pPlaneIndex,
+        uint32_t pCookie,
+        int32_t pFd,
+        size_t pSize,
+        void *buffer)
+{
+    uint32_t pos = mBufMapList.length++;
+    mBufMapList.buf_maps[pos].type = pType;
+    mBufMapList.buf_maps[pos].stream_id = pStreamId;
+    mBufMapList.buf_maps[pos].frame_idx = pFrameIndex;
+    mBufMapList.buf_maps[pos].plane_idx = pPlaneIndex;
+    mBufMapList.buf_maps[pos].cookie = pCookie;
+    mBufMapList.buf_maps[pos].fd = pFd;
+    mBufMapList.buf_maps[pos].size = pSize;
+    mBufMapList.buf_maps[pos].buffer = buffer;
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCamBufMapList
+ *
+ * DESCRIPTION: Populate the list
+ *
+ * PARAMETERS :
+ *   @pBufMapList : [output] the list of buffer maps
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+uint32_t QCameraBufferMaps::getCamBufMapList(cam_buf_map_type_list& pBufMapList) const
+{
+    memcpy(&pBufMapList, &mBufMapList, sizeof(pBufMapList));
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : makeSingletonBufMapList
+ *
+ * DESCRIPTION: Create a buffer map list of a single element
+ *
+ * PARAMETERS :
+ *   @pType   : Type of buffer
+ *   @pStreamId : Stream id
+ *   @pFrameIndex : Frame index
+ *   @pPlaneIndex : Plane index
+ *   @pCookie   : Could be job_id to identify mapping job
+ *   @pFd   : Origin file descriptor
+ *   @pSize   : Size of the buffer
+ *   @pBufMapList : [output] the list of buffer maps
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+uint32_t QCameraBufferMaps::makeSingletonBufMapList(cam_mapping_buf_type pType,
+        uint32_t pStreamId,
+        uint32_t pFrameIndex,
+        int32_t pPlaneIndex,
+        uint32_t pCookie,
+        int32_t pFd,
+        size_t pSize,
+        cam_buf_map_type_list& pBufMapList,
+        void *buffer)
+{
+    uint32_t rc = NO_ERROR;
+
+    QCameraBufferMaps bufferMaps(pType,
+            pStreamId,
+            pFrameIndex,
+            pPlaneIndex,
+            pCookie,
+            pFd,
+            pSize,
+            buffer);
+    rc = bufferMaps.getCamBufMapList(pBufMapList);
+
+    return rc;
+}
+
+}; // namespace qcamera
diff --git a/msmcobalt/QCamera2/util/QCameraBufferMaps.h b/msmcobalt/QCamera2/util/QCameraBufferMaps.h
new file mode 100644
index 0000000..a0bdfce
--- /dev/null
+++ b/msmcobalt/QCamera2/util/QCameraBufferMaps.h
@@ -0,0 +1,83 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_BUFFERMAPS_H__
+#define __QCAMERA_BUFFERMAPS_H__
+
+// Camera dependencies
+#include "cam_types.h"
+
+namespace qcamera {
+
+class QCameraBufferMaps {
+public:
+    QCameraBufferMaps();
+    QCameraBufferMaps(const QCameraBufferMaps& pBufferMaps);
+    QCameraBufferMaps(const cam_buf_map_type_list& pBufMapList);
+    QCameraBufferMaps(cam_mapping_buf_type pType,
+            uint32_t pStreamId,
+            uint32_t pFrameIndex,
+            int32_t pPlaneIndex,
+            uint32_t pCookie,
+            int32_t pFd,
+            size_t pSize,
+            void *buffer);
+
+    ~QCameraBufferMaps();
+
+    QCameraBufferMaps& operator=(const QCameraBufferMaps& pBufferMaps);
+
+    uint32_t enqueue(cam_mapping_buf_type pType,
+            uint32_t pStreamId,
+            uint32_t pFrameIndex,
+            int32_t pPlaneIndex,
+            uint32_t pCookie,
+            int32_t pFd,
+            size_t pSize,
+            void *buffer);
+
+    uint32_t getCamBufMapList(cam_buf_map_type_list& pBufMapList) const;
+
+    static uint32_t makeSingletonBufMapList(cam_mapping_buf_type pType,
+            uint32_t pStreamId,
+            uint32_t pFrameIndex,
+            int32_t pPlaneIndex,
+            uint32_t pCookie,
+            int32_t pFd,
+            size_t pSize,
+            cam_buf_map_type_list& pBufMapList,
+            void *buffer);
+
+private:
+    cam_buf_map_type_list mBufMapList;
+};
+
+}; // namespace qcamera
+#endif /* __QCAMERA_BUFFERMAPS_H__ */
+
diff --git a/msmcobalt/QCamera2/util/QCameraCmdThread.cpp b/msmcobalt/QCamera2/util/QCameraCmdThread.cpp
new file mode 100644
index 0000000..8b191b0
--- /dev/null
+++ b/msmcobalt/QCamera2/util/QCameraCmdThread.cpp
@@ -0,0 +1,225 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+// System dependencies
+#include <string.h>
+#include <utils/Errors.h>
+#define PRCTL_H <SYSTEM_HEADER_PREFIX/prctl.h>
+#include PRCTL_H
+
+// Camera dependencies
+#include "QCameraCmdThread.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+using namespace android;
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : QCameraCmdThread
+ *
+ * DESCRIPTION: default constructor of QCameraCmdThread
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraCmdThread::QCameraCmdThread() :
+    cmd_queue()
+{
+    cmd_pid = 0; // 0 marks "no cmd thread running" (checked by exit())
+    cam_sem_init(&sync_sem, 0);
+    cam_sem_init(&cmd_sem, 0);
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraCmdThread
+ *
+ * DESCRIPTION: destructor of QCameraCmdThread
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraCmdThread::~QCameraCmdThread()
+{
+    exit();
+    cam_sem_destroy(&sync_sem);
+    cam_sem_destroy(&cmd_sem);
+}
+
+/*===========================================================================
+ * FUNCTION   : launch
+ *
+ * DESCRIPTION: launch Cmd Thread
+ *
+ * PARAMETERS :
+ *   @start_routine : thread routine function ptr
+ *   @user_data     : user data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCmdThread::launch(void *(*start_routine)(void *),
+                                 void* user_data)
+{
+    /* launch the thread */
+    pthread_create(&cmd_pid, // NOTE(review): return value ignored; on failure cmd_pid is left unspecified
+                   NULL,
+                   start_routine,
+                   user_data);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setName
+ *
+ * DESCRIPTION: name the cmd thread
+ *
+ * PARAMETERS :
+ *   @name : desired name for the thread
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCmdThread::setName(const char* name)
+{
+    /* name the thread */
+    prctl(PR_SET_NAME, (unsigned long)name, 0, 0, 0); // NOTE(review): PR_SET_NAME renames the *calling* thread, so this must run on the cmd thread itself
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : sendCmd
+ *
+ * DESCRIPTION: send a command to the Cmd Thread
+ *
+ * PARAMETERS :
+ *   @cmd     : command to be executed.
+ *   @sync_cmd: flag to indicate if this is a synchronized cmd. If true, this call
+ *              will wait until signal is set after the command is completed.
+ *   @priority: flag to indicate if this is a cmd with priority. If true, the cmd
+ *              will be enqueued to the head with priority.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCmdThread::sendCmd(camera_cmd_type_t cmd, uint8_t sync_cmd, uint8_t priority)
+{
+    camera_cmd_t *node = (camera_cmd_t *)malloc(sizeof(camera_cmd_t));
+    if (NULL == node) {
+        LOGE("No memory for camera_cmd_t");
+        return NO_MEMORY;
+    }
+    memset(node, 0, sizeof(camera_cmd_t));
+    node->cmd = cmd;
+
+    if (priority) {
+        if (!cmd_queue.enqueueWithPriority((void *)node)) {
+            free(node);
+            node = NULL;
+        }
+    } else {
+        if (!cmd_queue.enqueue((void *)node)) {
+            free(node);
+            node = NULL;
+        }
+    }
+    cam_sem_post(&cmd_sem); // posted even if the enqueue above failed; the consumer then just finds an empty queue
+
+    /* if is a sync call, need to wait until it returns */
+    if (sync_cmd) { // NOTE(review): if enqueue failed, nothing will ever post sync_sem -- verify this cannot hang
+        cam_sem_wait(&sync_sem);
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCmd
+ *
+ * DESCRIPTION: dequeue a command from cmd queue
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : cmd dequeued
+ *==========================================================================*/
+camera_cmd_type_t QCameraCmdThread::getCmd()
+{
+    camera_cmd_type_t cmd = CAMERA_CMD_TYPE_NONE;
+    camera_cmd_t *node = (camera_cmd_t *)cmd_queue.dequeue();
+    if (NULL == node) {
+        LOGD("No notify avail");
+        return CAMERA_CMD_TYPE_NONE;
+    } else {
+        cmd = node->cmd;
+        free(node);
+    }
+    return cmd;
+}
+
+/*===========================================================================
+ * FUNCTION   : exit
+ *
+ * DESCRIPTION: exit the CMD thread
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCmdThread::exit()
+{
+    int32_t rc = NO_ERROR;
+
+    if (cmd_pid == 0) {
+        return rc;
+    }
+
+    rc = sendCmd(CAMERA_CMD_TYPE_EXIT, 0, 1);
+    if (NO_ERROR != rc) {
+        LOGE("Error during exit, rc = %d", rc);
+        return rc;
+    }
+
+    /* wait until cmd thread exits */
+    if (pthread_join(cmd_pid, NULL) != 0) {
+        LOGD("pthread dead already\n");
+    }
+    cmd_pid = 0;
+    return rc;
+}
+
+}; // namespace qcamera
diff --git a/msmcobalt/QCamera2/util/QCameraCmdThread.h b/msmcobalt/QCamera2/util/QCameraCmdThread.h
new file mode 100644
index 0000000..b0764b6
--- /dev/null
+++ b/msmcobalt/QCamera2/util/QCameraCmdThread.h
@@ -0,0 +1,76 @@
+/* Copyright (c) 2012, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_CMD_THREAD_H__
+#define __QCAMERA_CMD_THREAD_H__
+
+// System dependencies
+#include <pthread.h>
+
+// Camera dependencies
+#include "cam_semaphore.h"
+#include "cam_types.h"
+#include "QCameraQueue.h"
+
+namespace qcamera {
+
+typedef enum
+{
+    CAMERA_CMD_TYPE_NONE,
+    CAMERA_CMD_TYPE_START_DATA_PROC,
+    CAMERA_CMD_TYPE_STOP_DATA_PROC,
+    CAMERA_CMD_TYPE_DO_NEXT_JOB,
+    CAMERA_CMD_TYPE_EXIT, // sent by QCameraCmdThread::exit() to stop the thread
+    CAMERA_CMD_TYPE_MAX
+} camera_cmd_type_t;
+
+typedef struct {
+    camera_cmd_type_t cmd; // payload of one cmd_queue node
+} camera_cmd_t;
+
+class QCameraCmdThread {
+public:
+    QCameraCmdThread();
+    ~QCameraCmdThread();
+
+    int32_t launch(void *(*start_routine)(void *), void* user_data);
+    int32_t setName(const char* name);
+    int32_t exit();
+    int32_t sendCmd(camera_cmd_type_t cmd, uint8_t sync_cmd, uint8_t priority);
+    camera_cmd_type_t getCmd();
+
+    QCameraQueue cmd_queue;      /* cmd queue */
+    pthread_t cmd_pid;           /* cmd thread ID */
+    cam_semaphore_t cmd_sem;               /* semaphore for cmd thread */
+    cam_semaphore_t sync_sem;              /* semaphore for synchronized call signal */
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_CMD_THREAD_H__ */
diff --git a/msmcobalt/QCamera2/util/QCameraCommon.cpp b/msmcobalt/QCamera2/util/QCameraCommon.cpp
new file mode 100644
index 0000000..0cc2654
--- /dev/null
+++ b/msmcobalt/QCamera2/util/QCameraCommon.cpp
@@ -0,0 +1,226 @@
+/* Copyright (c) 2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCameraCommon"
+
+// System dependencies
+#include <utils/Errors.h>
+#include <stdlib.h>
+#include <string.h>
+#include <utils/Log.h>
+
+// Camera dependencies
+#include "QCameraCommon.h"
+
+using namespace android;
+
+namespace qcamera {
+
+#ifndef TRUE
+#define TRUE 1
+#endif
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+/*===========================================================================
+ * FUNCTION   : QCameraCommon
+ *
+ * DESCRIPTION: default constructor of QCameraCommon
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraCommon::QCameraCommon() :
+    m_pCapability(NULL) // capability table is supplied later via init()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraCommon
+ *
+ * DESCRIPTION: destructor of QCameraCommon
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraCommon::~QCameraCommon()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : init
+ *
+ * DESCRIPTION: Init function for QCameraCommon
+ *
+ * PARAMETERS :
+ *   @pCapability : Capabilities
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCommon::init(cam_capability_t *pCapability)
+{
+    m_pCapability = pCapability; // pointer stored, not copied
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : calculateLCM
+ *
+ * DESCRIPTION: Get the LCM of 2 numbers
+ *
+ * PARAMETERS :
+ *   @num1   : First number
+ *   @num2   : second number
+ *
+ * RETURN     : uint32_t type (LCM)
+ *
+ *==========================================================================*/
+uint32_t QCameraCommon::calculateLCM(int32_t num1, int32_t num2)
+{
+   uint32_t lcm = 0;
+   uint32_t temp = 0;
+
+   if ((num1 < 1) && (num2 < 1)) {
+       return 0;
+   } else if (num1 < 1) {
+       return num2;
+   } else if (num2 < 1) {
+       return num1;
+   }
+
+   if (num1 > num2) {
+       lcm = num1;
+   } else {
+       lcm = num2;
+   }
+   temp = lcm;
+
+   while (1) { // lcm steps by temp (the larger operand); the true LCM is a multiple of temp, so this terminates
+       if (((lcm % num1) == 0) && ((lcm % num2) == 0)) {
+           break;
+       }
+       lcm += temp;
+   }
+   return lcm;
+}
+
+/*===========================================================================
+ * FUNCTION   : getAnalysisInfo
+ *
+ * DESCRIPTION: Get the Analysis information based on
+ *     current mode and feature mask
+ *
+ * PARAMETERS :
+ *   @fdVideoEnabled : Whether fdVideo enabled currently
+ *   @hal3           : Whether hal3 or hal1
+ *   @featureMask    : Feature mask
+ *   @pAnalysisInfo  : Analysis info to be filled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCommon::getAnalysisInfo(
+        bool fdVideoEnabled,
+        bool hal3,
+        cam_feature_mask_t featureMask,
+        cam_analysis_info_t *pAnalysisInfo)
+{
+    if (!pAnalysisInfo) {
+        return BAD_VALUE;
+    }
+
+    pAnalysisInfo->valid = 0; // NOTE(review): m_pCapability is dereferenced below without a NULL check; assumes init() ran first -- confirm
+
+    if ((fdVideoEnabled == TRUE) && (hal3 == FALSE) &&
+            (m_pCapability->analysis_info[CAM_ANALYSIS_INFO_FD_VIDEO].hw_analysis_supported) &&
+            (m_pCapability->analysis_info[CAM_ANALYSIS_INFO_FD_VIDEO].valid)) {
+        *pAnalysisInfo =
+                m_pCapability->analysis_info[CAM_ANALYSIS_INFO_FD_VIDEO];
+    } else if (m_pCapability->analysis_info[CAM_ANALYSIS_INFO_FD_STILL].valid) {
+        *pAnalysisInfo =
+                m_pCapability->analysis_info[CAM_ANALYSIS_INFO_FD_STILL];
+        if (hal3 == TRUE) {
+            pAnalysisInfo->analysis_max_res = pAnalysisInfo->analysis_recommended_res;
+        }
+    }
+
+    if ((featureMask & CAM_QCOM_FEATURE_PAAF) &&
+      (m_pCapability->analysis_info[CAM_ANALYSIS_INFO_PAAF].valid)) {
+        cam_analysis_info_t *pPaafInfo =
+          &m_pCapability->analysis_info[CAM_ANALYSIS_INFO_PAAF];
+
+        if (!pAnalysisInfo->valid) {
+            *pAnalysisInfo = *pPaafInfo;
+        } else {
+            pAnalysisInfo->analysis_max_res.width =
+                MAX(pAnalysisInfo->analysis_max_res.width,
+                pPaafInfo->analysis_max_res.width);
+            pAnalysisInfo->analysis_max_res.height =
+                MAX(pAnalysisInfo->analysis_max_res.height,
+                pPaafInfo->analysis_max_res.height);
+            pAnalysisInfo->analysis_padding_info.height_padding =
+                calculateLCM(pAnalysisInfo->analysis_padding_info.height_padding,
+                pPaafInfo->analysis_padding_info.height_padding);
+            pAnalysisInfo->analysis_padding_info.width_padding =
+                calculateLCM(pAnalysisInfo->analysis_padding_info.width_padding,
+                pPaafInfo->analysis_padding_info.width_padding);
+            pAnalysisInfo->analysis_padding_info.plane_padding =
+                calculateLCM(pAnalysisInfo->analysis_padding_info.plane_padding,
+                pPaafInfo->analysis_padding_info.plane_padding);
+            pAnalysisInfo->analysis_padding_info.min_stride =
+                MAX(pAnalysisInfo->analysis_padding_info.min_stride,
+                pPaafInfo->analysis_padding_info.min_stride);
+            pAnalysisInfo->analysis_padding_info.min_stride =
+                ALIGN(pAnalysisInfo->analysis_padding_info.min_stride,
+                pAnalysisInfo->analysis_padding_info.width_padding);
+
+            pAnalysisInfo->analysis_padding_info.min_scanline =
+                MAX(pAnalysisInfo->analysis_padding_info.min_scanline,
+                pPaafInfo->analysis_padding_info.min_scanline);
+            pAnalysisInfo->analysis_padding_info.min_scanline =
+                ALIGN(pAnalysisInfo->analysis_padding_info.min_scanline,
+                pAnalysisInfo->analysis_padding_info.height_padding);
+
+            pAnalysisInfo->hw_analysis_supported |=
+                pPaafInfo->hw_analysis_supported;
+        }
+    }
+
+    return pAnalysisInfo->valid ? NO_ERROR : BAD_VALUE;
+}
+
+}; // namespace qcamera
diff --git a/msmcobalt/QCamera2/util/QCameraCommon.h b/msmcobalt/QCamera2/util/QCameraCommon.h
new file mode 100644
index 0000000..844b087
--- /dev/null
+++ b/msmcobalt/QCamera2/util/QCameraCommon.h
@@ -0,0 +1,61 @@
+/* Copyright (c) 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_COMMON_H__
+#define __QCAMERA_COMMON_H__
+
+// Camera dependencies
+#include "cam_types.h"
+#include "cam_intf.h"
+
+namespace qcamera {
+
+#define ALIGN(a, b) (((a) + (b)) & ~(b)) // NOTE(review): aligns up only when (b) is a 2^n-1 mask (result is a multiple of b+1); verify callers pass mask-style padding
+#define MAX(a, b) ((a) > (b) ? (a) : (b))
+
+class QCameraCommon {
+public:
+    QCameraCommon();
+    ~QCameraCommon();
+
+    int32_t init(cam_capability_t *cap);
+
+    int32_t getAnalysisInfo(
+        bool fdVideoEnabled, bool hal3, cam_feature_mask_t featureMask,
+        cam_analysis_info_t *pAnalysisInfo);
+    static uint32_t calculateLCM(int32_t num1, int32_t num2);
+
+private:
+    cam_capability_t *m_pCapability; // set by init(); pointer stored, not copied
+
+};
+
+}; // namespace qcamera
+#endif /* __QCAMERA_COMMON_H__ */
+
diff --git a/msmcobalt/QCamera2/util/QCameraDisplay.cpp b/msmcobalt/QCamera2/util/QCameraDisplay.cpp
new file mode 100644
index 0000000..d77f642
--- /dev/null
+++ b/msmcobalt/QCamera2/util/QCameraDisplay.cpp
@@ -0,0 +1,282 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCameraDisplay"
+
+// To remove
+#include <cutils/properties.h>
+
+// Camera dependencies
+#include "QCamera2HWI.h"
+#include "QCameraDisplay.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+#define CAMERA_VSYNC_WAIT_MS               33 // Used by vsync thread to wait for vsync timeout.
+#define DISPLAY_EVENT_RECEIVER_ARRAY_SIZE  1
+#define DISPLAY_DEFAULT_FPS                60
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : vsyncEventReceiverCamera
+ *
+ * DESCRIPTION: Computes average vsync interval. Called by display
+ *              event handler for every vsync event.
+ *
+ * PARAMETERS :
+ *   @fd      : file descriptor
+ *   @events  : events
+ *   @data    : pointer to user data provided during call back registration.
+ *
+ * RETURN     : always returns 1
+ *==========================================================================*/
+int QCameraDisplay::vsyncEventReceiverCamera(__unused int fd,
+                                             __unused int events, void* data) {
+    android::DisplayEventReceiver::Event buffer[DISPLAY_EVENT_RECEIVER_ARRAY_SIZE];
+    QCameraDisplay* pQCameraDisplay = (QCameraDisplay *) data;
+    ssize_t n;
+
+    while ((n = pQCameraDisplay->mDisplayEventReceiver.getEvents(buffer,
+            DISPLAY_EVENT_RECEIVER_ARRAY_SIZE)) > 0) {
+        for (int i = 0 ; i < n ; i++) {
+            if (buffer[i].header.type == DisplayEventReceiver::DISPLAY_EVENT_VSYNC) {
+                pQCameraDisplay->computeAverageVsyncInterval(buffer[i].header.timestamp);
+            }
+        }
+    }
+    return 1; // nonzero keeps this callback registered with the Looper
+}
+
+/*===========================================================================
+ * FUNCTION   : vsyncThreadCamera
+ *
+ * DESCRIPTION: Thread registers a call back function for every vsync event
+ *              waits on the looper for the next vsync.
+ *
+ * PARAMETERS :
+ *   @data    : pointer to the owning QCameraDisplay instance.
+ *
+ * RETURN     : NULL. Just to fulfill the type requirement of thread function.
+ *==========================================================================*/
+void* QCameraDisplay::vsyncThreadCamera(void * data)
+{
+    QCameraDisplay* pQCameraDisplay = (QCameraDisplay *) data;
+    android::sp<Looper> looper;
+
+    looper = new android::Looper(false);
+    status_t status = pQCameraDisplay->mDisplayEventReceiver.initCheck();
+    if (status != NO_ERROR) {
+        LOGE("Initialization of DisplayEventReceiver failed with status: %d", status);
+        return NULL;
+    }
+    looper->addFd(pQCameraDisplay->mDisplayEventReceiver.getFd(), 0, ALOOPER_EVENT_INPUT,
+            QCameraDisplay::vsyncEventReceiverCamera, pQCameraDisplay);
+    pQCameraDisplay->mDisplayEventReceiver.setVsyncRate(1);
+    while(pQCameraDisplay->mThreadExit == 0)
+    {
+        looper->pollOnce(CAMERA_VSYNC_WAIT_MS);
+    }
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraDisplay
+ *
+ * DESCRIPTION: constructor of QCameraDisplay
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraDisplay::QCameraDisplay()
+    : mVsyncTimeStamp(0),
+      mAvgVsyncInterval(0),
+      mOldTimeStamp(0),
+      mVsyncHistoryIndex(0),
+      mAdditionalVsyncOffsetForWiggle(0),
+      mThreadExit(0),
+      mNum_vsync_from_vfe_isr_to_presentation_timestamp(0),
+      mSet_timestamp_num_ns_prior_to_vsync(0),
+      mVfe_and_mdp_freq_wiggle_filter_max_ns(0),
+      mVfe_and_mdp_freq_wiggle_filter_min_ns(0)
+{
+    int rc = NO_ERROR;
+
+    memset(&mVsyncIntervalHistory, 0, sizeof(mVsyncIntervalHistory));
+    rc = pthread_create(&mVsyncThreadCameraHandle, NULL, vsyncThreadCamera, (void *)this);
+    if (rc == NO_ERROR) {
+        char    value[PROPERTY_VALUE_MAX];
+        nsecs_t default_vsync_interval;
+        pthread_setname_np(mVsyncThreadCameraHandle, "CAM_Vsync");
+        // Read a list of properties used for tuning
+        property_get("persist.camera.disp.num_vsync", value, "4");
+        mNum_vsync_from_vfe_isr_to_presentation_timestamp = atoi(value);
+        property_get("persist.camera.disp.ms_to_vsync", value, "2");
+        mSet_timestamp_num_ns_prior_to_vsync = atoi(value) * NSEC_PER_MSEC;
+        property_get("persist.camera.disp.filter_max", value, "2");
+        mVfe_and_mdp_freq_wiggle_filter_max_ns = atoi(value) * NSEC_PER_MSEC;
+        property_get("persist.camera.disp.filter_min", value, "4");
+        mVfe_and_mdp_freq_wiggle_filter_min_ns = atoi(value) * NSEC_PER_MSEC;
+        property_get("persist.camera.disp.fps", value, "60");
+        if (atoi(value) > 0) {
+            default_vsync_interval= s2ns(1) / atoi(value);
+        } else {
+            default_vsync_interval= s2ns(1) / DISPLAY_DEFAULT_FPS;
+        }
+        for (int i=0; i < CAMERA_NUM_VSYNC_INTERVAL_HISTORY; i++) {
+            mVsyncIntervalHistory[i] = default_vsync_interval;
+        }
+        LOGD("display jitter num_vsync_from_vfe_isr_to_presentation_timestamp %u \
+                set_timestamp_num_ns_prior_to_vsync %llu",
+                mNum_vsync_from_vfe_isr_to_presentation_timestamp,
+                mSet_timestamp_num_ns_prior_to_vsync);
+        LOGD("display jitter vfe_and_mdp_freq_wiggle_filter_max_ns %llu \
+                vfe_and_mdp_freq_wiggle_filter_min_ns %llu",
+                mVfe_and_mdp_freq_wiggle_filter_max_ns,
+                mVfe_and_mdp_freq_wiggle_filter_min_ns);
+    } else {
+        mVsyncThreadCameraHandle = 0;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraDisplay
+ *
+ * DESCRIPTION: destructor of QCameraDisplay
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraDisplay::~QCameraDisplay()
+{
+    mThreadExit = 1; // signals the vsync thread's poll loop to stop; NOTE(review): plain int shared across threads -- consider atomic
+    if (mVsyncThreadCameraHandle != 0) {
+        pthread_join(mVsyncThreadCameraHandle, NULL);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : computeAverageVsyncInterval
+ *
+ * DESCRIPTION: Computes average vsync interval using current and previously
+ *              stored vsync data.
+ *
+ * PARAMETERS : current vsync time stamp
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraDisplay::computeAverageVsyncInterval(nsecs_t currentVsyncTimeStamp)
+{
+    nsecs_t sum;
+    nsecs_t vsyncMaxOutlier;
+    nsecs_t vsyncMinOutlier;
+
+    mVsyncTimeStamp = currentVsyncTimeStamp;
+    if (mOldTimeStamp) {
+        // Compute average vsync interval using current and previously stored vsync data.
+        // Leave out the max and min vsync intervals from history when computing the average.
+        mVsyncIntervalHistory[mVsyncHistoryIndex] = currentVsyncTimeStamp - mOldTimeStamp;
+        mVsyncHistoryIndex++;
+        mVsyncHistoryIndex = mVsyncHistoryIndex % CAMERA_NUM_VSYNC_INTERVAL_HISTORY;
+        sum = mVsyncIntervalHistory[0];
+        vsyncMaxOutlier = mVsyncIntervalHistory[0];
+        vsyncMinOutlier = mVsyncIntervalHistory[0];
+        for (int j=1; j<CAMERA_NUM_VSYNC_INTERVAL_HISTORY; j++) {
+            sum += mVsyncIntervalHistory[j];
+            if (vsyncMaxOutlier < mVsyncIntervalHistory[j]) {
+                vsyncMaxOutlier = mVsyncIntervalHistory[j];
+            } else if (vsyncMinOutlier > mVsyncIntervalHistory[j]) {
+                vsyncMinOutlier = mVsyncIntervalHistory[j];
+            }
+        }
+        sum = sum - vsyncMaxOutlier - vsyncMinOutlier;
+        mAvgVsyncInterval = sum / (CAMERA_NUM_VSYNC_INTERVAL_HISTORY - 2);
+    }
+    mOldTimeStamp = currentVsyncTimeStamp;
+}
+
+/*===========================================================================
+ * FUNCTION   : computePresentationTimeStamp
+ *
+ * DESCRIPTION: Computes presentation time stamp using vsync interval
+ *              and last vsync time stamp and few other tunable variables
+ *              to place the time stamp at the expected future vsync
+ *
+ * PARAMETERS : current frame time stamp set by VFE when buffer copy done.
+ *
+ * RETURN     : time stamp in future or 0 in case of failure.
+ *==========================================================================*/
+nsecs_t QCameraDisplay::computePresentationTimeStamp(nsecs_t frameTimeStamp)
+{
+    nsecs_t moveToNextVsync;
+    nsecs_t keepInCurrentVsync;
+    nsecs_t timeDifference        = 0;
+    nsecs_t presentationTimeStamp = 0;
+    int     expectedVsyncOffset   = 0; // NOTE(review): nsecs_t math is narrowed to int below -- overflows for offsets beyond ~2.1s
+    int     vsyncOffset;
+
+    if ( (mAvgVsyncInterval != 0) && (mVsyncTimeStamp != 0) ) {
+        // Compute presentation time stamp in future as per the following formula
+        // future time stamp = vfe time stamp +  N *  average vsync interval
+        // Adjust the time stamp so that it is placed few milliseconds before
+        // the expected vsync.
+        // Adjust the time stamp for the period where vsync time stamp and VFE
+        // timestamp cross over due to difference in fps.
+        presentationTimeStamp = frameTimeStamp +
+                (mNum_vsync_from_vfe_isr_to_presentation_timestamp * mAvgVsyncInterval);
+        if (presentationTimeStamp > mVsyncTimeStamp) {
+            timeDifference      = presentationTimeStamp - mVsyncTimeStamp;
+            moveToNextVsync     = mAvgVsyncInterval - mVfe_and_mdp_freq_wiggle_filter_min_ns;
+            keepInCurrentVsync  = mAvgVsyncInterval - mVfe_and_mdp_freq_wiggle_filter_max_ns;
+            vsyncOffset         = timeDifference % mAvgVsyncInterval;
+            expectedVsyncOffset = mAvgVsyncInterval -
+                    mSet_timestamp_num_ns_prior_to_vsync - vsyncOffset;
+            if (vsyncOffset > moveToNextVsync) {
+                mAdditionalVsyncOffsetForWiggle = mAvgVsyncInterval;
+            } else if (vsyncOffset < keepInCurrentVsync) {
+                mAdditionalVsyncOffsetForWiggle = 0;
+            }
+            LOGD("vsyncTimeStamp: %llu presentationTimeStamp: %llu expectedVsyncOffset: %d \
+                    timeDifference: %llu vsyncffset: %d avgvsync: %llu \
+                    additionalvsyncOffsetForWiggle: %llu",
+                    mVsyncTimeStamp, presentationTimeStamp, expectedVsyncOffset,
+                    timeDifference, vsyncOffset, mAvgVsyncInterval,
+                    mAdditionalVsyncOffsetForWiggle);
+        }
+        presentationTimeStamp = presentationTimeStamp + expectedVsyncOffset +
+                mAdditionalVsyncOffsetForWiggle;
+    }
+    return presentationTimeStamp;
+}
+
+}; // namespace qcamera
diff --git a/msmcobalt/QCamera2/util/QCameraDisplay.h b/msmcobalt/QCamera2/util/QCameraDisplay.h
new file mode 100644
index 0000000..bb6d157
--- /dev/null
+++ b/msmcobalt/QCamera2/util/QCameraDisplay.h
@@ -0,0 +1,77 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERADISPLAY_H__
+#define __QCAMERADISPLAY_H__
+
+#include <gui/DisplayEventReceiver.h>
+#include <android/looper.h>
+#include <utils/Looper.h>
+
+namespace qcamera {
+
+#define CAMERA_NUM_VSYNC_INTERVAL_HISTORY  6
+#define NSEC_PER_MSEC 1000000LLU
+
+// Tracks display vsync events and computes presentation timestamps for
+// preview frames so that they land just ahead of the expected vsync.
+class QCameraDisplay {
+public:
+    QCameraDisplay();
+    ~QCameraDisplay();
+    // Looper callback invoked when a vsync event arrives on fd.
+    static int   vsyncEventReceiverCamera(int fd, int events, void* data);
+    // Thread entry point that pumps the vsync event loop.
+    static void* vsyncThreadCamera(void * data);
+    // Folds currentVsyncTimeStamp into the running vsync-interval average.
+    void         computeAverageVsyncInterval(nsecs_t currentVsyncTimeStamp);
+    // Maps a VFE frame timestamp to the presentation timestamp to queue with.
+    nsecs_t      computePresentationTimeStamp(nsecs_t frameTimeStamp);
+
+private:
+    pthread_t mVsyncThreadCameraHandle;
+    nsecs_t   mVsyncTimeStamp;           // most recent vsync timestamp
+    nsecs_t   mAvgVsyncInterval;         // rolling average of vsync period
+    nsecs_t   mOldTimeStamp;
+    // Circular history used to average out vsync-interval jitter.
+    nsecs_t   mVsyncIntervalHistory[CAMERA_NUM_VSYNC_INTERVAL_HISTORY];
+    nsecs_t   mVsyncHistoryIndex;
+    nsecs_t   mAdditionalVsyncOffsetForWiggle;
+    uint32_t  mThreadExit;               // non-zero asks the vsync thread to stop
+    // Tunable property. Increasing this will increase the frame delay and will
+    // lose the real time display.
+    uint32_t  mNum_vsync_from_vfe_isr_to_presentation_timestamp;
+    // Tunable property. Set the time stamp x ns prior to expected vsync so that
+    // it will be picked in that vsync
+    nsecs_t  mSet_timestamp_num_ns_prior_to_vsync;
+    // Tunable property for filtering timestamp wiggle when VFE ISR crosses
+    // over MDP ISR over a period. Typical scenario is VFE is running at
+    // 30.2 fps vs display running at 60 fps.
+    nsecs_t  mVfe_and_mdp_freq_wiggle_filter_max_ns;
+    nsecs_t  mVfe_and_mdp_freq_wiggle_filter_min_ns;
+
+    android::DisplayEventReceiver  mDisplayEventReceiver;
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERADISPLAY_H__ */
diff --git a/msmcobalt/QCamera2/util/QCameraFlash.cpp b/msmcobalt/QCamera2/util/QCameraFlash.cpp
new file mode 100644
index 0000000..db517b6
--- /dev/null
+++ b/msmcobalt/QCamera2/util/QCameraFlash.cpp
@@ -0,0 +1,413 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+// System dependencies
+#include <stdio.h>
+#include <fcntl.h>
+#include <media/msm_cam_sensor.h>
+
+// Camera dependencies
+#include "HAL3/QCamera3HWI.h"
+#include "QCameraFlash.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+#define STRING_LENGTH_OF_64_BIT_NUMBER 21
+
+volatile uint32_t gCamHal3LogLevel = 1;
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : getInstance
+ *
+ * DESCRIPTION: Get, lazily creating on first use, the QCameraFlash
+ *              singleton. Uses a function-local static, which is
+ *              initialized thread-safely under C++11 semantics.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : Reference to the process-wide QCameraFlash instance.
+ *==========================================================================*/
+QCameraFlash& QCameraFlash::getInstance()
+{
+    static QCameraFlash flashInstance;
+    return flashInstance;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraFlash
+ *
+ * DESCRIPTION: default constructor of QCameraFlash. Marks every flash unit
+ *              as off, not reserved by a camera, and not opened.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraFlash::QCameraFlash() : m_callbacks(NULL)
+{
+    memset(&m_flashOn, 0, sizeof(m_flashOn));
+    memset(&m_cameraOpen, 0, sizeof(m_cameraOpen));
+    for (int pos = 0; pos < MM_CAMERA_MAX_NUM_SENSORS; pos++) {
+        // -1 marks the flash device node for this sensor as not opened.
+        m_flashFds[pos] = -1;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraFlash
+ *
+ * DESCRIPTION: destructor of QCameraFlash. Turns off and closes any flash
+ *              device node that is still open.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraFlash::~QCameraFlash()
+{
+    for (int pos = 0; pos < MM_CAMERA_MAX_NUM_SENSORS; pos++) {
+        if (m_flashFds[pos] >= 0)
+            {
+                // Switch the flash off before releasing its file descriptor.
+                setFlashMode(pos, false);
+                close(m_flashFds[pos]);
+                m_flashFds[pos] = -1;
+            }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : registerCallbacks
+ *
+ * DESCRIPTION: Store the framework callback table so this module can emit
+ *              torch_mode_status_change notifications later on.
+ *
+ * PARAMETERS :
+ *   @callbacks : callback table provided by the camera framework
+ *
+ * RETURN     : 0 (always succeeds)
+ *==========================================================================*/
+int32_t QCameraFlash::registerCallbacks(
+        const camera_module_callbacks_t* callbacks)
+{
+    m_callbacks = callbacks;
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : initFlash
+ *
+ * DESCRIPTION: Reserve and initialize the flash unit associated with a
+ *              given camera id. This function is blocking until the
+ *              operation completes or fails. Each flash unit can be "inited"
+ *              by only one process at a time.
+ *
+ * PARAMETERS :
+ *   @camera_id : Camera id of the flash.
+ *
+ * RETURN     :
+ *   0        : success
+ *   -EBUSY   : The flash unit or the resource needed to turn on the
+ *              the flash is busy, typically because the flash is
+ *              already in use.
+ *   -EINVAL  : No flash present at camera_id.
+ *==========================================================================*/
+int32_t QCameraFlash::initFlash(const int camera_id)
+{
+    int32_t retVal = 0;
+    bool hasFlash = false;
+    char flashNode[QCAMERA_MAX_FILEPATH_LENGTH];
+    char flashPath[QCAMERA_MAX_FILEPATH_LENGTH] = "/dev/";
+
+    if (camera_id < 0 || camera_id >= MM_CAMERA_MAX_NUM_SENSORS) {
+        LOGE("Invalid camera id: %d", camera_id);
+        return -EINVAL;
+    }
+
+    // Query HAL3 for whether this sensor has a flash and its device node name.
+    QCamera3HardwareInterface::getFlashInfo(camera_id,
+            hasFlash,
+            flashNode);
+
+    // Build the full device path "/dev/<flashNode>".
+    strlcat(flashPath,
+            flashNode,
+            sizeof(flashPath));
+
+    if (!hasFlash) {
+        LOGE("No flash available for camera id: %d",
+                camera_id);
+        retVal = -EINVAL;
+    } else if (m_cameraOpen[camera_id]) {
+        LOGE("Camera in use for camera id: %d",
+                camera_id);
+        retVal = -EBUSY;
+    } else if (m_flashFds[camera_id] >= 0) {
+        LOGD("Flash is already inited for camera id: %d",
+                camera_id);
+    } else {
+        m_flashFds[camera_id] = open(flashPath, O_RDWR | O_NONBLOCK);
+
+        if (m_flashFds[camera_id] < 0) {
+            LOGE("Unable to open node '%s'",
+                    flashPath);
+            retVal = -EBUSY;
+        } else {
+            // Issue the CFG_FLASH_INIT ioctl to initialize the flash driver.
+            struct msm_flash_cfg_data_t cfg;
+            struct msm_flash_init_info_t init_info;
+            memset(&cfg, 0, sizeof(struct msm_flash_cfg_data_t));
+            memset(&init_info, 0, sizeof(struct msm_flash_init_info_t));
+            init_info.flash_driver_type = FLASH_DRIVER_DEFAULT;
+            cfg.cfg.flash_init_info = &init_info;
+            cfg.cfg_type = CFG_FLASH_INIT;
+            retVal = ioctl(m_flashFds[camera_id],
+                    VIDIOC_MSM_FLASH_CFG,
+                    &cfg);
+            if (retVal < 0) {
+                LOGE("Unable to init flash for camera id: %d",
+                        camera_id);
+                close(m_flashFds[camera_id]);
+                m_flashFds[camera_id] = -1;
+            }
+
+            /* wait for PMIC to init */
+            usleep(5000);
+        }
+    }
+
+    LOGD("X, retVal = %d", retVal);
+    return retVal;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFlashMode
+ *
+ * DESCRIPTION: Turn on or off the flash associated with a given handle.
+ *              This function is blocking until the operation completes or
+ *              fails.
+ *
+ * PARAMETERS :
+ *   @camera_id  : Camera id of the flash
+ *   @mode       : Whether to turn flash on (true) or off (false)
+ *
+ * RETURN     :
+ *   0        : success
+ *   -EINVAL  : No camera present at camera_id, or it is not inited.
+ *   -EALREADY: Flash is already in requested state
+ *==========================================================================*/
+int32_t QCameraFlash::setFlashMode(const int camera_id, const bool mode)
+{
+    int32_t retVal = 0;
+    struct msm_flash_cfg_data_t cfg;
+
+    if (camera_id < 0 || camera_id >= MM_CAMERA_MAX_NUM_SENSORS) {
+        LOGE("Invalid camera id: %d", camera_id);
+        retVal = -EINVAL;
+    } else if (mode == m_flashOn[camera_id]) {
+        LOGD("flash %d is already in requested state: %d",
+                camera_id,
+                mode);
+        retVal = -EALREADY;
+    } else if (m_flashFds[camera_id] < 0) {
+        LOGE("called for uninited flash: %d", camera_id);
+        retVal = -EINVAL;
+    }  else {
+        memset(&cfg, 0, sizeof(struct msm_flash_cfg_data_t));
+        // Program the torch-level current on every LED trigger.
+        for (int i = 0; i < MAX_LED_TRIGGERS; i++)
+            cfg.flash_current[i] = QCAMERA_TORCH_CURRENT_VALUE;
+        // CFG_FLASH_LOW is the low-power (torch) mode, not a full strobe.
+        cfg.cfg_type = mode ? CFG_FLASH_LOW: CFG_FLASH_OFF;
+
+        retVal = ioctl(m_flashFds[camera_id],
+                        VIDIOC_MSM_FLASH_CFG,
+                        &cfg);
+        if (retVal < 0) {
+            LOGE("Unable to change flash mode to %d for camera id: %d",
+                     mode, camera_id);
+        } else
+        {
+            // Only track the new state once the driver accepted it.
+            m_flashOn[camera_id] = mode;
+        }
+    }
+    return retVal;
+}
+
+/*===========================================================================
+ * FUNCTION   : deinitFlash
+ *
+ * DESCRIPTION: Release the flash unit associated with a given camera
+ *              position. This function is blocking until the operation
+ *              completes or fails.
+ *
+ * PARAMETERS :
+ *   @camera_id : Camera id of the flash.
+ *
+ * RETURN     :
+ *   0        : success
+ *   -EINVAL  : No camera present at camera_id or not inited.
+ *==========================================================================*/
+int32_t QCameraFlash::deinitFlash(const int camera_id)
+{
+    int32_t retVal = 0;
+
+    if (camera_id < 0 || camera_id >= MM_CAMERA_MAX_NUM_SENSORS) {
+        LOGE("Invalid camera id: %d", camera_id);
+        retVal = -EINVAL;
+    } else if (m_flashFds[camera_id] < 0) {
+        LOGE("called deinitFlash for uninited flash");
+        retVal = -EINVAL;
+    } else {
+        // Make sure the flash is off before releasing the driver.
+        setFlashMode(camera_id, false);
+
+        // Zero-initialize the config like the other ioctl paths do;
+        // otherwise uninitialized stack bytes are handed to the kernel.
+        struct msm_flash_cfg_data_t cfg;
+        memset(&cfg, 0, sizeof(struct msm_flash_cfg_data_t));
+        cfg.cfg_type = CFG_FLASH_RELEASE;
+        retVal = ioctl(m_flashFds[camera_id],
+                VIDIOC_MSM_FLASH_CFG,
+                &cfg);
+        if (retVal < 0) {
+            LOGE("Failed to release flash for camera id: %d",
+                    camera_id);
+        }
+
+        close(m_flashFds[camera_id]);
+        m_flashFds[camera_id] = -1;
+    }
+
+    return retVal;
+}
+
+/*===========================================================================
+ * FUNCTION   : reserveFlashForCamera
+ *
+ * DESCRIPTION: Give control of the flash to the camera, and notify
+ *              framework that the flash has become unavailable.
+ *
+ * PARAMETERS :
+ *   @camera_id : Camera id of the flash.
+ *
+ * RETURN     :
+ *   0        : success
+ *   -EINVAL  : No camera present at camera_id or not inited.
+ *   -ENOSYS  : No callback available for torch_mode_status_change.
+ *==========================================================================*/
+int32_t QCameraFlash::reserveFlashForCamera(const int camera_id)
+{
+    int32_t retVal = 0;
+
+    if (camera_id < 0 || camera_id >= MM_CAMERA_MAX_NUM_SENSORS) {
+        LOGE("Invalid camera id: %d", camera_id);
+        retVal = -EINVAL;
+    } else if (m_cameraOpen[camera_id]) {
+        LOGD("Flash already reserved for camera id: %d",
+                camera_id);
+    } else {
+        // If the torch is currently on, shut it down and release the
+        // driver before the camera takes ownership of the flash unit.
+        if (m_flashOn[camera_id]) {
+            setFlashMode(camera_id, false);
+            deinitFlash(camera_id);
+        }
+        m_cameraOpen[camera_id] = true;
+
+        bool hasFlash = false;
+        char flashNode[QCAMERA_MAX_FILEPATH_LENGTH];
+
+        QCamera3HardwareInterface::getFlashInfo(camera_id,
+                hasFlash,
+                flashNode);
+
+        if (m_callbacks == NULL ||
+                m_callbacks->torch_mode_status_change == NULL) {
+            LOGE("Callback is not defined!");
+            retVal = -ENOSYS;
+        } else if (!hasFlash) {
+            LOGD("Suppressing callback "
+                    "because no flash exists for camera id: %d",
+                    camera_id);
+        } else {
+            // Tell the framework the torch is unavailable while the
+            // camera owns the flash unit.
+            char cameraIdStr[STRING_LENGTH_OF_64_BIT_NUMBER];
+            snprintf(cameraIdStr, STRING_LENGTH_OF_64_BIT_NUMBER,
+                    "%d", camera_id);
+            m_callbacks->torch_mode_status_change(m_callbacks,
+                    cameraIdStr,
+                    TORCH_MODE_STATUS_NOT_AVAILABLE);
+        }
+    }
+
+    return retVal;
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseFlashFromCamera
+ *
+ * DESCRIPTION: Release control of the flash from the camera, and notify
+ *              framework that the flash has become available.
+ *
+ * PARAMETERS :
+ *   @camera_id : Camera id of the flash.
+ *
+ * RETURN     :
+ *   0        : success
+ *   -EINVAL  : No camera present at camera_id or not inited.
+ *   -ENOSYS  : No callback available for torch_mode_status_change.
+ *==========================================================================*/
+int32_t QCameraFlash::releaseFlashFromCamera(const int camera_id)
+{
+    int32_t retVal = 0;
+
+    if (camera_id < 0 || camera_id >= MM_CAMERA_MAX_NUM_SENSORS) {
+        LOGE("Invalid camera id: %d", camera_id);
+        retVal = -EINVAL;
+    } else if (!m_cameraOpen[camera_id]) {
+        LOGD("Flash not reserved for camera id: %d",
+                camera_id);
+    } else {
+        m_cameraOpen[camera_id] = false;
+
+        bool hasFlash = false;
+        char flashNode[QCAMERA_MAX_FILEPATH_LENGTH];
+
+        QCamera3HardwareInterface::getFlashInfo(camera_id,
+                hasFlash,
+                flashNode);
+
+        if (m_callbacks == NULL ||
+                m_callbacks->torch_mode_status_change == NULL) {
+            LOGE("Callback is not defined!");
+            retVal = -ENOSYS;
+        } else if (!hasFlash) {
+            LOGD("Suppressing callback "
+                    "because no flash exists for camera id: %d",
+                    camera_id);
+        } else {
+            // Tell the framework the torch is available again and off.
+            char cameraIdStr[STRING_LENGTH_OF_64_BIT_NUMBER];
+            snprintf(cameraIdStr, STRING_LENGTH_OF_64_BIT_NUMBER,
+                    "%d", camera_id);
+            m_callbacks->torch_mode_status_change(m_callbacks,
+                    cameraIdStr,
+                    TORCH_MODE_STATUS_AVAILABLE_OFF);
+        }
+    }
+
+    return retVal;
+}
+
+}; // namespace qcamera
diff --git a/msmcobalt/QCamera2/util/QCameraFlash.h b/msmcobalt/QCamera2/util/QCameraFlash.h
new file mode 100644
index 0000000..ede685b
--- /dev/null
+++ b/msmcobalt/QCamera2/util/QCameraFlash.h
@@ -0,0 +1,69 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_FLASH_H__
+#define __QCAMERA_FLASH_H__
+
+// Camera dependencies
+#include "hardware/camera_common.h"
+
+extern "C" {
+#include "mm_camera_interface.h"
+}
+
+namespace qcamera {
+
+#define QCAMERA_TORCH_CURRENT_VALUE 200
+
+// Singleton that arbitrates the torch/flash units between the torch API
+// and camera sessions, and reports availability to the framework.
+class QCameraFlash {
+public:
+    static QCameraFlash& getInstance();
+
+    int32_t registerCallbacks(const camera_module_callbacks_t* callbacks);
+    int32_t initFlash(const int camera_id);
+    int32_t setFlashMode(const int camera_id, const bool on);
+    int32_t deinitFlash(const int camera_id);
+    int32_t reserveFlashForCamera(const int camera_id);
+    int32_t releaseFlashFromCamera(const int camera_id);
+
+private:
+    QCameraFlash();
+    virtual ~QCameraFlash();
+    // Non-copyable: singleton identity must be preserved.
+    QCameraFlash(const QCameraFlash&);
+    QCameraFlash& operator=(const QCameraFlash&);
+
+    const camera_module_callbacks_t *m_callbacks;
+    int32_t m_flashFds[MM_CAMERA_MAX_NUM_SENSORS];  // -1 when node not open
+    bool m_flashOn[MM_CAMERA_MAX_NUM_SENSORS];      // torch currently on
+    bool m_cameraOpen[MM_CAMERA_MAX_NUM_SENSORS];   // camera owns the flash
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_FLASH_H__ */
diff --git a/msmcobalt/QCamera2/util/QCameraPerf.cpp b/msmcobalt/QCamera2/util/QCameraPerf.cpp
new file mode 100644
index 0000000..e143f0f
--- /dev/null
+++ b/msmcobalt/QCamera2/util/QCameraPerf.cpp
@@ -0,0 +1,545 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCameraPerf"
+
+// To remove
+#include <cutils/properties.h>
+
+// System dependencies
+#include <stdlib.h>
+#include <dlfcn.h>
+#include <utils/Timers.h>
+// Camera dependencies
+#include "QCameraPerf.h"
+#include "QCameraTrace.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : QCameraPerfLock constructor
+ *
+ * DESCRIPTION: initialize member variables. All state read by
+ *              lock_deinit()/lock_rel paths is initialized here so those
+ *              functions are safe even if lock_init() was never called.
+ *
+ * PARAMETERS :
+ *   None
+ *
+ * RETURN     : void
+ *
+ *==========================================================================*/
+QCameraPerfLock::QCameraPerfLock() :
+        perf_lock_acq(NULL),
+        perf_lock_rel(NULL),
+        mDlHandle(NULL),
+        mPerfLockEnable(0),
+        mPerfLockHandle(-1),
+        mPerfLockHandleTimed(-1),
+        mTimerSet(0),
+        mPerfLockTimeout(0),
+        mStartTimeofLock(0),
+        // Previously left uninitialized until lock_init(); give the power
+        // hint state a defined value from construction onward.
+        mCurrentPowerHint(static_cast<power_hint_t>(0)),
+        mCurrentPowerHintEnable(false)
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraPerfLock destructor
+ *
+ * DESCRIPTION: class destructor. Releases any outstanding perf locks and
+ *              power hints, and unloads the perf library.
+ *
+ * PARAMETERS :
+ *   None
+ *
+ * RETURN     : void
+ *
+ *==========================================================================*/
+QCameraPerfLock::~QCameraPerfLock()
+{
+    lock_deinit();
+}
+
+
+/*===========================================================================
+ * FUNCTION   : lock_init
+ *
+ * DESCRIPTION: opens the performance lib and initilizes the perf lock
+ *              functions. On any failure the perf-lock feature is disabled
+ *              and the library handle is released.
+ *
+ * PARAMETERS :
+ *   None
+ *
+ * RETURN     : void
+ *
+ *==========================================================================*/
+void QCameraPerfLock::lock_init()
+{
+    const char *rc;
+    char value[PROPERTY_VALUE_MAX];
+
+    LOGD("E");
+    Mutex::Autolock lock(mLock);
+
+    // Clear the list of active power hints
+    mActivePowerHints.clear();
+    mCurrentPowerHint       = static_cast<power_hint_t>(0);
+    mCurrentPowerHintEnable = false;
+
+    property_get("persist.camera.perflock.enable", value, "1");
+    mPerfLockEnable = atoi(value);
+#ifdef HAS_MULTIMEDIA_HINTS
+    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
+        LOGE("%s module not found", POWER_HARDWARE_MODULE_ID);
+    }
+#endif
+
+    if (mPerfLockEnable) {
+        perf_lock_acq = NULL;
+        perf_lock_rel = NULL;
+        mPerfLockHandle = -1;
+        /* Retrieve name of vendor extension library */
+        if (property_get("ro.vendor.extension_library", value, NULL) <= 0) {
+            goto cleanup;
+        }
+
+        mDlHandle = dlopen(value, RTLD_NOW | RTLD_LOCAL);
+        if (mDlHandle == NULL) {
+            goto cleanup;
+        }
+
+        // Clear any stale error state before resolving symbols.
+        dlerror();
+
+        perf_lock_acq = (int (*) (int, int, int[], int))dlsym(mDlHandle, "perf_lock_acq");
+        if ((rc = dlerror()) != NULL) {
+            // Include the dlerror text so the failure is diagnosable.
+            LOGE("failed to get perf_lock_acq function handle: %s", rc);
+            goto cleanup;
+        }
+
+        perf_lock_rel = (int (*) (int))dlsym(mDlHandle, "perf_lock_rel");
+        if ((rc = dlerror()) != NULL) {
+            LOGE("failed to get perf_lock_rel function handle: %s", rc);
+            goto cleanup;
+        }
+        LOGD("X");
+        return;
+
+cleanup:
+        perf_lock_acq  = NULL;
+        perf_lock_rel  = NULL;
+        mPerfLockEnable = 0;
+        if (mDlHandle) {
+            dlclose(mDlHandle);
+            mDlHandle = NULL;
+        }
+    }
+    LOGD("X");
+}
+
+/*===========================================================================
+ * FUNCTION   : lock_deinit
+ *
+ * DESCRIPTION: deinitialize the perf lock parameters. Disables any active
+ *              power hint, releases both the timed and untimed perf locks,
+ *              and unloads the perf library.
+ *
+ * PARAMETERS :
+ *   None
+ *
+ * RETURN     : void
+ *
+ *==========================================================================*/
+void QCameraPerfLock::lock_deinit()
+{
+    Mutex::Autolock lock(mLock);
+    if (mPerfLockEnable) {
+        LOGD("E");
+
+        if (mActivePowerHints.empty() == false) {
+            // Disable the active power hint
+            mCurrentPowerHint = *mActivePowerHints.begin();
+            powerHintInternal(mCurrentPowerHint, false);
+            mActivePowerHints.clear();
+        }
+
+        // Release the timed perf lock if it is still held.
+        if ((NULL != perf_lock_rel) && (mPerfLockHandleTimed >= 0)) {
+            (*perf_lock_rel)(mPerfLockHandleTimed);
+        }
+
+        // Release the untimed perf lock if it is still held.
+        if ((NULL != perf_lock_rel) && (mPerfLockHandle >= 0)) {
+            (*perf_lock_rel)(mPerfLockHandle);
+        }
+
+        if (mDlHandle) {
+            perf_lock_acq  = NULL;
+            perf_lock_rel  = NULL;
+
+            dlclose(mDlHandle);
+            mDlHandle       = NULL;
+        }
+        mPerfLockEnable = 0;
+        LOGD("X");
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : isTimerReset
+ *
+ * DESCRIPTION: Check whether the timed perf-lock duration has elapsed.
+ *              If so, the timer is cleared as a side effect.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : true if timeout reached (timer cleared)
+ *              false otherwise
+ *
+ *==========================================================================*/
+bool QCameraPerfLock::isTimerReset()
+{
+    Mutex::Autolock lock(mLock);
+
+    if (!mPerfLockEnable || !mTimerSet) {
+        return false;
+    }
+
+    nsecs_t elapsed = systemTime() - mStartTimeofLock;
+    if (ns2ms(elapsed) > (uint32_t)mPerfLockTimeout) {
+        resetTimer();
+        return true;
+    }
+
+    return false;
+}
+
+/*===========================================================================
+ * FUNCTION   : resetTimer
+ *
+ * DESCRIPTION: Clear the timed-perf-lock timer state so no timeout is
+ *              considered pending.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : void
+ *
+ *==========================================================================*/
+void QCameraPerfLock::resetTimer()
+{
+    mTimerSet        = 0;
+    mPerfLockTimeout = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : startTimer
+ *
+ * DESCRIPTION: record the start time and arm the timed-perf-lock timer
+ *
+ * PARAMETERS :
+ *  @timer_val: timer duration in milliseconds
+ *
+ * RETURN     : void
+ *
+ *==========================================================================*/
+void QCameraPerfLock::startTimer(uint32_t timer_val)
+{
+    mStartTimeofLock = systemTime();
+    mTimerSet = 1;
+    mPerfLockTimeout = timer_val;
+}
+
+/*===========================================================================
+ * FUNCTION   : lock_acq_timed
+ *
+ * DESCRIPTION: Acquire the performance lock for the specified duration.
+ *              If an existing lock timeout has not elapsed, extend the
+ *              lock further for the specified duration
+ *
+ * PARAMETERS :
+ *  @timer_val: lock duration
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *
+ *==========================================================================*/
+int32_t QCameraPerfLock::lock_acq_timed(int32_t timer_val)
+{
+    int32_t ret = -1;
+
+    LOGD("E");
+    Mutex::Autolock lock(mLock);
+
+    if (mPerfLockEnable) {
+        // Perf-lock resource list; names suggest it disables CPU power
+        // collapse and raises min frequencies -- exact semantics are
+        // defined by the vendor perf library (confirm against its docs).
+        int32_t perf_lock_params[] = {
+                ALL_CPUS_PWR_CLPS_DIS,
+                CPU0_MIN_FREQ_TURBO_MAX,
+                CPU4_MIN_FREQ_TURBO_MAX
+        };
+        if (mTimerSet) {
+            // Extend the new duration by whatever remains of the current one.
+            nsecs_t curElapsedTime = systemTime() - mStartTimeofLock;
+            int32_t pendingTimeout = mPerfLockTimeout - ns2ms(curElapsedTime);
+            timer_val += pendingTimeout;
+        }
+        startTimer(timer_val);
+
+        // Disable power hint when acquiring the perf lock
+        if (mCurrentPowerHintEnable) {
+            LOGD("mCurrentPowerHintEnable %d" ,mCurrentPowerHintEnable);
+            powerHintInternal(mCurrentPowerHint, false);
+        }
+
+        // Only acquire when no timed lock is already held (handle < 0).
+        if ((NULL != perf_lock_acq) && (mPerfLockHandleTimed < 0)) {
+            ret = (*perf_lock_acq)(mPerfLockHandleTimed, timer_val, perf_lock_params,
+                    sizeof(perf_lock_params) / sizeof(int32_t));
+            LOGD("ret %d", ret);
+            if (ret < 0) {
+                LOGE("failed to acquire lock");
+            } else {
+                // Non-negative return is the handle for the new lock.
+                mPerfLockHandleTimed = ret;
+            }
+        }
+        LOGD("perf_handle_acq %d ", mPerfLockHandleTimed);
+    }
+
+    LOGD("X");
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : lock_acq
+ *
+ * DESCRIPTION: acquire the performance lock (untimed variant; requested
+ *              with a ONE_SEC duration from the perf library)
+ *
+ * PARAMETERS :
+ *   None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *
+ *==========================================================================*/
+int32_t QCameraPerfLock::lock_acq()
+{
+    int32_t ret = -1;
+
+    LOGD("E");
+    Mutex::Autolock lock(mLock);
+
+    if (mPerfLockEnable) {
+        // Same perf-lock resource list as lock_acq_timed(); semantics are
+        // defined by the vendor perf library.
+        int32_t perf_lock_params[] = {
+                ALL_CPUS_PWR_CLPS_DIS,
+                CPU0_MIN_FREQ_TURBO_MAX,
+                CPU4_MIN_FREQ_TURBO_MAX
+        };
+
+        // Disable power hint when acquiring the perf lock
+        if (mCurrentPowerHintEnable) {
+            powerHintInternal(mCurrentPowerHint, false);
+        }
+
+        // Only acquire when no untimed lock is already held (handle < 0).
+        if ((NULL != perf_lock_acq) && (mPerfLockHandle < 0)) {
+            ret = (*perf_lock_acq)(mPerfLockHandle, ONE_SEC, perf_lock_params,
+                    sizeof(perf_lock_params) / sizeof(int32_t));
+            LOGD("ret %d", ret);
+            if (ret < 0) {
+                LOGE("failed to acquire lock");
+            } else {
+                // Non-negative return is the handle for the new lock.
+                mPerfLockHandle = ret;
+            }
+        }
+        LOGD("perf_handle_acq %d ", mPerfLockHandle);
+    }
+
+    LOGD("X");
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : lock_rel_timed
+ *
+ * DESCRIPTION: release the performance lock
+ *
+ * PARAMETERS :
+ *   None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *
+ *==========================================================================*/
+int32_t QCameraPerfLock::lock_rel_timed()
+{
+    int ret = -1;
+    Mutex::Autolock lock(mLock);
+    if (mPerfLockEnable) {
+        LOGD("E");
+        if (mPerfLockHandleTimed < 0) {
+            LOGW("mPerfLockHandle < 0,check if lock is acquired");
+            return ret;
+        }
+        LOGD("perf_handle_rel %d ", mPerfLockHandleTimed);
+
+        if ((NULL != perf_lock_rel) && (0 <= mPerfLockHandleTimed)) {
+            ret = (*perf_lock_rel)(mPerfLockHandleTimed);
+            if (ret < 0) {
+                LOGE("failed to release lock");
+            }
+            mPerfLockHandleTimed = -1;
+            resetTimer();
+        }
+
+        if ((mCurrentPowerHintEnable == 1) && (mTimerSet == 0)) {
+            powerHintInternal(mCurrentPowerHint, mCurrentPowerHintEnable);
+        }
+        LOGD("X");
+    }
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : lock_rel
+ *
+ * DESCRIPTION: release the performance lock
+ *
+ * PARAMETERS :
+ *   None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *
+ *==========================================================================*/
+int32_t QCameraPerfLock::lock_rel()
+{
+    int ret = -1;
+    Mutex::Autolock lock(mLock);
+    if (mPerfLockEnable) {
+        LOGD("E");
+        if (mPerfLockHandle < 0) {
+            LOGW("mPerfLockHandle < 0,check if lock is acquired");
+            return ret;
+        }
+        LOGD("perf_handle_rel %d ", mPerfLockHandle);
+
+        if ((NULL != perf_lock_rel) && (0 <= mPerfLockHandle)) {
+            ret = (*perf_lock_rel)(mPerfLockHandle);
+            if (ret < 0) {
+                LOGE("failed to release lock");
+            }
+            mPerfLockHandle = -1;
+        }
+
+        if (mCurrentPowerHintEnable == 1) {
+            powerHintInternal(mCurrentPowerHint, mCurrentPowerHintEnable);
+        }
+        LOGD("X");
+    }
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : powerHintInternal
+ *
+ * DESCRIPTION: Sets the requested power hint and state to power HAL.
+ *
+ * PARAMETERS :
+ * hint       : Power hint
+ * enable     : Enable power hint if set to 1. Disable if set to 0.
+ * RETURN     : void
+ *
+ *==========================================================================*/
+void QCameraPerfLock::powerHintInternal(power_hint_t hint, bool enable)
+{
+#ifdef HAS_MULTIMEDIA_HINTS
+    if (m_pPowerModule != NULL) {
+        if (enable == true) {
+            m_pPowerModule->powerHint(m_pPowerModule, hint, (void *)"state=1");
+        } else {
+            m_pPowerModule->powerHint(m_pPowerModule, hint, (void *)"state=0");
+        }
+    }
+#endif
+}
+
/*===========================================================================
 * FUNCTION   : powerHint
 *
 * DESCRIPTION: Updates the list containing active/enabled power hints.
 *              If needed, calls the internal powerHint function with
 *              requested power hint and state. Only one hint is active at
 *              a time; previously enabled hints are stacked in
 *              mActivePowerHints and restored on disable.
 * PARAMETERS :
 * hint       : Power hint
 * enable     : Enable power hint if set to 1. Disable if set to 0.
 * RETURN     : void
 *
 *==========================================================================*/
void QCameraPerfLock::powerHint(power_hint_t hint, bool enable)
{
#ifdef HAS_MULTIMEDIA_HINTS
    if (enable == true) {
        // Skip entirely if this hint is already current AND enabled.
        if ((hint != mCurrentPowerHint) || (enable != mCurrentPowerHintEnable)) {
            // Disable the current active power hint
            if (mCurrentPowerHintEnable == true) {
                powerHintInternal(mCurrentPowerHint, false);
            }
            // Push the new power hint at the head of the active power hint list
            // NOTE(review): re-enabling the same hint after a state mismatch
            // pushes a duplicate entry -- presumed benign; verify with callers.
            mActivePowerHints.push_front(hint);

            // Set the new power hint
            mCurrentPowerHint       = hint;
            mCurrentPowerHintEnable = enable;
            powerHintInternal(hint, enable);
        }
    } else {
        // Remove the power hint from the list (first occurrence only)
        for (List<power_hint_t>::iterator it = mActivePowerHints.begin();
                it != mActivePowerHints.end(); ++it) {
            if (*it == hint) {
                if (it != mActivePowerHints.begin()) {
                    LOGW("Request to remove the previous power hint: %d instead of "
                            "currently active power hint: %d", static_cast<int>(hint),
                                                            static_cast<int>(mCurrentPowerHint));
                }
                mActivePowerHints.erase(it);
                break;
            }
        }

        if (hint == mCurrentPowerHint) {
            // Disable the power hint
            powerHintInternal(hint, false);

            // If the active power hint list is not empty,
            // restore the previous power hint from the head of the list
            if (mActivePowerHints.empty() == false) {
                mCurrentPowerHint       = *mActivePowerHints.begin();
                mCurrentPowerHintEnable = true;
                powerHintInternal(mCurrentPowerHint, true);
            } else {
                // No hints left: clear current state to "none".
                mCurrentPowerHint       = static_cast<power_hint_t>(0);
                mCurrentPowerHintEnable = false;
            }
        }
    }
#endif
}
+
+}; // namespace qcamera
diff --git a/msmcobalt/QCamera2/util/QCameraPerf.h b/msmcobalt/QCamera2/util/QCameraPerf.h
new file mode 100644
index 0000000..aa3e6be
--- /dev/null
+++ b/msmcobalt/QCamera2/util/QCameraPerf.h
@@ -0,0 +1,93 @@
+/* Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERAPERF_H__
+#define __QCAMERAPERF_H__
+
+// System dependencies
+#include <utils/List.h>
+#include <utils/Mutex.h>
+
+// Camera dependencies
+#include "hardware/power.h"
+
// Opcode values passed to the vendor perf-lock library (perf_lock_acq).
// Exact semantics are defined by the vendor perf HAL (mp-ctl) -- the names
// below are the only documentation visible here; TODO confirm values.
typedef enum {
    ALL_CORES_ONLINE = 0x7FE,
    ALL_CPUS_PWR_CLPS_DIS = 0x101,
    CPU0_MIN_FREQ_TURBO_MAX = 0x2FE,
    CPU4_MIN_FREQ_TURBO_MAX = 0x1FFE,
}perf_lock_params_t;
+
+/* Time related macros */
+#define ONE_SEC 1000
+typedef int64_t nsecs_t;
+#define NSEC_PER_SEC 1000000000LLU
+
+using namespace android;
+
+namespace qcamera {
+
// Wrapper around the dlopened vendor perf-lock library plus the power HAL.
// Provides timed and indefinite CPU performance locks and maintains a stack
// of power hints (only one hint active at a time).
class QCameraPerfLock {
public:
    QCameraPerfLock();
    ~QCameraPerfLock();

    void    lock_init();
    void    lock_deinit();
    int32_t lock_rel();
    int32_t lock_acq();
    int32_t lock_acq_timed(int32_t timer_val);
    int32_t lock_rel_timed();
    bool    isTimerReset();
    void    powerHintInternal(power_hint_t hint, bool enable);
    void    powerHint(power_hint_t hint, bool enable);
    // True while a timed perf lock handle is held.
    bool    isPerfLockTimedAcquired() { return (0 <= mPerfLockHandleTimed); }

private:
    int32_t        (*perf_lock_acq)(int, int, int[], int);  // dlsym'd acquire
    int32_t        (*perf_lock_rel)(int);                   // dlsym'd release
    void            startTimer(uint32_t timer_val);
    void            resetTimer();
    void           *mDlHandle;              // dlopen handle for perf library
    uint32_t        mPerfLockEnable;        // feature on/off switch
    Mutex           mLock;                  // guards all state below
    int32_t         mPerfLockHandle;        // Performance lock library handle
    int32_t         mPerfLockHandleTimed;   // Performance lock library handle (timed)
    power_module_t *m_pPowerModule;         // power module Handle
    power_hint_t    mCurrentPowerHint;      // hint currently sent to power HAL
    bool            mCurrentPowerHintEnable;
    uint32_t        mTimerSet;              // nonzero while a timer is armed
    uint32_t        mPerfLockTimeout;       // duration (ms) of the timed lock
    nsecs_t         mStartTimeofLock;       // systemTime() at timed acquire
    List<power_hint_t> mActivePowerHints;   // Active/enabled power hints list
};
+
+}; // namespace qcamera
+
#endif /* __QCAMERAPERF_H__ */
diff --git a/msmcobalt/QCamera2/util/QCameraQueue.cpp b/msmcobalt/QCamera2/util/QCameraQueue.cpp
new file mode 100644
index 0000000..3b9d239
--- /dev/null
+++ b/msmcobalt/QCamera2/util/QCameraQueue.cpp
@@ -0,0 +1,464 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+// System dependencies
+#include <string.h>
+#include <utils/Errors.h>
+
+// Camera dependencies
+#include "QCameraQueue.h"
+
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : QCameraQueue
+ *
+ * DESCRIPTION: default constructor of QCameraQueue
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraQueue::QCameraQueue()
+{
+    pthread_mutex_init(&m_lock, NULL);
+    cam_list_init(&m_head.list);
+    m_size = 0;
+    m_dataFn = NULL;
+    m_userData = NULL;
+    m_active = true;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraQueue
+ *
+ * DESCRIPTION: constructor of QCameraQueue
+ *
+ * PARAMETERS :
+ *   @data_rel_fn : function ptr to release node data internal resource
+ *   @user_data   : user data ptr
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraQueue::QCameraQueue(release_data_fn data_rel_fn, void *user_data)
+{
+    pthread_mutex_init(&m_lock, NULL);
+    cam_list_init(&m_head.list);
+    m_size = 0;
+    m_dataFn = data_rel_fn;
+    m_userData = user_data;
+    m_active = true;
+}
+
/*===========================================================================
 * FUNCTION   : ~QCameraQueue
 *
 * DESCRIPTION: deconstructor of QCameraQueue
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/
QCameraQueue::~QCameraQueue()
{
    // flush() releases all remaining nodes (and their data via m_dataFn)
    // while m_lock is still valid; only then may the mutex be destroyed.
    flush();
    pthread_mutex_destroy(&m_lock);
}
+
/*===========================================================================
 * FUNCTION   : init
 *
 * DESCRIPTION: Put the queue to active state (ready to enqueue and dequeue).
 *              Required after flush(), which deactivates the queue.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/
void QCameraQueue::init()
{
    pthread_mutex_lock(&m_lock);
    m_active = true;
    pthread_mutex_unlock(&m_lock);
}
+
+/*===========================================================================
+ * FUNCTION   : isEmpty
+ *
+ * DESCRIPTION: return if the queue is empty or not
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : true -- queue is empty; false -- not empty
+ *==========================================================================*/
+bool QCameraQueue::isEmpty()
+{
+    bool flag = true;
+    pthread_mutex_lock(&m_lock);
+    if (m_size > 0) {
+        flag = false;
+    }
+    pthread_mutex_unlock(&m_lock);
+    return flag;
+}
+
+/*===========================================================================
+ * FUNCTION   : enqueue
+ *
+ * DESCRIPTION: enqueue data into the queue
+ *
+ * PARAMETERS :
+ *   @data    : data to be enqueued
+ *
+ * RETURN     : true -- success; false -- failed
+ *==========================================================================*/
+bool QCameraQueue::enqueue(void *data)
+{
+    bool rc;
+    camera_q_node *node =
+        (camera_q_node *)malloc(sizeof(camera_q_node));
+    if (NULL == node) {
+        LOGE("No memory for camera_q_node");
+        return false;
+    }
+
+    memset(node, 0, sizeof(camera_q_node));
+    node->data = data;
+
+    pthread_mutex_lock(&m_lock);
+    if (m_active) {
+        cam_list_add_tail_node(&node->list, &m_head.list);
+        m_size++;
+        rc = true;
+    } else {
+        free(node);
+        rc = false;
+    }
+    pthread_mutex_unlock(&m_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : enqueueWithPriority
+ *
+ * DESCRIPTION: enqueue data into queue with priority, will insert into the
+ *              head of the queue
+ *
+ * PARAMETERS :
+ *   @data    : data to be enqueued
+ *
+ * RETURN     : true -- success; false -- failed
+ *==========================================================================*/
+bool QCameraQueue::enqueueWithPriority(void *data)
+{
+    bool rc;
+    camera_q_node *node =
+        (camera_q_node *)malloc(sizeof(camera_q_node));
+    if (NULL == node) {
+        LOGE("No memory for camera_q_node");
+        return false;
+    }
+
+    memset(node, 0, sizeof(camera_q_node));
+    node->data = data;
+
+    pthread_mutex_lock(&m_lock);
+    if (m_active) {
+        struct cam_list *p_next = m_head.list.next;
+
+        m_head.list.next = &node->list;
+        p_next->prev = &node->list;
+        node->list.next = p_next;
+        node->list.prev = &m_head.list;
+
+        m_size++;
+        rc = true;
+    } else {
+        free(node);
+        rc = false;
+    }
+    pthread_mutex_unlock(&m_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : peek
+ *
+ * DESCRIPTION: return the head element without removing it
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : data ptr. NULL if not any data in the queue.
+ *==========================================================================*/
+void* QCameraQueue::peek()
+{
+    camera_q_node* node = NULL;
+    void* data = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    pthread_mutex_lock(&m_lock);
+    if (m_active) {
+        head = &m_head.list;
+        pos = head->next;
+        if (pos != head) {
+            node = member_of(pos, camera_q_node, list);
+        }
+    }
+    pthread_mutex_unlock(&m_lock);
+
+    if (NULL != node) {
+        data = node->data;
+    }
+
+    return data;
+}
+
+/*===========================================================================
+ * FUNCTION   : dequeue
+ *
+ * DESCRIPTION: dequeue data from the queue
+ *
+ * PARAMETERS :
+ *   @bFromHead : if true, dequeue from the head
+ *                if false, dequeue from the tail
+ *
+ * RETURN     : data ptr. NULL if not any data in the queue.
+ *==========================================================================*/
+void* QCameraQueue::dequeue(bool bFromHead)
+{
+    camera_q_node* node = NULL;
+    void* data = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    pthread_mutex_lock(&m_lock);
+    if (m_active) {
+        head = &m_head.list;
+        if (bFromHead) {
+            pos = head->next;
+        } else {
+            pos = head->prev;
+        }
+        if (pos != head) {
+            node = member_of(pos, camera_q_node, list);
+            cam_list_del_node(&node->list);
+            m_size--;
+        }
+    }
+    pthread_mutex_unlock(&m_lock);
+
+    if (NULL != node) {
+        data = node->data;
+        free(node);
+    }
+
+    return data;
+}
+
/*===========================================================================
 * FUNCTION   : dequeue
 *
 * DESCRIPTION: dequeue the first node whose data satisfies the matcher
 *
 * PARAMETERS :
 *   @match : matching function callback
 *   @match_data : the actual data to be matched
 *
 * RETURN     : data ptr. NULL if no node matched or the queue is inactive.
 *==========================================================================*/
void* QCameraQueue::dequeue(match_fn_data match, void *match_data){
    camera_q_node* node = NULL;
    struct cam_list *head = NULL;
    struct cam_list *pos = NULL;
    void* data = NULL;

    // Both the matcher and its argument are required for this overload.
    if ( NULL == match || NULL == match_data ) {
        return NULL;
    }

    pthread_mutex_lock(&m_lock);
    if (m_active) {
        head = &m_head.list;
        pos = head->next;

        while(pos != head) {
            node = member_of(pos, camera_q_node, list);
            // Advance before potential unlink so the iterator stays valid.
            pos = pos->next;
            if (NULL != node) {
                if ( match(node->data, m_userData, match_data) ) {
                    cam_list_del_node(&node->list);
                    m_size--;
                    data = node->data;
                    free(node);
                    // Early exit path: must release the lock before returning.
                    pthread_mutex_unlock(&m_lock);
                    return data;
                }
            }
        }
    }
    pthread_mutex_unlock(&m_lock);
    return NULL;
}
+
/*===========================================================================
 * FUNCTION   : flush
 *
 * DESCRIPTION: flush all nodes from the queue, queue will be empty after this
 *              operation. Also DEACTIVATES the queue; init() must be called
 *              before it can be used again.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/
void QCameraQueue::flush(){
    camera_q_node* node = NULL;
    struct cam_list *head = NULL;
    struct cam_list *pos = NULL;

    pthread_mutex_lock(&m_lock);
    if (m_active) {
        head = &m_head.list;
        pos = head->next;

        while(pos != head) {
            node = member_of(pos, camera_q_node, list);
            // Advance before unlinking so iteration survives the delete.
            pos = pos->next;
            cam_list_del_node(&node->list);
            m_size--;

            if (NULL != node->data) {
                // Let the owner release internal resources before freeing.
                if (m_dataFn) {
                    m_dataFn(node->data, m_userData);
                }
                free(node->data);
            }
            free(node);

        }
        m_size = 0;
        // Deactivate: further enqueue/dequeue calls fail until init().
        m_active = false;
    }
    pthread_mutex_unlock(&m_lock);
}
+
+/*===========================================================================
+ * FUNCTION   : flushNodes
+ *
+ * DESCRIPTION: flush only specific nodes, depending on
+ *              the given matching function.
+ *
+ * PARAMETERS :
+ *   @match   : matching function
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraQueue::flushNodes(match_fn match){
+    camera_q_node* node = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    if ( NULL == match ) {
+        return;
+    }
+
+    pthread_mutex_lock(&m_lock);
+    if (m_active) {
+        head = &m_head.list;
+        pos = head->next;
+
+        while(pos != head) {
+            node = member_of(pos, camera_q_node, list);
+            pos = pos->next;
+            if ( match(node->data, m_userData) ) {
+                cam_list_del_node(&node->list);
+                m_size--;
+
+                if (NULL != node->data) {
+                    if (m_dataFn) {
+                        m_dataFn(node->data, m_userData);
+                    }
+                    free(node->data);
+                }
+                free(node);
+            }
+        }
+    }
+    pthread_mutex_unlock(&m_lock);
+}
+
+/*===========================================================================
+ * FUNCTION   : flushNodes
+ *
+ * DESCRIPTION: flush only specific nodes, depending on
+ *              the given matching function.
+ *
+ * PARAMETERS :
+ *   @match   : matching function
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraQueue::flushNodes(match_fn_data match, void *match_data){
+    camera_q_node* node = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    if ( NULL == match ) {
+        return;
+    }
+
+    pthread_mutex_lock(&m_lock);
+    if (m_active) {
+        head = &m_head.list;
+        pos = head->next;
+
+        while(pos != head) {
+            node = member_of(pos, camera_q_node, list);
+            pos = pos->next;
+            if ( match(node->data, m_userData, match_data) ) {
+                cam_list_del_node(&node->list);
+                m_size--;
+
+                if (NULL != node->data) {
+                    if (m_dataFn) {
+                        m_dataFn(node->data, m_userData);
+                    }
+                    free(node->data);
+                }
+                free(node);
+            }
+        }
+    }
+    pthread_mutex_unlock(&m_lock);
+}
+
+}; // namespace qcamera
diff --git a/msmcobalt/QCamera2/util/QCameraQueue.h b/msmcobalt/QCamera2/util/QCameraQueue.h
new file mode 100644
index 0000000..dfa221e
--- /dev/null
+++ b/msmcobalt/QCamera2/util/QCameraQueue.h
@@ -0,0 +1,79 @@
+/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_QUEUE_H__
+#define __QCAMERA_QUEUE_H__
+
+// System dependencies
+#include <pthread.h>
+
+// Camera dependencies
+#include "cam_list.h"
+
+namespace qcamera {
+
// Matcher with an extra caller-supplied argument (see dequeue/flushNodes).
typedef bool (*match_fn_data)(void *data, void *user_data, void *match_data);
// Callback used to release a node's internal resources before free().
typedef void (*release_data_fn)(void* data, void *user_data);
// Simple matcher over node data.
typedef bool (*match_fn)(void *data, void *user_data);

// Thread-safe FIFO built on an intrusive circular doubly-linked list
// (cam_list) with a dummy head node; all operations take m_lock.
class QCameraQueue {
public:
    QCameraQueue();
    QCameraQueue(release_data_fn data_rel_fn, void *user_data);
    virtual ~QCameraQueue();
    void init();
    bool enqueue(void *data);
    bool enqueueWithPriority(void *data);
    /* This call will put queue into uninitialized state.
     * Need to call init() in order to use the queue again */
    void flush();
    void flushNodes(match_fn match);
    void flushNodes(match_fn_data match, void *spec_data);
    void* dequeue(bool bFromHead = true);
    void* dequeue(match_fn_data match, void *spec_data);
    void* peek();
    bool isEmpty();
    // NOTE: reads m_size without taking m_lock; value may be stale.
    int getCurrentSize() {return m_size;}
private:
    typedef struct {
        struct cam_list list;   // intrusive list hook (must be first member used by member_of)
        void* data;             // caller-owned payload
    } camera_q_node;

    camera_q_node m_head; // dummy head
    int m_size;           // node count, guarded by m_lock
    bool m_active;        // false after flush() until init()
    pthread_mutex_t m_lock;
    release_data_fn m_dataFn;   // optional payload release callback
    void * m_userData;          // forwarded to callbacks
};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_QUEUE_H__ */
diff --git a/msmcobalt/QCamera2/util/QCameraTrace.h b/msmcobalt/QCamera2/util/QCameraTrace.h
new file mode 100644
index 0000000..d7eeb8f
--- /dev/null
+++ b/msmcobalt/QCamera2/util/QCameraTrace.h
@@ -0,0 +1,150 @@
+/*
+ * Copyright (c) 2015-2016, The Linux Foundation. All rights reserved.
+ * Not a Contribution.
+ *
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __QCAMERATRACE_H__
+#define __QCAMERATRACE_H__
+
+#include <utils/Trace.h>
+
+#ifdef QCAMERA_REDEFINE_LOG
+#define CAM_MODULE CAM_HAL_MODULE
+extern "C" {
+#include "mm_camera_dbg.h"
+}
+#endif
+
+#undef ATRACE_CALL
+#undef ATRACE_NAME
+#undef ATRACE_BEGIN
+#undef ATRACE_INT
+#undef ATRACE_END
+#undef ATRACE_BEGIN_SNPRINTF
+#undef KPI_ATRACE_BEGIN
+#undef KPI_ATRACE_END
+#undef KPI_ATRACE_INT
+#undef ATRACE_TAG
+#undef ATRACE_BEGIN_DBG
+#undef ATRACE_INT_DBG
+#undef ATRACE_END_DBG
+
+#define KPI_ONLY 1
+#define KPI_DBG 2
+
+#define CAMERA_TRACE_BUF 32
+
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+
+//to enable only KPI logs
+#define KPI_ATRACE_BEGIN(name) ({\
+if (gKpiDebugLevel >= KPI_ONLY) { \
+     atrace_begin(ATRACE_TAG, name); \
+}\
+})
+
+#define KPI_ATRACE_END() ({\
+if (gKpiDebugLevel >= KPI_ONLY) { \
+     atrace_end(ATRACE_TAG); \
+}\
+})
+
+#define KPI_ATRACE_INT(name,val) ({\
+if (gKpiDebugLevel >= KPI_ONLY) { \
+     atrace_int(ATRACE_TAG, name, val); \
+}\
+})
+
+
+#define ATRACE_BEGIN_SNPRINTF(fmt_str, ...) \
+ if (gKpiDebugLevel >= KPI_DBG) { \
+   char trace_tag[CAMERA_TRACE_BUF]; \
+   snprintf(trace_tag, CAMERA_TRACE_BUF, fmt_str, ##__VA_ARGS__); \
+   ATRACE_BEGIN(trace_tag); \
+}
+
+#define ATRACE_BEGIN_DBG(name) ({\
+if (gKpiDebugLevel >= KPI_DBG) { \
+     atrace_begin(ATRACE_TAG, name); \
+}\
+})
+
+#define ATRACE_END_DBG() ({\
+if (gKpiDebugLevel >= KPI_DBG) { \
+     atrace_end(ATRACE_TAG); \
+}\
+})
+
+#define ATRACE_INT_DBG(name,val) ({\
+if (gKpiDebugLevel >= KPI_DBG) { \
+     atrace_int(ATRACE_TAG, name, val); \
+}\
+})
+
+#define ATRACE_BEGIN ATRACE_BEGIN_DBG
+#define ATRACE_INT ATRACE_INT_DBG
+#define ATRACE_END ATRACE_END_DBG
+
+#define KPI_ATRACE_NAME(name) qcamera::ScopedTraceKpi ___tracer(ATRACE_TAG, name)
+#define ATRACE_NAME(name) qcamera::ScopedTraceDbg ___tracer(ATRACE_TAG, name)
+#define KPI_ATRACE_CALL() KPI_ATRACE_NAME(__FUNCTION__)
+#define ATRACE_CALL() ATRACE_NAME(__FUNCTION__)
+
+namespace qcamera {
+extern volatile uint32_t gKpiDebugLevel;
// RAII systrace section emitted only when KPI tracing (>= KPI_ONLY) is on.
class ScopedTraceKpi {
public:
    inline ScopedTraceKpi(uint64_t tag, const char *name)
    : mTag(tag) {
        if (gKpiDebugLevel >= KPI_ONLY) {
            atrace_begin(mTag,name);
        }
    }

    inline ~ScopedTraceKpi() {
        // NOTE(review): if gKpiDebugLevel drops between ctor and dtor the
        // atrace section is left unbalanced -- presumed acceptable; verify.
        if (gKpiDebugLevel >= KPI_ONLY) {
            atrace_end(mTag);
        }
    }

    private:
        uint64_t mTag;  // ATRACE tag captured at construction
};
+
// RAII systrace section emitted only at debug trace level (>= KPI_DBG).
class ScopedTraceDbg {
public:
    inline ScopedTraceDbg(uint64_t tag, const char *name)
    : mTag(tag) {
        if (gKpiDebugLevel >= KPI_DBG) {
            atrace_begin(mTag,name);
        }
    }

    inline ~ScopedTraceDbg() {
        // NOTE(review): if gKpiDebugLevel drops between ctor and dtor the
        // atrace section is left unbalanced -- presumed acceptable; verify.
        if (gKpiDebugLevel >= KPI_DBG) {
            atrace_end(mTag);
        }
    }

    private:
        uint64_t mTag;  // ATRACE tag captured at construction
};
+};
+
+extern volatile uint32_t gKpiDebugLevel;
+
#endif /* __QCAMERATRACE_H__ */
diff --git a/msmcobalt/QCameraParameters.h b/msmcobalt/QCameraParameters.h
new file mode 100644
index 0000000..391ca44
--- /dev/null
+++ b/msmcobalt/QCameraParameters.h
@@ -0,0 +1,256 @@
+/*
+** Copyright 2008, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_HARDWARE_QCAMERA_PARAMETERS_H
+#define ANDROID_HARDWARE_QCAMERA_PARAMETERS_H
+
+// Camera dependencies
+#include <camera/CameraParameters.h>
+
+namespace android {
+
+struct FPSRange{
+    int minFPS;
+    int maxFPS;
+    FPSRange(){
+        minFPS=0;
+        maxFPS=0;
+    };
+    FPSRange(int min,int max){
+        minFPS=min;
+        maxFPS=max;
+    };
+};
+class QCameraParameters: public CameraParameters
+{
+public:
+#if 1
+    QCameraParameters() : CameraParameters() {};
+    QCameraParameters(const String8 &params): CameraParameters(params) {};
+    #else
+    QCameraParameters() : CameraParameters() {};
+    QCameraParameters(const String8 &params) { unflatten(params); }
+#endif
+    ~QCameraParameters();
+
+    // Supported PREVIEW/RECORDING SIZES IN HIGH FRAME RATE recording, sizes in pixels.
+    // Example value: "800x480,432x320". Read only.
+    static const char KEY_SUPPORTED_HFR_SIZES[];
+    // The mode of preview frame rate.
+    // Example value: "frame-rate-auto, frame-rate-fixed".
+    static const char KEY_PREVIEW_FRAME_RATE_MODE[];
+    static const char KEY_SUPPORTED_PREVIEW_FRAME_RATE_MODES[];
+    static const char KEY_PREVIEW_FRAME_RATE_AUTO_MODE[];
+    static const char KEY_PREVIEW_FRAME_RATE_FIXED_MODE[];
+
+    static const char KEY_SKIN_TONE_ENHANCEMENT[] ;
+    static const char KEY_SUPPORTED_SKIN_TONE_ENHANCEMENT_MODES[] ;
+
+    //Touch Af/AEC settings.
+    static const char KEY_TOUCH_AF_AEC[];
+    static const char KEY_SUPPORTED_TOUCH_AF_AEC[];
+    //Touch Index for AEC.
+    static const char KEY_TOUCH_INDEX_AEC[];
+    //Touch Index for AF.
+    static const char KEY_TOUCH_INDEX_AF[];
+    // Current auto scene detection mode.
+    // Example value: "off" or SCENE_DETECT_XXX constants. Read/write.
+    static const char KEY_SCENE_DETECT[];
+    // Supported auto scene detection settings.
+    // Example value: "off,backlight,snow/cloudy". Read only.
+    static const char KEY_SUPPORTED_SCENE_DETECT[];
+	   // Returns true if video snapshot is supported.
+    static const char KEY_FULL_VIDEO_SNAP_SUPPORTED[];
+    static const char KEY_POWER_MODE_SUPPORTED[];
+
+    static const char KEY_ISO_MODE[];
+    static const char KEY_SUPPORTED_ISO_MODES[];
+    static const char KEY_LENSSHADE[] ;
+    static const char KEY_SUPPORTED_LENSSHADE_MODES[] ;
+
+    static const char KEY_AUTO_EXPOSURE[];
+    static const char KEY_SUPPORTED_AUTO_EXPOSURE[];
+
+    static const char KEY_GPS_LATITUDE_REF[];
+    static const char KEY_GPS_LONGITUDE_REF[];
+    static const char KEY_GPS_ALTITUDE_REF[];
+    static const char KEY_GPS_STATUS[];
+    static const char KEY_EXIF_DATETIME[];
+    static const char KEY_MEMORY_COLOR_ENHANCEMENT[];
+    static const char KEY_SUPPORTED_MEM_COLOR_ENHANCE_MODES[];
+
+
+    static const char KEY_POWER_MODE[];
+
+    static const char KEY_ZSL[];
+    static const char KEY_SUPPORTED_ZSL_MODES[];
+
+    static const char KEY_CAMERA_MODE[];
+
+    static const char KEY_VIDEO_HIGH_FRAME_RATE[];
+    static const char KEY_SUPPORTED_VIDEO_HIGH_FRAME_RATE_MODES[];
+    static const char KEY_HIGH_DYNAMIC_RANGE_IMAGING[];
+    static const char KEY_SUPPORTED_HDR_IMAGING_MODES[];
+    static const char KEY_AE_BRACKET_HDR[];
+
+
+    // DENOISE
+    static const char KEY_DENOISE[];
+    static const char KEY_SUPPORTED_DENOISE[];
+
+    //Selectable zone AF.
+    static const char KEY_SELECTABLE_ZONE_AF[];
+    static const char KEY_SUPPORTED_SELECTABLE_ZONE_AF[];
+
+    //Face Detection
+    static const char KEY_FACE_DETECTION[];
+    static const char KEY_SUPPORTED_FACE_DETECTION[];
+
+    //Redeye Reduction
+    static const char KEY_REDEYE_REDUCTION[];
+    static const char KEY_SUPPORTED_REDEYE_REDUCTION[];
+    static const char EFFECT_EMBOSS[];
+    static const char EFFECT_SKETCH[];
+    static const char EFFECT_NEON[];
+
+    // Values for Touch AF/AEC
+    static const char TOUCH_AF_AEC_OFF[] ;
+    static const char TOUCH_AF_AEC_ON[] ;
+    static const char SCENE_MODE_ASD[];
+    static const char SCENE_MODE_BACKLIGHT[];
+    static const char SCENE_MODE_FLOWERS[];
+    static const char SCENE_MODE_AR[];
+    static const char SCENE_MODE_HDR[];
+	static const char SCENE_DETECT_OFF[];
+    static const char SCENE_DETECT_ON[];
+    static const char PIXEL_FORMAT_YUV420SP_ADRENO[]; // ADRENO
+	static const char PIXEL_FORMAT_RAW[];
+    static const char PIXEL_FORMAT_YV12[]; // YV12
+    static const char PIXEL_FORMAT_NV12[]; //NV12
+    // Normal focus mode. Applications should call
+    // CameraHardwareInterface.autoFocus to start the focus in this mode.
+    static const char FOCUS_MODE_NORMAL[];
+    static const char ISO_AUTO[];
+    static const char ISO_HJR[] ;
+    static const char ISO_100[];
+    static const char ISO_200[] ;
+    static const char ISO_400[];
+    static const char ISO_800[];
+    static const char ISO_1600[];
+    // Values for Lens Shading
+    static const char LENSSHADE_ENABLE[] ;
+    static const char LENSSHADE_DISABLE[] ;
+
+    // Values for auto exposure settings.
+    static const char AUTO_EXPOSURE_FRAME_AVG[];
+    static const char AUTO_EXPOSURE_CENTER_WEIGHTED[];
+    static const char AUTO_EXPOSURE_SPOT_METERING[];
+
+    static const char KEY_SHARPNESS[];
+    static const char KEY_MAX_SHARPNESS[];
+    static const char KEY_CONTRAST[];
+    static const char KEY_MAX_CONTRAST[];
+    static const char KEY_SATURATION[];
+    static const char KEY_MAX_SATURATION[];
+
+    static const char KEY_HISTOGRAM[] ;
+    static const char KEY_SUPPORTED_HISTOGRAM_MODES[] ;
+    // Values for HISTOGRAM
+    static const char HISTOGRAM_ENABLE[] ;
+    static const char HISTOGRAM_DISABLE[] ;
+
+    // Values for SKIN TONE ENHANCEMENT
+    static const char SKIN_TONE_ENHANCEMENT_ENABLE[] ;
+    static const char SKIN_TONE_ENHANCEMENT_DISABLE[] ;
+
+    // Values for Denoise
+    static const char DENOISE_OFF[] ;
+    static const char DENOISE_ON[] ;
+
+    // Values for auto exposure settings.
+    static const char SELECTABLE_ZONE_AF_AUTO[];
+    static const char SELECTABLE_ZONE_AF_SPOT_METERING[];
+    static const char SELECTABLE_ZONE_AF_CENTER_WEIGHTED[];
+    static const char SELECTABLE_ZONE_AF_FRAME_AVERAGE[];
+
+    // Values for Face Detection settings.
+    static const char FACE_DETECTION_OFF[];
+    static const char FACE_DETECTION_ON[];
+
+    // Values for MCE settings.
+    static const char MCE_ENABLE[];
+    static const char MCE_DISABLE[];
+
+    // Values for ZSL settings.
+    static const char ZSL_OFF[];
+    static const char ZSL_ON[];
+
+    // Values for HDR Bracketing settings.
+    static const char AE_BRACKET_HDR_OFF[];
+    static const char AE_BRACKET_HDR[];
+    static const char AE_BRACKET[];
+
+    // Values for Power mode settings.
+    static const char LOW_POWER[];
+    static const char NORMAL_POWER[];
+
+    // Values for HFR settings.
+    static const char VIDEO_HFR_OFF[];
+    static const char VIDEO_HFR_2X[];
+    static const char VIDEO_HFR_3X[];
+    static const char VIDEO_HFR_4X[];
+
+    // Values for Redeye Reduction settings.
+    static const char REDEYE_REDUCTION_ENABLE[];
+    static const char REDEYE_REDUCTION_DISABLE[];
+    // Values for HDR settings.
+    static const char HDR_ENABLE[];
+    static const char HDR_DISABLE[];
+
+   // Values for Redeye Reduction settings.
+   // static const char REDEYE_REDUCTION_ENABLE[];
+   // static const char REDEYE_REDUCTION_DISABLE[];
+   // Values for HDR settings.
+   //    static const char HDR_ENABLE[];
+   //    static const char HDR_DISABLE[];
+
+
+   static const char KEY_SINGLE_ISP_OUTPUT_ENABLED[];
+   static const char KEY_SUPPORTED_CAMERA_FEATURES[];
+   static const char KEY_MAX_NUM_REQUESTED_FACES[];
+
+    enum {
+        CAMERA_ORIENTATION_UNKNOWN = 0,
+        CAMERA_ORIENTATION_PORTRAIT = 1,
+        CAMERA_ORIENTATION_LANDSCAPE = 2,
+    };
+    int getOrientation() const;
+    void setOrientation(int orientation);
+    void getSupportedHfrSizes(Vector<Size> &sizes) const;
+    void setPreviewFpsRange(int minFPS,int maxFPS);
+	void setPreviewFrameRateMode(const char *mode);
+    const char *getPreviewFrameRateMode() const;
+    void setTouchIndexAec(int x, int y);
+    void getTouchIndexAec(int *x, int *y) const;
+    void setTouchIndexAf(int x, int y);
+    void getTouchIndexAf(int *x, int *y) const;
+    void getMeteringAreaCenter(int * x, int *y) const;
+
+};
+
+}; // namespace android
+
+#endif
diff --git a/msmcobalt/QCamera_Intf.h b/msmcobalt/QCamera_Intf.h
new file mode 100644
index 0000000..f2360cc
--- /dev/null
+++ b/msmcobalt/QCamera_Intf.h
@@ -0,0 +1,1147 @@
+/* Copyright (c) 2012, 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_INTF_H__
+#define __QCAMERA_INTF_H__
+
+// System dependencies
+#include <pthread.h>
+#include <stdint.h>
+
+#define PAD_TO_WORD(a)               (((a)+3)&~3)
+#define PAD_TO_2K(a)                 (((a)+2047)&~2047)
+#define PAD_TO_4K(a)                 (((a)+4095)&~4095)
+#define PAD_TO_8K(a)                 (((a)+8191)&~8191)
+
+#define CEILING32(X) (((X) + 0x0001F) & 0xFFFFFFE0)
+#define CEILING16(X) (((X) + 0x000F) & 0xFFF0)
+#define CEILING4(X)  (((X) + 0x0003) & 0xFFFC)
+#define CEILING2(X)  (((X) + 0x0001) & 0xFFFE)
+
+#define MAX_ROI 2
+#define MAX_NUM_PARM 5
+#define MAX_NUM_OPS 2
+#define VIDEO_MAX_PLANES 8
+#define MAX_SNAPSHOT_BUFFERS 5
+#define MAX_EXP_BRACKETING_LENGTH 32
+
+
+/* Exif Tag ID */
+typedef uint32_t exif_tag_id_t;
+
+/* Exif Info (opaque definition) */
+struct exif_info_t;
+typedef struct exif_info_t * exif_info_obj_t;
+
+typedef enum {
+  BACK_CAMERA,
+  FRONT_CAMERA,
+}cam_position_t;
+
+typedef enum {
+  CAM_CTRL_FAILED,        /* Failure in doing operation */
+  CAM_CTRL_SUCCESS,       /* Operation Succeeded */
+  CAM_CTRL_INVALID_PARM,  /* Invalid parameter provided */
+  CAM_CTRL_NOT_SUPPORTED, /* Parameter/operation not supported */
+  CAM_CTRL_ACCEPTED,      /* Parameter accepted */
+  CAM_CTRL_MAX,
+} cam_ctrl_status_t;
+
+typedef enum {
+  CAMERA_YUV_420_NV12,
+  CAMERA_YUV_420_NV21,
+  CAMERA_YUV_420_NV21_ADRENO,
+  CAMERA_BAYER_SBGGR10,
+  CAMERA_RDI,
+  CAMERA_YUV_420_YV12,
+  CAMERA_YUV_422_NV16,
+  CAMERA_YUV_422_NV61,
+  CAMERA_YUV_422_YUYV,
+  CAMERA_SAEC,
+  CAMERA_SAWB,
+  CAMERA_SAFC,
+  CAMERA_SHST,
+} cam_format_t;
+
+typedef enum {
+  CAMERA_PAD_NONE,
+  CAMERA_PAD_TO_WORD,   /*2 bytes*/
+  CAMERA_PAD_TO_LONG_WORD, /*4 bytes*/
+  CAMERA_PAD_TO_8, /*8 bytes*/
+  CAMERA_PAD_TO_16, /*16 bytes*/
+
+  CAMERA_PAD_TO_1K, /*1k bytes*/
+  CAMERA_PAD_TO_2K, /*2k bytes*/
+  CAMERA_PAD_TO_4K,
+  CAMERA_PAD_TO_8K
+} cam_pad_format_t;
+
+typedef struct {
+  int ext_mode;   /* preview, main, thumbnail, video, raw, etc */
+  int frame_idx;  /* frame index */
+  int fd;         /* origin fd */
+  uint32_t size;
+  uint8_t is_hist; /* is hist mapping? */
+} mm_camera_frame_map_type;
+
+typedef struct {
+  int ext_mode;   /* preview, main, thumbnail, video, raw, etc */
+  int frame_idx;  /* frame index */
+  uint8_t is_hist; /* is hist unmapping? */
+} mm_camera_frame_unmap_type;
+
+typedef enum {
+  CAM_SOCK_MSG_TYPE_FD_MAPPING,
+  CAM_SOCK_MSG_TYPE_FD_UNMAPPING,
+  CAM_SOCK_MSG_TYPE_WDN_START,
+  CAM_SOCK_MSG_TYPE_HDR_START,
+  CAM_SOCK_MSG_TYPE_HIST_MAPPING,
+  CAM_SOCK_MSG_TYPE_HIST_UNMAPPING,
+  CAM_SOCK_MSG_TYPE_MAX
+}mm_camera_socket_msg_type;
+#define MAX_HDR_EXP_FRAME_NUM 5
+typedef struct {
+  unsigned long cookie;
+  int num_hdr_frames;
+  int hdr_main_idx[MAX_HDR_EXP_FRAME_NUM];
+  int hdr_thm_idx[MAX_HDR_EXP_FRAME_NUM];
+  int exp[MAX_HDR_EXP_FRAME_NUM];
+} mm_camera_hdr_start_type;
+
+#define MM_MAX_WDN_NUM 2
+typedef struct {
+  unsigned long cookie;
+  int num_frames;
+  int ext_mode[MM_MAX_WDN_NUM];
+  int frame_idx[MM_MAX_WDN_NUM];
+} mm_camera_wdn_start_type;
+
+typedef struct {
+  mm_camera_socket_msg_type msg_type;
+  union {
+    mm_camera_frame_map_type frame_fd_map;
+    mm_camera_frame_unmap_type frame_fd_unmap;
+    mm_camera_wdn_start_type wdn_start;
+    mm_camera_hdr_start_type hdr_pkg;
+  } payload;
+} cam_sock_packet_t;
+
+typedef enum {
+  CAM_VIDEO_FRAME,
+  CAM_SNAPSHOT_FRAME,
+  CAM_PREVIEW_FRAME,
+}cam_frame_type_t;
+
+
+typedef enum {
+  CAMERA_MODE_2D = (1<<0),
+  CAMERA_MODE_3D = (1<<1),
+  CAMERA_NONZSL_MODE = (1<<2),
+  CAMERA_ZSL_MODE = (1<<3),
+  CAMERA_MODE_MAX = CAMERA_ZSL_MODE,
+} camera_mode_t;
+
+
+typedef struct {
+  int  modes_supported;
+  int8_t camera_id;
+  cam_position_t position;
+  uint32_t sensor_mount_angle;
+}camera_info_t;
+
+typedef struct {
+  camera_mode_t mode;
+  int8_t camera_id;
+  camera_mode_t cammode;
+}config_params_t;
+
+typedef struct {
+  uint32_t len;
+  uint32_t y_offset;
+  uint32_t cbcr_offset;
+} cam_sp_len_offset_t;
+
+typedef struct{
+  uint32_t len;
+  uint32_t offset;
+} cam_mp_len_offset_t;
+
+typedef struct {
+  int num_planes;
+  union {
+    cam_sp_len_offset_t sp;
+    cam_mp_len_offset_t mp[8];
+  };
+  uint32_t frame_len;
+} cam_frame_len_offset_t;
+
+typedef struct {
+  uint32_t parm[MAX_NUM_PARM];
+  uint32_t ops[MAX_NUM_OPS];
+  uint8_t yuv_output;
+  uint8_t jpeg_capture;
+  uint32_t max_pict_width;
+  uint32_t max_pict_height;
+  uint32_t max_preview_width;
+  uint32_t max_preview_height;
+  uint32_t max_video_width;
+  uint32_t max_video_height;
+  uint32_t effect;
+  camera_mode_t modes;
+  uint8_t preview_format;
+  uint32_t preview_sizes_cnt;
+  uint32_t thumb_sizes_cnt;
+  uint32_t video_sizes_cnt;
+  uint32_t hfr_sizes_cnt;
+  uint8_t vfe_output_enable;
+  uint8_t hfr_frame_skip;
+  uint32_t default_preview_width;
+  uint32_t default_preview_height;
+  uint32_t bestshot_reconfigure;
+  uint32_t pxlcode;
+}cam_prop_t;
+
+typedef struct {
+  uint16_t video_width;         /* Video width seen by VFE could be different than orig. Ex. DIS */
+  uint16_t video_height;        /* Video height seen by VFE */
+  uint16_t picture_width;       /* Picture width seen by VFE */
+  uint16_t picture_height;      /* Picture height seen by VFE */
+  uint16_t display_width;       /* width of display */
+  uint16_t display_height;      /* height of display */
+  uint16_t orig_video_width;    /* original video width received */
+  uint16_t orig_video_height;   /* original video height received */
+  uint16_t orig_picture_dx;     /* original picture width received */
+  uint16_t orig_picture_dy;     /* original picture height received */
+  uint16_t ui_thumbnail_height; /* Just like orig_picture_dy */
+  uint16_t ui_thumbnail_width;  /* Just like orig_picture_dx */
+  uint16_t thumbnail_height;
+  uint16_t thumbnail_width;
+  uint16_t orig_picture_width;
+  uint16_t orig_picture_height;
+  uint16_t orig_thumb_width;
+  uint16_t orig_thumb_height;
+  uint16_t raw_picture_height;
+  uint16_t raw_picture_width;
+  uint16_t rdi0_height;
+  uint16_t rdi0_width;
+  uint16_t rdi1_height;
+  uint16_t rdi1_width;
+  uint32_t hjr_xtra_buff_for_bayer_filtering;
+  cam_format_t    prev_format;
+  cam_format_t    enc_format;
+  cam_format_t    thumb_format;
+  cam_format_t    main_img_format;
+  cam_format_t    rdi0_format;
+  cam_format_t    rdi1_format;
+  cam_format_t    raw_img_format;
+  cam_pad_format_t prev_padding_format;
+  cam_pad_format_t enc_padding_format;
+  cam_pad_format_t thumb_padding_format;
+  cam_pad_format_t main_padding_format;
+  uint16_t display_luma_width;
+  uint16_t display_luma_height;
+  uint16_t display_chroma_width;
+  uint16_t display_chroma_height;
+  uint16_t video_luma_width;
+  uint16_t video_luma_height;
+  uint16_t video_chroma_width;
+  uint16_t video_chroma_height;
+  uint16_t thumbnail_luma_width;
+  uint16_t thumbnail_luma_height;
+  uint16_t thumbnail_chroma_width;
+  uint16_t thumbnail_chroma_height;
+  uint16_t main_img_luma_width;
+  uint16_t main_img_luma_height;
+  uint16_t main_img_chroma_width;
+  uint16_t main_img_chroma_height;
+  int rotation;
+  cam_frame_len_offset_t display_frame_offset;
+  cam_frame_len_offset_t video_frame_offset;
+  cam_frame_len_offset_t picture_frame_offset;
+  cam_frame_len_offset_t thumb_frame_offset;
+  uint32_t channel_interface_mask;
+} cam_ctrl_dimension_t;
+
+typedef struct {
+  uint16_t type;
+  uint16_t width;
+  uint16_t height;
+} cam_stats_buf_dimension_t;
+
+typedef struct {
+  uint8_t cid;
+  uint8_t dt;
+  uint32_t inst_handle;
+} cam_cid_entry_t;
+
+#define CAM_MAX_CID_NUM    8
+typedef struct {
+  /*should we hard code max CIDs? if not we need to have two CMD*/
+  uint8_t num_cids;
+  cam_cid_entry_t cid_entries[CAM_MAX_CID_NUM];
+} cam_cid_info_t;
+
+typedef struct {
+  /* we still use prev, video, main,
+   * thumb to interpret image types */
+  uint32_t image_mode;                 /* input */
+  cam_format_t format;                 /* input */
+  cam_pad_format_t padding_format;     /* input */
+  int rotation;                        /* input */
+  uint16_t width;                      /* input/output */
+  uint16_t height;                     /* input/output */
+  cam_frame_len_offset_t frame_offset; /* output */
+} cam_frame_resolution_t;
+
+typedef struct {
+  uint32_t instance_hdl; /* instance handler of the stream */
+  uint32_t frame_idx;    /* frame index */
+  uint16_t frame_width;
+  uint16_t frame_height;
+  cam_frame_len_offset_t frame_offset;
+} mm_camera_wnr_frame_info_t;
+
+#define MM_CAMEAR_MAX_STRAEM_BUNDLE 4
+typedef struct {
+    uint8_t num_frames;
+    mm_camera_wnr_frame_info_t frames[MM_CAMEAR_MAX_STRAEM_BUNDLE];
+} mm_camera_wnr_info_t;
+
+typedef struct {
+  uint8_t num;
+  uint32_t stream_handles[MM_CAMEAR_MAX_STRAEM_BUNDLE]; /* instance handler */
+} cam_stream_bundle_t;
+
+/* Add enumerations at the bottom but before MM_CAMERA_PARM_MAX */
+typedef enum {
+    MM_CAMERA_PARM_PICT_SIZE,
+    MM_CAMERA_PARM_ZOOM_RATIO,
+    MM_CAMERA_PARM_HISTOGRAM,
+    MM_CAMERA_PARM_DIMENSION,
+    MM_CAMERA_PARM_FPS,
+    MM_CAMERA_PARM_FPS_MODE, /*5*/
+    MM_CAMERA_PARM_EFFECT,
+    MM_CAMERA_PARM_EXPOSURE_COMPENSATION,
+    MM_CAMERA_PARM_EXPOSURE,
+    MM_CAMERA_PARM_SHARPNESS,
+    MM_CAMERA_PARM_CONTRAST, /*10*/
+    MM_CAMERA_PARM_SATURATION,
+    MM_CAMERA_PARM_BRIGHTNESS,
+    MM_CAMERA_PARM_WHITE_BALANCE,
+    MM_CAMERA_PARM_LED_MODE,
+    MM_CAMERA_PARM_ANTIBANDING, /*15*/
+    MM_CAMERA_PARM_ROLLOFF,
+    MM_CAMERA_PARM_CONTINUOUS_AF,
+    MM_CAMERA_PARM_FOCUS_RECT,
+    MM_CAMERA_PARM_AEC_ROI,
+    MM_CAMERA_PARM_AF_ROI, /*20*/
+    MM_CAMERA_PARM_HJR,
+    MM_CAMERA_PARM_ISO,
+    MM_CAMERA_PARM_BL_DETECTION,
+    MM_CAMERA_PARM_SNOW_DETECTION,
+    MM_CAMERA_PARM_BESTSHOT_MODE, /*25*/
+    MM_CAMERA_PARM_ZOOM,
+    MM_CAMERA_PARM_VIDEO_DIS,
+    MM_CAMERA_PARM_VIDEO_ROT,
+    MM_CAMERA_PARM_SCE_FACTOR,
+    MM_CAMERA_PARM_FD, /*30*/
+    MM_CAMERA_PARM_MODE,
+    /* 2nd 32 bits */
+    MM_CAMERA_PARM_3D_FRAME_FORMAT,
+    MM_CAMERA_PARM_CAMERA_ID,
+    MM_CAMERA_PARM_CAMERA_INFO,
+    MM_CAMERA_PARM_PREVIEW_SIZE, /*35*/
+    MM_CAMERA_PARM_QUERY_FALSH4SNAP,
+    MM_CAMERA_PARM_FOCUS_DISTANCES,
+    MM_CAMERA_PARM_BUFFER_INFO,
+    MM_CAMERA_PARM_JPEG_ROTATION,
+    MM_CAMERA_PARM_JPEG_MAINIMG_QUALITY, /* 40 */
+    MM_CAMERA_PARM_JPEG_THUMB_QUALITY,
+    MM_CAMERA_PARM_ZSL_ENABLE,
+    MM_CAMERA_PARM_FOCAL_LENGTH,
+    MM_CAMERA_PARM_HORIZONTAL_VIEW_ANGLE,
+    MM_CAMERA_PARM_VERTICAL_VIEW_ANGLE, /* 45 */
+    MM_CAMERA_PARM_MCE,
+    MM_CAMERA_PARM_RESET_LENS_TO_INFINITY,
+    MM_CAMERA_PARM_SNAPSHOTDATA,
+    MM_CAMERA_PARM_HFR,
+    MM_CAMERA_PARM_REDEYE_REDUCTION, /* 50 */
+    MM_CAMERA_PARM_WAVELET_DENOISE,
+    MM_CAMERA_PARM_3D_DISPLAY_DISTANCE,
+    MM_CAMERA_PARM_3D_VIEW_ANGLE,
+    MM_CAMERA_PARM_PREVIEW_FORMAT,
+    MM_CAMERA_PARM_RDI_FORMAT,
+    MM_CAMERA_PARM_HFR_SIZE, /* 55 */
+    MM_CAMERA_PARM_3D_EFFECT,
+    MM_CAMERA_PARM_3D_MANUAL_CONV_RANGE,
+    MM_CAMERA_PARM_3D_MANUAL_CONV_VALUE,
+    MM_CAMERA_PARM_ENABLE_3D_MANUAL_CONVERGENCE,
+    /* These are new parameters defined here */
+    MM_CAMERA_PARM_CH_IMAGE_FMT, /* 60 */       // mm_camera_ch_image_fmt_parm_t
+    MM_CAMERA_PARM_OP_MODE,             // camera state, sub state also
+    MM_CAMERA_PARM_SHARPNESS_CAP,       //
+    MM_CAMERA_PARM_SNAPSHOT_BURST_NUM,  // num shots per snapshot action
+    MM_CAMERA_PARM_LIVESHOT_MAIN,       // enable/disable full size live shot
+    MM_CAMERA_PARM_MAXZOOM, /* 65 */
+    MM_CAMERA_PARM_LUMA_ADAPTATION,     // enable/disable
+    MM_CAMERA_PARM_HDR,
+    MM_CAMERA_PARM_CROP,
+    MM_CAMERA_PARM_MAX_PICTURE_SIZE,
+    MM_CAMERA_PARM_MAX_PREVIEW_SIZE, /* 70 */
+    MM_CAMERA_PARM_ASD_ENABLE,
+    MM_CAMERA_PARM_RECORDING_HINT,
+    MM_CAMERA_PARM_CAF_ENABLE,
+    MM_CAMERA_PARM_FULL_LIVESHOT,
+    MM_CAMERA_PARM_DIS_ENABLE, /* 75 */
+    MM_CAMERA_PARM_AEC_LOCK,
+    MM_CAMERA_PARM_AWB_LOCK,
+    MM_CAMERA_PARM_AF_MTR_AREA,
+    MM_CAMERA_PARM_AEC_MTR_AREA,
+    MM_CAMERA_PARM_LOW_POWER_MODE,
+    MM_CAMERA_PARM_MAX_HFR_MODE, /* 80 */
+    MM_CAMERA_PARM_MAX_VIDEO_SIZE,
+    MM_CAMERA_PARM_DEF_PREVIEW_SIZES,
+    MM_CAMERA_PARM_DEF_VIDEO_SIZES,
+    MM_CAMERA_PARM_DEF_THUMB_SIZES,
+    MM_CAMERA_PARM_DEF_HFR_SIZES,
+    MM_CAMERA_PARM_PREVIEW_SIZES_CNT,
+    MM_CAMERA_PARM_VIDEO_SIZES_CNT,
+    MM_CAMERA_PARM_THUMB_SIZES_CNT,
+    MM_CAMERA_PARM_HFR_SIZES_CNT,
+    MM_CAMERA_PARM_GRALLOC_USAGE,
+    MM_CAMERA_PARM_VFE_OUTPUT_ENABLE, //to check whether both outputs are
+    MM_CAMERA_PARM_DEFAULT_PREVIEW_WIDTH,
+    MM_CAMERA_PARM_DEFAULT_PREVIEW_HEIGHT,
+    MM_CAMERA_PARM_FOCUS_MODE,
+    MM_CAMERA_PARM_HFR_FRAME_SKIP,
+    MM_CAMERA_PARM_CH_INTERFACE,
+    //or single output enabled to differentiate 7x27a with others
+    MM_CAMERA_PARM_BESTSHOT_RECONFIGURE,
+    MM_CAMERA_PARM_MAX_NUM_FACES_DECT,
+    MM_CAMERA_PARM_FPS_RANGE,
+    MM_CAMERA_PARM_CID,
+    MM_CAMERA_PARM_FRAME_RESOLUTION,
+    MM_CAMERA_PARM_RAW_SNAPSHOT_FMT,
+    MM_CAMERA_PARM_FACIAL_FEATURE_INFO,
+    MM_CAMERA_PARM_MOBICAT,
+    MM_CAMERA_PARM_MAX
+} mm_camera_parm_type_t;
+
+typedef enum {
+  STREAM_NONE           =  0x0,
+  STREAM_IMAGE          =  0x1,
+  STREAM_RAW            =  0x2,
+  STREAM_RAW1           =  0x4,
+  STREAM_RAW2           =  0x8,
+} mm_camera_channel_stream_info_t;
+
+typedef enum {
+  CAMERA_SET_PARM_DISPLAY_INFO,
+  CAMERA_SET_PARM_DIMENSION,
+
+  CAMERA_SET_PARM_ZOOM,
+  CAMERA_SET_PARM_SENSOR_POSITION,
+  CAMERA_SET_PARM_FOCUS_RECT,
+  CAMERA_SET_PARM_LUMA_ADAPTATION,
+  CAMERA_SET_PARM_CONTRAST,
+  CAMERA_SET_PARM_BRIGHTNESS,
+  CAMERA_SET_PARM_EXPOSURE_COMPENSATION,
+  CAMERA_SET_PARM_SHARPNESS,
+  CAMERA_SET_PARM_HUE,  /* 10 */
+  CAMERA_SET_PARM_SATURATION,
+  CAMERA_SET_PARM_EXPOSURE,
+  CAMERA_SET_PARM_AUTO_FOCUS,
+  CAMERA_SET_PARM_WB,
+  CAMERA_SET_PARM_EFFECT,
+  CAMERA_SET_PARM_FPS,
+  CAMERA_SET_PARM_FLASH,
+  CAMERA_SET_PARM_NIGHTSHOT_MODE,
+  CAMERA_SET_PARM_REFLECT,
+  CAMERA_SET_PARM_PREVIEW_MODE,  /* 20 */
+  CAMERA_SET_PARM_ANTIBANDING,
+  CAMERA_SET_PARM_RED_EYE_REDUCTION,
+  CAMERA_SET_PARM_FOCUS_STEP,
+  CAMERA_SET_PARM_EXPOSURE_METERING,
+  CAMERA_SET_PARM_AUTO_EXPOSURE_MODE,
+  CAMERA_SET_PARM_ISO,
+  CAMERA_SET_PARM_BESTSHOT_MODE,
+  CAMERA_SET_PARM_ENCODE_ROTATION,
+
+  CAMERA_SET_PARM_PREVIEW_FPS,
+  CAMERA_SET_PARM_AF_MODE,  /* 30 */
+  CAMERA_SET_PARM_HISTOGRAM,
+  CAMERA_SET_PARM_FLASH_STATE,
+  CAMERA_SET_PARM_FRAME_TIMESTAMP,
+  CAMERA_SET_PARM_STROBE_FLASH,
+  CAMERA_SET_PARM_FPS_LIST,
+  CAMERA_SET_PARM_HJR,
+  CAMERA_SET_PARM_ROLLOFF,
+
+  CAMERA_STOP_PREVIEW,
+  CAMERA_START_PREVIEW,
+  CAMERA_START_SNAPSHOT, /* 40 */
+  CAMERA_START_RAW_SNAPSHOT,
+  CAMERA_STOP_SNAPSHOT,
+  CAMERA_EXIT,
+  CAMERA_ENABLE_BSM,
+  CAMERA_DISABLE_BSM,
+  CAMERA_GET_PARM_ZOOM,
+  CAMERA_GET_PARM_MAXZOOM,
+  CAMERA_GET_PARM_ZOOMRATIOS,
+  CAMERA_GET_PARM_AF_SHARPNESS,
+  CAMERA_SET_PARM_LED_MODE, /* 50 */
+  CAMERA_SET_MOTION_ISO,
+  CAMERA_AUTO_FOCUS_CANCEL,
+  CAMERA_GET_PARM_FOCUS_STEP,
+  CAMERA_ENABLE_AFD,
+  CAMERA_PREPARE_SNAPSHOT,
+  CAMERA_SET_FPS_MODE,
+  CAMERA_START_VIDEO,
+  CAMERA_STOP_VIDEO,
+  CAMERA_START_RECORDING,
+  CAMERA_STOP_RECORDING, /* 60 */
+  CAMERA_SET_VIDEO_DIS_PARAMS,
+  CAMERA_SET_VIDEO_ROT_PARAMS,
+  CAMERA_SET_PARM_AEC_ROI,
+  CAMERA_SET_CAF,
+  CAMERA_SET_PARM_BL_DETECTION_ENABLE,
+  CAMERA_SET_PARM_SNOW_DETECTION_ENABLE,
+  CAMERA_SET_PARM_STROBE_FLASH_MODE,
+  CAMERA_SET_PARM_AF_ROI,
+  CAMERA_START_LIVESHOT,
+  CAMERA_SET_SCE_FACTOR, /* 70 */
+  CAMERA_GET_CAPABILITIES,
+  CAMERA_GET_PARM_DIMENSION,
+  CAMERA_GET_PARM_LED_MODE,
+  CAMERA_SET_PARM_FD,
+  CAMERA_GET_PARM_3D_FRAME_FORMAT,
+  CAMERA_QUERY_FLASH_FOR_SNAPSHOT,
+  CAMERA_GET_PARM_FOCUS_DISTANCES,
+  CAMERA_START_ZSL,
+  CAMERA_STOP_ZSL,
+  CAMERA_ENABLE_ZSL, /* 80 */
+  CAMERA_GET_PARM_FOCAL_LENGTH,
+  CAMERA_GET_PARM_HORIZONTAL_VIEW_ANGLE,
+  CAMERA_GET_PARM_VERTICAL_VIEW_ANGLE,
+  CAMERA_SET_PARM_WAVELET_DENOISE,
+  CAMERA_SET_PARM_MCE,
+  CAMERA_ENABLE_STEREO_CAM,
+  CAMERA_SET_PARM_RESET_LENS_TO_INFINITY,
+  CAMERA_GET_PARM_SNAPSHOTDATA,
+  CAMERA_SET_PARM_HFR,
+  CAMERA_SET_REDEYE_REDUCTION, /* 90 */
+  CAMERA_SET_PARM_3D_DISPLAY_DISTANCE,
+  CAMERA_SET_PARM_3D_VIEW_ANGLE,
+  CAMERA_SET_PARM_3D_EFFECT,
+  CAMERA_SET_PARM_PREVIEW_FORMAT,
+  CAMERA_GET_PARM_3D_DISPLAY_DISTANCE, /* 95 */
+  CAMERA_GET_PARM_3D_VIEW_ANGLE,
+  CAMERA_GET_PARM_3D_EFFECT,
+  CAMERA_GET_PARM_3D_MANUAL_CONV_RANGE,
+  CAMERA_SET_PARM_3D_MANUAL_CONV_VALUE,
+  CAMERA_ENABLE_3D_MANUAL_CONVERGENCE, /* 100 */
+  CAMERA_SET_PARM_HDR,
+  CAMERA_SET_ASD_ENABLE,
+  CAMERA_POSTPROC_ABORT,
+  CAMERA_SET_AEC_MTR_AREA,
+  CAMERA_SET_AEC_LOCK,       /*105*/
+  CAMERA_SET_AWB_LOCK,
+  CAMERA_SET_RECORDING_HINT,
+  CAMERA_SET_PARM_CAF,
+  CAMERA_SET_FULL_LIVESHOT,
+  CAMERA_SET_DIS_ENABLE,  /*110*/
+  CAMERA_GET_PARM_MAX_HFR_MODE,
+  CAMERA_SET_LOW_POWER_MODE,
+  CAMERA_GET_PARM_DEF_PREVIEW_SIZES,
+  CAMERA_GET_PARM_DEF_VIDEO_SIZES,
+  CAMERA_GET_PARM_DEF_THUMB_SIZES, /*115*/
+  CAMERA_GET_PARM_DEF_HFR_SIZES,
+  CAMERA_GET_PARM_MAX_LIVESHOT_SIZE,
+  CAMERA_GET_PARM_FPS_RANGE,
+  CAMERA_SET_3A_CONVERGENCE,
+  CAMERA_SET_PREVIEW_HFR, /*120*/
+  CAMERA_GET_MAX_DIMENSION,
+  CAMERA_GET_MAX_NUM_FACES_DECT,
+  CAMERA_SET_CHANNEL_STREAM,
+  CAMERA_GET_CHANNEL_STREAM,
+  CAMERA_SET_PARM_CID, /*125*/
+  CAMERA_GET_PARM_FRAME_RESOLUTION,
+  CAMERA_GET_FACIAL_FEATURE_INFO,
+  CAMERA_GET_PP_MASK, /* get post-processing mask */
+  CAMERA_DO_PP_WNR,   /* do post-process WNR */
+  CAMERA_GET_PARM_HDR,
+  CAMERA_SEND_PP_PIPELINE_CMD, /* send offline pp cmd */
+  CAMERA_SET_BUNDLE, /* set stream bundle */
+  CAMERA_ENABLE_MOBICAT,
+  CAMERA_GET_PARM_MOBICAT,
+  CAMERA_CTRL_PARM_MAX
+} cam_ctrl_type;
+
+typedef enum {
+  CAMERA_ERROR_NO_MEMORY,
+  CAMERA_ERROR_EFS_FAIL,                /* Low-level operation failed */
+  CAMERA_ERROR_EFS_FILE_OPEN,           /* File already opened */
+  CAMERA_ERROR_EFS_FILE_NOT_OPEN,       /* File not opened */
+  CAMERA_ERROR_EFS_FILE_ALREADY_EXISTS, /* File already exists */
+  CAMERA_ERROR_EFS_NONEXISTENT_DIR,     /* User directory doesn't exist */
+  CAMERA_ERROR_EFS_NONEXISTENT_FILE,    /* Specified file doesn't exist */
+  CAMERA_ERROR_EFS_BAD_FILE_NAME,       /* Client specified invalid file/directory name */
+  CAMERA_ERROR_EFS_BAD_FILE_HANDLE,     /* Client specified invalid file handle */
+  CAMERA_ERROR_EFS_SPACE_EXHAUSTED,     /* Out of file system space */
+  CAMERA_ERROR_EFS_OPEN_TABLE_FULL,     /* Out of open-file table slots                */
+  CAMERA_ERROR_EFS_OTHER_ERROR,         /* Other error                                 */
+  CAMERA_ERROR_CONFIG,
+  CAMERA_ERROR_EXIF_ENCODE,
+  CAMERA_ERROR_VIDEO_ENGINE,
+  CAMERA_ERROR_IPL,
+  CAMERA_ERROR_INVALID_FORMAT,
+  CAMERA_ERROR_TIMEOUT,
+  CAMERA_ERROR_ESD,
+  CAMERA_ERROR_MAX
+} camera_error_type;
+
+#if defined CAMERA_ANTIBANDING_OFF
+#undef CAMERA_ANTIBANDING_OFF
+#endif
+
+#if defined CAMERA_ANTIBANDING_60HZ
+#undef CAMERA_ANTIBANDING_60HZ
+#endif
+
+#if defined CAMERA_ANTIBANDING_50HZ
+#undef CAMERA_ANTIBANDING_50HZ
+#endif
+
+#if defined CAMERA_ANTIBANDING_AUTO
+#undef CAMERA_ANTIBANDING_AUTO
+#endif
+
+typedef enum {
+  CAMERA_PP_MASK_TYPE_WNR = 0x01
+} camera_pp_mask_type;
+
+typedef enum {
+  CAMERA_ANTIBANDING_OFF,
+  CAMERA_ANTIBANDING_60HZ,
+  CAMERA_ANTIBANDING_50HZ,
+  CAMERA_ANTIBANDING_AUTO,
+  CAMERA_ANTIBANDING_AUTO_50HZ,
+  CAMERA_ANTIBANDING_AUTO_60HZ,
+  CAMERA_MAX_ANTIBANDING,
+} camera_antibanding_type;
+
+/* Enum Type for different ISO Mode supported */
+typedef enum {
+  CAMERA_ISO_AUTO = 0,
+  CAMERA_ISO_DEBLUR,
+  CAMERA_ISO_100,
+  CAMERA_ISO_200,
+  CAMERA_ISO_400,
+  CAMERA_ISO_800,
+  CAMERA_ISO_1600,
+  CAMERA_ISO_MAX
+} camera_iso_mode_type;
+
+typedef enum {
+  MM_CAMERA_FACIAL_FEATURE_FD, // facial detection
+  MM_CAMERA_FACIAL_FEATURE_MAX
+} camera_facial_features;
+
+typedef enum {
+  AEC_ROI_OFF,
+  AEC_ROI_ON
+} aec_roi_ctrl_t;
+
+typedef enum {
+  AEC_ROI_BY_INDEX,
+  AEC_ROI_BY_COORDINATE,
+} aec_roi_type_t;
+
+typedef struct {
+  uint32_t x;
+  uint32_t y;
+} cam_coordinate_type_t;
+
+/*
+ * Define DRAW_RECTANGLES to draw rectangles on screen. Just for test purpose.
+ */
+//#define DRAW_RECTANGLES
+
+typedef struct {
+  uint16_t x;
+  uint16_t y;
+  uint16_t dx;
+  uint16_t dy;
+} roi_t;
+
+typedef struct {
+  aec_roi_ctrl_t aec_roi_enable;
+  aec_roi_type_t aec_roi_type;
+  union {
+    cam_coordinate_type_t coordinate;
+    uint32_t aec_roi_idx;
+  } aec_roi_position;
+} cam_set_aec_roi_t;
+
+typedef struct {
+  uint32_t frm_id;
+  uint8_t num_roi;
+  roi_t roi[MAX_ROI];
+  uint8_t is_multiwindow;
+} roi_info_t;
+
+/* Exif Tag Data Type */
+typedef enum
+{
+    EXIF_BYTE      = 1,
+    EXIF_ASCII     = 2,
+    EXIF_SHORT     = 3,
+    EXIF_LONG      = 4,
+    EXIF_RATIONAL  = 5,
+    EXIF_UNDEFINED = 7,
+    EXIF_SLONG     = 9,
+    EXIF_SRATIONAL = 10
+} exif_tag_type_t; /* NOTE(review): qexif.h declares the same enum/typedefs; verify both headers are never included in one TU */
+
+
+/* Exif Rational Data Type */
+typedef struct
+{
+    uint32_t  num;    // Numerator
+    uint32_t  denom;  // Denominator
+
+} rat_t;
+
+/* Exif Signed Rational Data Type */
+typedef struct
+{
+    int32_t  num;    // Numerator
+    int32_t  denom;  // Denominator
+
+} srat_t;
+
+typedef struct /* one Exif tag value; see qexif.h for the fully documented twin of this struct */
+{
+  exif_tag_type_t type; /* wire type of the data below */
+  uint8_t copy;         /* nonzero: library should keep its own copy of pointed-to data */
+  uint32_t count;       /* element count (string length for EXIF_ASCII) */
+  union
+  {
+    char      *_ascii;
+    uint8_t   *_bytes;
+    uint8_t    _byte;
+    uint16_t  *_shorts;
+    uint16_t   _short;
+    uint32_t  *_longs;
+    uint32_t   _long;
+    rat_t     *_rats;
+    rat_t      _rat;
+    uint8_t   *_undefined;
+    int32_t   *_slongs;
+    int32_t    _slong;
+    srat_t    *_srats;
+    srat_t     _srat;
+  } data; /* member selected by type and count (pointer forms when count > 1) */
+} exif_tag_entry_t;
+
+typedef struct { /* tag id paired with its value */
+    uint32_t      tag_id;
+    exif_tag_entry_t  tag_entry;
+} exif_tags_info_t;
+
+
+typedef enum { /* HDR / exposure-bracketing operating mode */
+ HDR_BRACKETING_OFF,
+ HDR_MODE,
+ EXP_BRACKETING_MODE
+ } hdr_mode;
+
+typedef struct { /* HDR / exposure-bracketing configuration */
+  hdr_mode mode;
+  uint32_t hdr_enable;
+  uint32_t total_frames;
+  uint32_t total_hal_frames;
+  char values[MAX_EXP_BRACKETING_LENGTH];  /* user defined values */
+} exp_bracketing_t;
+typedef struct { /* AEC metering areas with per-area weights */
+  roi_t      mtr_area[MAX_ROI];
+  uint32_t   num_area; /* number of valid entries in mtr_area[] / weight[] */
+  int        weight[MAX_ROI];
+} aec_mtr_area_t;
+
+typedef struct { /* wavelet-denoise control */
+  int denoise_enable;
+  int process_plates; /* which plates to process; exact semantics defined by the ISP - confirm */
+} denoise_param_t;
+
+#ifndef HAVE_CAMERA_SIZE_TYPE
+  #define HAVE_CAMERA_SIZE_TYPE /* guard: camera_size_type may also be declared by other camera headers */
+struct camera_size_type {
+  int width;
+  int height;
+};
+#endif
+
+typedef struct { /* layout of one frame buffer */
+  uint32_t yoffset;     /* offset of luma plane from buffer start */
+  uint32_t cbcr_offset; /* offset of chroma plane from buffer start */
+  uint32_t size;        /* total buffer size in bytes */
+  struct camera_size_type resolution;
+}cam_buf_info_t;
+
+typedef struct { /* integer 2-D point */
+  int x;
+  int y;
+}cam_point_t;
+
+typedef struct { /* 3A state (AF/AEC/AWB) captured at snapshot time */
+  /* AF parameters */
+  uint8_t focus_position;
+  /* AEC parameters */
+  uint32_t line_count;
+  uint8_t luma_target;
+  /* AWB parameters */
+  int32_t r_gain;
+  int32_t b_gain;
+  int32_t g_gain;
+  uint8_t exposure_mode;
+  uint8_t exposure_program;
+  float exposure_time;
+  uint32_t iso_speed;
+} snapshotData_info_t;
+
+
+typedef enum { /* high-frame-rate video modes; note OFF == 1, not 0 */
+  CAMERA_HFR_MODE_OFF = 1,
+  CAMERA_HFR_MODE_60FPS,
+  CAMERA_HFR_MODE_90FPS,
+  CAMERA_HFR_MODE_120FPS,
+  CAMERA_HFR_MODE_150FPS,
+} camera_hfr_mode_t;
+
+/* frame Q*/
+struct fifo_node /* singly-linked node; f points at the queued frame payload */
+{
+  struct fifo_node *next;
+  void *f;
+};
+
+struct fifo_queue /* frame FIFO; mut guards the list, wait is presumably signalled on enqueue - confirm at the call sites */
+{
+  int num_of_frames;
+  struct fifo_node *front;
+  struct fifo_node *back;
+  pthread_mutex_t mut;
+  pthread_cond_t wait;
+  char* name; /* for logging/identification */
+};
+
+typedef struct { /* buffers registered for the histogram stats stream */
+  uint32_t buf_len;
+  uint8_t num;        /* number of valid entries in vaddr[] (max 8) */
+  uint8_t pmem_type;
+  uint32_t vaddr[8];
+} mm_camera_histo_mem_info_t;
+
+typedef enum { /* asynchronous control-event identifiers */
+  MM_CAMERA_CTRL_EVT_ZOOM_DONE,
+  MM_CAMERA_CTRL_EVT_AUTO_FOCUS_DONE,
+  MM_CAMERA_CTRL_EVT_PREP_SNAPSHOT,
+  MM_CAMERA_CTRL_EVT_SNAPSHOT_CONFIG_DONE,
+  MM_CAMERA_CTRL_EVT_WDN_DONE, // wavelet denoise done
+  MM_CAMERA_CTRL_EVT_HDR_DONE,
+  MM_CAMERA_CTRL_EVT_ERROR,
+  MM_CAMERA_CTRL_EVT_MAX // count/sentinel
+}mm_camera_ctrl_event_type_t;
+
+typedef struct { /* payload of a control event */
+  mm_camera_ctrl_event_type_t evt;
+  cam_ctrl_status_t status; /* cam_ctrl_status_t declared elsewhere in this header set */
+  unsigned long cookie;     /* opaque value carried back with the event */
+} mm_camera_ctrl_event_t;
+
+typedef enum { /* per-channel streaming state events */
+  MM_CAMERA_CH_EVT_STREAMING_ON,
+  MM_CAMERA_CH_EVT_STREAMING_OFF,
+  MM_CAMERA_CH_EVT_STREAMING_ERR,
+  MM_CAMERA_CH_EVT_DATA_DELIVERY_DONE,
+  MM_CAMERA_CH_EVT_DATA_REQUEST_MORE,
+  MM_CAMERA_CH_EVT_MAX // count/sentinel
+}mm_camera_ch_event_type_t;
+
+typedef struct { /* channel event payload */
+  uint32_t ch; /* channel identifier */
+  mm_camera_ch_event_type_t evt;
+} mm_camera_ch_event_t;
+
+typedef struct { /* histogram statistics reference */
+  uint32_t index;
+  /* TBD: need more fields for histo stats? */
+} mm_camera_stats_histo_t;
+
+typedef struct  { /* statistics event payload */
+  uint32_t event_id;
+  union {
+    mm_camera_stats_histo_t    stats_histo;
+  } e;
+} mm_camera_stats_event_t;
+
+typedef enum { /* discriminator for struct fd_roi_t below */
+  FD_ROI_TYPE_HEADER,
+  FD_ROI_TYPE_DATA
+} fd_roi_type_t;
+
+typedef struct { /* face-detection configuration */
+  int fd_mode;
+  int num_fd;
+} fd_set_parm_t;
+
+typedef struct { /* header record: how many faces were found in this frame */
+  uint32_t frame_id;
+  int16_t num_face_detected;
+} fd_roi_header_type;
+
+struct fd_rect_t { /* face rectangle: top-left (x,y), width dx, height dy */
+  uint16_t x;
+  uint16_t y;
+  uint16_t dx;
+  uint16_t dy;
+};
+
+typedef struct { /* attributes of a single detected face */
+  struct fd_rect_t face_boundary;
+  uint16_t left_eye_center[2];
+  uint16_t right_eye_center[2];
+  uint16_t mouth_center[2];
+  uint8_t smile_degree;  //0 -100
+  uint8_t smile_confidence;  // confidence for smile_degree (range not documented here)
+  uint8_t blink_detected;  // 0 or 1
+  uint8_t is_face_recognised;  // 0 or 1
+  int8_t gaze_angle;  // -90 -45 0 45 90 for head left to right tilt
+  int8_t updown_dir;  // -90 to 90
+  int8_t leftright_dir;  //-90 to 90
+  int8_t roll_dir;  // -90 to 90
+  int8_t left_right_gaze;  // -50 to 50
+  int8_t top_bottom_gaze;  // -50 to 50
+  uint8_t left_blink;  // 0 - 100
+  uint8_t right_blink;  // 0 - 100
+  int8_t id;  // unique id for face tracking within view unless view changes
+  int8_t score;  // score of confidence( 0 -100)
+} fd_face_type;
+
+typedef struct { /* data record: one face (idx) out of num_face_detected */
+  uint32_t frame_id;
+  uint8_t idx;
+  fd_face_type face;
+} fd_roi_data_type;
+
+struct fd_roi_t { /* tagged union: type selects hdr or data */
+  fd_roi_type_t type;
+  union {
+    fd_roi_header_type hdr;
+    fd_roi_data_type data;
+  } d;
+};
+
+typedef struct  { /* info event payload (histogram memory or face ROI) */
+  uint32_t event_id;
+  union {
+    mm_camera_histo_mem_info_t histo_mem_info;
+    struct fd_roi_t roi;
+  } e;
+} mm_camera_info_event_t;
+
+typedef struct  { /* vendor-private event envelope */
+  uint32_t trans_id;   /* transaction id */
+  uint32_t evt_type;   /* event type */
+  int32_t data_length; /* the length of valid data */
+  uint8_t evt_data[1]; /* flattened private payload; C89-style flexible array (NOTE(review): C99 would use []) */
+} mm_camera_private_event_t;
+
+typedef enum { /* top-level event categories; selects the union member below */
+  MM_CAMERA_EVT_TYPE_CH,
+  MM_CAMERA_EVT_TYPE_CTRL,
+  MM_CAMERA_EVT_TYPE_STATS,
+  MM_CAMERA_EVT_TYPE_INFO,
+  MM_CAMERA_EVT_TYPE_PRIVATE_EVT,
+  MM_CAMERA_EVT_TYPE_MAX // count/sentinel
+} mm_camera_event_type_t;
+
+typedef struct { /* generic camera event; e is tagged by event_type */
+  mm_camera_event_type_t event_type;
+  union {
+    mm_camera_ch_event_t ch;
+    mm_camera_ctrl_event_t ctrl;
+    mm_camera_stats_event_t stats;
+    mm_camera_info_event_t info;
+    mm_camera_private_event_t pri_evt;
+  } e;
+} mm_camera_event_t;
+
+typedef enum { /* reprocess-pipeline command codes */
+  MM_CAMERA_REPRO_CMD_INVALID,
+  MM_CAMERA_REPRO_CMD_OPEN,
+  MM_CAMERA_REPRO_CMD_CONFIG,
+  MM_CAMERA_REPRO_CMD_ATTACH_DETACH,
+  MM_CAMERA_REPRO_CMD_START_STOP,
+  MM_CAMERA_REPRO_CMD_REPROCESS,
+  MM_CAMERA_REPRO_CMD_CLOSE,
+  MM_CAMERA_REPRO_CMD_MAX // count/sentinel
+} mmcam_repro_cmd_type_t;
+
+/* re-process isp type definition */
+typedef enum {
+  MM_CAMERA_REPRO_ISP_NOT_USED,
+  MM_CAMERA_REPRO_ISP_PIX,
+  MM_CAMERA_REPRO_ISP_CROP_AND_SCALING,
+  MM_CAMERA_REPRO_ISP_COLOR_CONVERSION,
+  MM_CAMERA_REPRO_ISP_DNOISE_AND_SHARPNESS,
+  MM_CAMERA_REPRO_ISP_MAX_NUM // count/sentinel
+} mm_camera_repro_isp_type_t;
+
+typedef struct { /* per-plane layout of a frame to reprocess */
+  uint32_t addr_offset;
+  uint32_t length;
+  uint32_t data_offset;
+} mm_camera_repro_plane_t;
+
+typedef struct { /* MM_CAMERA_REPRO_CMD_REPROCESS payload */
+  uint32_t repro_handle;  /* repro isp handle */
+  uint32_t inst_handle; /* instance handle */
+  int8_t   buf_idx;     /* buffer index    */
+  uint32_t frame_id;    /* frame id        */
+  uint32_t frame_len;   /* frame length    */
+  int8_t   num_planes;  /* number of valid entries in planes[] */
+  mm_camera_repro_plane_t planes[VIDEO_MAX_PLANES];
+  struct timeval timestamp;
+} mm_camera_repro_cmd_reprocess_t;
+
+#define MM_CAMERA_MAX_NUM_REPROCESS_DEST 2
+
+typedef struct { /* MM_CAMERA_REPRO_CMD_OPEN payload */
+  uint8_t  isp_type;      /* in: mm_camera_repro_isp_type_t */
+  uint32_t repro_handle;  /* out */
+} mm_camera_repro_cmd_open_t;
+
+typedef struct { /* one endpoint (source or destination) of the reprocess path */
+  int image_mode;
+  int width;
+  int height;
+  cam_format_t format;
+  uint32_t inst_handle; /* stream handler */
+} mm_camera_repro_config_data_t;
+
+typedef struct { /* MM_CAMERA_REPRO_CMD_CONFIG payload */
+  uint32_t repro_handle;
+  int num_dest; /* number of valid entries in dest[] */
+  mm_camera_repro_config_data_t src;
+  mm_camera_repro_config_data_t dest[MM_CAMERA_MAX_NUM_REPROCESS_DEST];
+} mm_camera_repro_cmd_config_t;
+
+typedef struct { /* MM_CAMERA_REPRO_CMD_ATTACH_DETACH payload */
+  uint32_t repro_handle;   /* repro isp handle */
+  uint32_t inst_handle;    /* instance handle of dest stream */
+  uint8_t  attach_flag;    /* flag: attach(TRUE)/detach(FALSE) */
+} mm_camera_repro_cmd_attach_detach_t;
+
+typedef struct { /* MM_CAMERA_REPRO_CMD_START_STOP payload */
+  uint32_t repro_handle;   /* repro isp handle */
+  uint32_t dest_handle;    /* Which destination to start/stop */
+  uint8_t  start_flag;     /* flag: start isp(TRUE)/stop isp(FALSE) */
+} mm_camera_repro_cmd_start_stop_t;
+
+typedef struct { /* reprocess command envelope; payload is tagged by cmd */
+  /* mm_camera_repro_cmd_type_t */
+  int cmd;
+  /* Union of the possible payloads for
+   * this reprocess command. */
+  union {
+    /* MM_CAMERA_REPRO_CMD_OPEN */
+    mm_camera_repro_cmd_open_t open;
+    /* MM_CAMERA_REPRO_CMD_CONFIG */
+    mm_camera_repro_cmd_config_t config;
+    /* MM_CAMERA_REPRO_CMD_ATTACH_DETACH */
+    mm_camera_repro_cmd_attach_detach_t attach_detach;
+    /* MM_CAMERA_REPRO_CMD_REPROCESS */
+    mm_camera_repro_cmd_reprocess_t reprocess;
+    /* MM_CAMERA_REPRO_CMD_START_STOP */
+    mm_camera_repro_cmd_start_stop_t start_stop;
+    /* MM_CAMERA_REPRO_CMD_CLOSE */
+    uint32_t repro_handle;
+  } payload;
+} mm_camera_repro_cmd_t;
+
+typedef struct { /* mobicat (metadata) enable query */
+  /*input parameter*/
+  int enable;
+  /*output parameter*/
+  uint32_t mobicat_size;
+}mm_cam_mobicat_info_t;
+
+#define MAX_MOBICAT_SIZE 8092 /* NOTE(review): possibly a typo for 8192; kept as-is for compatibility */
+
+/*
+  WARNING: Since this data structure is huge,
+  never use it as a local variable; doing so can easily cause
+  stack overflow.
+  Always use malloc to allocate heap memory for it.
+*/
+typedef struct {
+  int max_len;   //tells the client the max size of the tags buffer (MAX_MOBICAT_SIZE bytes)
+  int data_len;  //client returns the real size including the terminating '\0'
+  char tags[MAX_MOBICAT_SIZE];
+} cam_exif_tags_t;
+
+/******************************************************************************
+ * Function: exif_set_tag
+ * Description: Inserts or modifies an Exif tag to the Exif Info object. Typical
+ *              use is to call this function multiple times - to insert all the
+ *              desired Exif Tags individually to the Exif Info object and
+ *              then pass the info object to the Jpeg Encoder object so
+ *              the inserted tags would be emitted as tags in the Exif header.
+ * Input parameters:
+ *   obj       - The Exif Info object where the tag would be inserted to or
+ *               modified from.
+ *   tag_id    - The Exif Tag ID of the tag to be inserted/modified.
+ *   p_entry   - The pointer to the tag entry structure which contains the
+ *               details of tag. The pointer can be set to NULL to un-do
+ *               previous insertion for a certain tag.
+ * Return values:
+ *     JPEGERR_SUCCESS
+ *     JPEGERR_ENULLPTR
+ *     JPEGERR_EFAILED
+ * (See jpegerr.h for description of error values.)
+ * Notes: none
+ *****************************************************************************/
+int exif_set_tag(exif_info_obj_t    obj,
+                 exif_tag_id_t      tag_id,
+                 exif_tag_entry_t  *p_entry); /* declaration only; implementation provided by the jpeg encoder library */
+
+
+#endif /* __QCAMERA_INTF_H__ */
diff --git a/msmcobalt/common.mk b/msmcobalt/common.mk
new file mode 100644
index 0000000..a872679
--- /dev/null
+++ b/msmcobalt/common.mk
@@ -0,0 +1,9 @@
+common_deps := # build-order dependencies shared by the camera modules
+kernel_includes := # extra include paths for kernel UAPI headers
+
+ifeq ($(call is-vendor-board-platform,QCOM),true) # only on QCOM vendor board platforms
+ifeq ($(TARGET_COMPILE_WITH_MSM_KERNEL),true) # headers come from the in-tree MSM kernel build
+    common_deps += $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr
+    kernel_includes += $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr/include
+endif
+endif
diff --git a/msmcobalt/mm-image-codec/Android.mk b/msmcobalt/mm-image-codec/Android.mk
new file mode 100644
index 0000000..19b1346
--- /dev/null
+++ b/msmcobalt/mm-image-codec/Android.mk
@@ -0,0 +1,3 @@
+ifeq ($(TARGET_ARCH),$(filter $(TARGET_ARCH),arm arm64)) # build mm-image-codec only for arm/arm64 targets
+include $(call all-subdir-makefiles)
+endif
diff --git a/msmcobalt/mm-image-codec/qexif/qexif.h b/msmcobalt/mm-image-codec/qexif/qexif.h
new file mode 100644
index 0000000..91aedde
--- /dev/null
+++ b/msmcobalt/mm-image-codec/qexif/qexif.h
@@ -0,0 +1,1728 @@
+/*Copyright (c) 2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of The Linux Foundation nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.*/
+
+
+#ifndef __QEXIF_H__
+#define __QEXIF_H__
+
+#include <stdio.h>  /* NOTE(review): this header uses uint32_t/int32_t below but does not include <stdint.h>; it relies on the includer providing it - confirm */
+
+/* Exif Info (opaque definition) */
+struct exif_info_t;
+typedef struct exif_info_t * exif_info_obj_t; /* opaque handle to an Exif Info object */
+
+/* Exif Tag ID */
+typedef uint32_t exif_tag_id_t; /* packed as (exif_tag_offset_t << 16) | raw tag id; see CONSTRUCT_TAGID below */
+
+
+/* Exif Rational Data Type */
+typedef struct
+{
+    uint32_t  num;    // Numerator
+    uint32_t  denom;  // Denominator
+
+} rat_t;
+
+/* Exif Signed Rational Data Type */
+typedef struct
+{
+    int32_t  num;    // Numerator
+    int32_t  denom;  // Denominator
+
+} srat_t;
+
+/* Exif Tag Data Type */
+typedef enum
+{
+    EXIF_BYTE      = 1,
+    EXIF_ASCII     = 2,
+    EXIF_SHORT     = 3,
+    EXIF_LONG      = 4,
+    EXIF_RATIONAL  = 5,
+    EXIF_UNDEFINED = 7,
+    EXIF_SLONG     = 9,
+    EXIF_SRATIONAL = 10
+} exif_tag_type_t; /* values match the EXIF/TIFF on-wire type codes */
+
+/* Exif Tag Entry
+ * Used in exif_set_tag as an input argument and
+ * in exif_get_tag as an output argument. */
+typedef struct
+{
+    /* The Data Type of the Tag:
+     * Byte, ASCII, Short, Long, Rational, etc. */
+    exif_tag_type_t type;
+
+    /* Copy
+     * This field is used when a user pass this structure to
+     * be stored in an exif_info_t via the exif_set_tag method.
+     * The routine would look like this field and decide whether
+     * it is necessary to make a copy of the data pointed by this
+     * structure (all string and array types).
+     * If this field is set to false, only a pointer to the actual
+     * data is retained and it is the caller's responsibility to
+     * ensure the validity of the data before the exif_info_t object
+     * is destroyed.
+     */
+    uint8_t copy;
+
+    /* Data count
+     * This indicates the number of elements of the data. For example, if
+     * the type is EXIF_BYTE and the count is 1, that means the actual data
+     * is one byte and is accessible by data._byte. If the type is EXIF_BYTE
+     * and the count is more than one, the actual data is contained in an
+     * array and is accessible by data._bytes. In case of EXIF_ASCII, it
+     * indicates the string length and in case of EXIF_UNDEFINED, it indicates
+     * the length of the array.
+     */
+    uint32_t count;
+
+    /* Data
+     * A union which covers all possible data types. The user should pick
+     * the right field to use depending on the data type and the count.
+     * See in-line comment below.
+     */
+    union
+    {
+        char      *_ascii;      // EXIF_ASCII (count indicates string length)
+        uint8_t   *_bytes;      // EXIF_BYTE  (count > 1)
+        uint8_t    _byte;       // EXIF_BYTE  (count = 1)
+        uint16_t  *_shorts;     // EXIF_SHORT (count > 1)
+        uint16_t   _short;      // EXIF_SHORT (count = 1)
+        uint32_t  *_longs;      // EXIF_LONG  (count > 1)
+        uint32_t   _long;       // EXIF_LONG  (count = 1)
+        rat_t     *_rats;       // EXIF_RATIONAL  (count > 1)
+        rat_t      _rat;        // EXIF_RATIONAL  (count = 1)
+        uint8_t   *_undefined;  // EXIF_UNDEFINED (count indicates length)
+        int32_t   *_slongs;     // EXIF_SLONG (count > 1)
+        int32_t    _slong;      // EXIF_SLONG (count = 1)
+        srat_t    *_srats;      // EXIF_SRATIONAL (count > 1)
+        srat_t     _srat;       // EXIF_SRATIONAL (count = 1)
+
+    } data;
+
+} exif_tag_entry_t;
+
+/* =======================================================================
+**                          Macro Definitions
+** ======================================================================= */
+/* Enum defined to let the compiler generate unique offset numbers for the
+ * different tags - ordering matters! NOT INTENDED to be used by any application. */
+typedef enum
+{
+    // GPS IFD
+    GPS_VERSION_ID = 0,
+    GPS_LATITUDE_REF,
+    GPS_LATITUDE,
+    GPS_LONGITUDE_REF,
+    GPS_LONGITUDE,
+    GPS_ALTITUDE_REF,
+    GPS_ALTITUDE,
+    GPS_TIMESTAMP,
+    GPS_SATELLITES,
+    GPS_STATUS,
+    GPS_MEASUREMODE,
+    GPS_DOP,
+    GPS_SPEED_REF,
+    GPS_SPEED,
+    GPS_TRACK_REF,
+    GPS_TRACK,
+    GPS_IMGDIRECTION_REF,
+    GPS_IMGDIRECTION,
+    GPS_MAPDATUM,
+    GPS_DESTLATITUDE_REF,
+    GPS_DESTLATITUDE,
+    GPS_DESTLONGITUDE_REF,
+    GPS_DESTLONGITUDE,
+    GPS_DESTBEARING_REF,
+    GPS_DESTBEARING,
+    GPS_DESTDISTANCE_REF,
+    GPS_DESTDISTANCE,
+    GPS_PROCESSINGMETHOD,
+    GPS_AREAINFORMATION,
+    GPS_DATESTAMP,
+    GPS_DIFFERENTIAL,
+
+    // TIFF IFD
+    NEW_SUBFILE_TYPE,
+    SUBFILE_TYPE,
+    IMAGE_WIDTH,
+    IMAGE_LENGTH,
+    BITS_PER_SAMPLE,
+    COMPRESSION,
+    PHOTOMETRIC_INTERPRETATION,
+    THRESH_HOLDING,
+    CELL_WIDTH,
+    CELL_HEIGHT,
+    FILL_ORDER,
+    DOCUMENT_NAME,
+    IMAGE_DESCRIPTION,
+    MAKE,
+    MODEL,
+    STRIP_OFFSETS,
+    ORIENTATION,
+    SAMPLES_PER_PIXEL,
+    ROWS_PER_STRIP,
+    STRIP_BYTE_COUNTS,
+    MIN_SAMPLE_VALUE,
+    MAX_SAMPLE_VALUE,
+    X_RESOLUTION,
+    Y_RESOLUTION,
+    PLANAR_CONFIGURATION,
+    PAGE_NAME,
+    X_POSITION,
+    Y_POSITION,
+    FREE_OFFSET,
+    FREE_BYTE_COUNTS,
+    GRAY_RESPONSE_UNIT,
+    GRAY_RESPONSE_CURVE,
+    T4_OPTION,
+    T6_OPTION,
+    RESOLUTION_UNIT,
+    PAGE_NUMBER,
+    TRANSFER_FUNCTION,
+    SOFTWARE,
+    DATE_TIME,
+    ARTIST,
+    HOST_COMPUTER,
+    PREDICTOR,
+    WHITE_POINT,
+    PRIMARY_CHROMATICITIES,
+    COLOR_MAP,
+    HALFTONE_HINTS,
+    TILE_WIDTH,
+    TILE_LENGTH,
+    TILE_OFFSET,
+    TILE_BYTE_COUNTS,
+    INK_SET,
+    INK_NAMES,
+    NUMBER_OF_INKS,
+    DOT_RANGE,
+    TARGET_PRINTER,
+    EXTRA_SAMPLES,
+    SAMPLE_FORMAT,
+    TRANSFER_RANGE,
+    JPEG_PROC,
+    JPEG_INTERCHANGE_FORMAT,
+    JPEG_INTERCHANGE_FORMAT_LENGTH,
+    JPEG_RESTART_INTERVAL,
+    JPEG_LOSSLESS_PREDICTORS,
+    JPEG_POINT_TRANSFORMS,
+    JPEG_Q_TABLES,
+    JPEG_DC_TABLES,
+    JPEG_AC_TABLES,
+    YCBCR_COEFFICIENTS,
+    YCBCR_SUB_SAMPLING,
+    YCBCR_POSITIONING,
+    REFERENCE_BLACK_WHITE,
+    GAMMA,
+    ICC_PROFILE_DESCRIPTOR,
+    SRGB_RENDERING_INTENT,
+    IMAGE_TITLE,
+    COPYRIGHT,
+    EXIF_IFD,
+    ICC_PROFILE,
+    GPS_IFD,
+
+
+    // TIFF IFD (Thumbnail)
+    TN_IMAGE_WIDTH,
+    TN_IMAGE_LENGTH,
+    TN_BITS_PER_SAMPLE,
+    TN_COMPRESSION,
+    TN_PHOTOMETRIC_INTERPRETATION,
+    TN_IMAGE_DESCRIPTION,
+    TN_MAKE,
+    TN_MODEL,
+    TN_STRIP_OFFSETS,
+    TN_ORIENTATION,
+    TN_SAMPLES_PER_PIXEL,
+    TN_ROWS_PER_STRIP,
+    TN_STRIP_BYTE_COUNTS,
+    TN_X_RESOLUTION,
+    TN_Y_RESOLUTION,
+    TN_PLANAR_CONFIGURATION,
+    TN_RESOLUTION_UNIT,
+    TN_TRANSFER_FUNCTION,
+    TN_SOFTWARE,
+    TN_DATE_TIME,
+    TN_ARTIST,
+    TN_WHITE_POINT,
+    TN_PRIMARY_CHROMATICITIES,
+    TN_JPEGINTERCHANGE_FORMAT,
+    TN_JPEGINTERCHANGE_FORMAT_L,
+    TN_YCBCR_COEFFICIENTS,
+    TN_YCBCR_SUB_SAMPLING,
+    TN_YCBCR_POSITIONING,
+    TN_REFERENCE_BLACK_WHITE,
+    TN_COPYRIGHT,
+
+    // EXIF IFD
+    EXPOSURE_TIME,
+    F_NUMBER,
+    EXPOSURE_PROGRAM,
+    SPECTRAL_SENSITIVITY,
+    ISO_SPEED_RATING,
+    OECF,
+    EXIF_VERSION,
+    EXIF_DATE_TIME_ORIGINAL,
+    EXIF_DATE_TIME_DIGITIZED,
+    EXIF_COMPONENTS_CONFIG,
+    EXIF_COMPRESSED_BITS_PER_PIXEL,
+    SHUTTER_SPEED,
+    APERTURE,
+    BRIGHTNESS,
+    EXPOSURE_BIAS_VALUE,
+    MAX_APERTURE,
+    SUBJECT_DISTANCE,
+    METERING_MODE,
+    LIGHT_SOURCE,
+    FLASH,
+    FOCAL_LENGTH,
+    SUBJECT_AREA,
+    EXIF_MAKER_NOTE,
+    EXIF_USER_COMMENT,
+    SUBSEC_TIME,
+    SUBSEC_TIME_ORIGINAL,
+    SUBSEC_TIME_DIGITIZED,
+    EXIF_FLASHPIX_VERSION,
+    EXIF_COLOR_SPACE,
+    EXIF_PIXEL_X_DIMENSION,
+    EXIF_PIXEL_Y_DIMENSION,
+    RELATED_SOUND_FILE,
+    INTEROP,
+    FLASH_ENERGY,
+    SPATIAL_FREQ_RESPONSE,
+    FOCAL_PLANE_X_RESOLUTION,
+    FOCAL_PLANE_Y_RESOLUTION,
+    FOCAL_PLANE_RESOLUTION_UNIT,
+    SUBJECT_LOCATION,
+    EXPOSURE_INDEX,
+    SENSING_METHOD,
+    FILE_SOURCE,
+    SCENE_TYPE,
+    CFA_PATTERN,
+    CUSTOM_RENDERED,
+    EXPOSURE_MODE,
+    WHITE_BALANCE,
+    DIGITAL_ZOOM_RATIO,
+    FOCAL_LENGTH_35MM,
+    SCENE_CAPTURE_TYPE,
+    GAIN_CONTROL,
+    CONTRAST,
+    SATURATION,
+    SHARPNESS,
+    DEVICE_SETTINGS_DESCRIPTION,
+    SUBJECT_DISTANCE_RANGE,
+    IMAGE_UID,
+    PIM,
+
+    EXIF_TAG_MAX_OFFSET /* number of supported tag offsets (sentinel) */
+
+} exif_tag_offset_t;
+
+/* Below are the supported Tags (ID and structure for their data) */
+#define CONSTRUCT_TAGID(offset,ID) (((offset) << 16) | (ID)) /* fully parenthesized to avoid operator-precedence bugs with expression arguments */
+
+// GPS tag version
+// Use EXIFTAGTYPE_GPS_VERSION_ID as the exif_tag_type (EXIF_BYTE)
+// Count should be 4
+#define _ID_GPS_VERSION_ID 0x0000
+#define EXIFTAGID_GPS_VERSION_ID \
+  CONSTRUCT_TAGID(GPS_VERSION_ID, _ID_GPS_VERSION_ID)
+#define EXIFTAGTYPE_GPS_VERSION_ID EXIF_BYTE
+// North or South Latitude
+// Use EXIFTAGTYPE_GPS_LATITUDE_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+#define _ID_GPS_LATITUDE_REF 0x0001
+#define EXIFTAGID_GPS_LATITUDE_REF \
+  CONSTRUCT_TAGID(GPS_LATITUDE_REF, _ID_GPS_LATITUDE_REF)
+#define EXIFTAGTYPE_GPS_LATITUDE_REF EXIF_ASCII
+// Latitude
+// Use EXIFTAGTYPE_GPS_LATITUDE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_GPS_LATITUDE 0x0002
+#define EXIFTAGID_GPS_LATITUDE CONSTRUCT_TAGID(GPS_LATITUDE, _ID_GPS_LATITUDE)
+#define EXIFTAGTYPE_GPS_LATITUDE EXIF_RATIONAL
+// East or West Longitude
+// Use EXIFTAGTYPE_GPS_LONGITUDE_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+#define _ID_GPS_LONGITUDE_REF 0x0003
+#define EXIFTAGID_GPS_LONGITUDE_REF \
+  CONSTRUCT_TAGID(GPS_LONGITUDE_REF, _ID_GPS_LONGITUDE_REF)
+#define EXIFTAGTYPE_GPS_LONGITUDE_REF EXIF_ASCII
+// Longitude
+// Use EXIFTAGTYPE_GPS_LONGITUDE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_GPS_LONGITUDE 0x0004
+#define EXIFTAGID_GPS_LONGITUDE \
+  CONSTRUCT_TAGID(GPS_LONGITUDE, _ID_GPS_LONGITUDE)
+#define EXIFTAGTYPE_GPS_LONGITUDE EXIF_RATIONAL
+// Altitude reference
+// Use EXIFTAGTYPE_GPS_ALTITUDE_REF as the exif_tag_type (EXIF_BYTE)
+#define _ID_GPS_ALTITUDE_REF 0x0005
+#define EXIFTAGID_GPS_ALTITUDE_REF \
+  CONSTRUCT_TAGID(GPS_ALTITUDE_REF, _ID_GPS_ALTITUDE_REF)
+#define EXIFTAGTYPE_GPS_ALTITUDE_REF EXIF_BYTE
+// Altitude
+// Use EXIFTAGTYPE_GPS_ALTITUDE as the exif_tag_type (EXIF_RATIONAL)
+#define _ID_GPS_ALTITUDE 0x0006
+#define EXIFTAGID_GPS_ALTITUDE CONSTRUCT_TAGID(GPS_ALTITUDE, _ID_GPS_ALTITUDE)
+#define EXIFTAGTYPE_GPS_ALTITUE EXIF_RATIONAL /* NOTE(review): name misspells ALTITUDE; kept as-is in case external code references it - confirm */
+// GPS time (atomic clock)
+// Use EXIFTAGTYPE_GPS_TIMESTAMP as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_GPS_TIMESTAMP 0x0007
+#define EXIFTAGID_GPS_TIMESTAMP \
+  CONSTRUCT_TAGID(GPS_TIMESTAMP, _ID_GPS_TIMESTAMP)
+#define EXIFTAGTYPE_GPS_TIMESTAMP EXIF_RATIONAL
+// GPS Satellites
+// Use EXIFTAGTYPE_GPS_SATELLITES as the exif_tag_type (EXIF_ASCII)
+// Count can be anything.
+#define _ID_GPS_SATELLITES 0x0008
+#define EXIFTAGID_GPS_SATELLITES \
+ CONSTRUCT_TAGID(GPS_SATELLITES, _ID_GPS_SATELLITES)
+#define EXIFTAGTYPE_GPS_SATELLITES EXIF_ASCII
+// GPS Status
+// Use EXIFTAGTYPE_GPS_STATUS as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "A" - Measurement in progress
+// "V" - Measurement Interoperability
+// Other - Reserved
+#define _ID_GPS_STATUS 0x0009
+#define EXIFTAGID_GPS_STATUS CONSTRUCT_TAGID(GPS_STATUS, _ID_GPS_STATUS)
+#define EXIFTATTYPE_GPS_STATUS EXIF_ASCII /* NOTE(review): name misspells EXIFTAGTYPE; kept as-is in case external code references it - confirm */
+// GPS Measure Mode
+// Use EXIFTAGTYPE_GPS_MEASUREMODE as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "2" - 2-dimensional measurement
+// "3" - 3-dimensional measurement
+// Other - Reserved
+#define _ID_GPS_MEASUREMODE 0x000a
+#define EXIFTAGID_GPS_MEASUREMODE \
+  CONSTRUCT_TAGID(GPS_MEASUREMODE, _ID_GPS_MEASUREMODE)
+#define EXIFTAGTYPE_GPS_MEASUREMODE EXIF_ASCII
+// GPS Measurement precision (DOP)
+// Use EXIFTAGTYPE_GPS_DOP as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_GPS_DOP 0x000b
+#define EXIFTAGID_GPS_DOP CONSTRUCT_TAGID(GPS_DOP, _ID_GPS_DOP)
+#define EXIFTAGTYPE_GPS_DOP EXIF_RATIONAL
+// Speed Unit
+// Use EXIFTAGTYPE_GPS_SPEED_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "K" - Kilometers per hour
+// "M" - Miles per hour
+// "N" - Knots
+// Other - Reserved
+#define _ID_GPS_SPEED_REF 0x000c
+#define EXIFTAGID_GPS_SPEED_REF \
+  CONSTRUCT_TAGID(GPS_SPEED_REF, _ID_GPS_SPEED_REF)
+#define EXIFTAGTYPE_GPS_SPEED_REF EXIF_ASCII
+// Speed of GPS receiver
+// Use EXIFTAGTYPE_GPS_SPEED as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_GPS_SPEED 0x000d
+#define EXIFTAGID_GPS_SPEED CONSTRUCT_TAGID(GPS_SPEED, _ID_GPS_SPEED)
+#define EXIFTAGTYPE_GPS_SPEED EXIF_RATIONAL
+// Reference of direction of movement
+// Use EXIFTAGTYPE_GPS_TRACK_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "T" - True direction
+// "M" - Magnetic direction
+// Other - Reserved
+#define _ID_GPS_TRACK_REF 0x000e
+#define EXIFTAGID_GPS_TRACK_REF \
+  CONSTRUCT_TAGID(GPS_TRACK_REF, _ID_GPS_TRACK_REF)
+#define EXIFTAGTYPE_GPS_TRACK_REF EXIF_ASCII
+// Direction of movement
+// Use EXIFTAGTYPE_GPS_TRACK as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_GPS_TRACK 0x000f
+#define EXIFTAGID_GPS_TRACK CONSTRUCT_TAGID(GPS_TRACK, _ID_GPS_TRACK)
+#define EXIFTAGTYPE_GPS_TRACK EXIF_RATIONAL
+// Reference of direction of image
+// Use EXIFTAGTYPE_GPS_IMGDIRECTION_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "T" - True direction
+// "M" - Magnetic direction
+// Other - Reserved
+#define _ID_GPS_IMGDIRECTION_REF 0x0010
+#define EXIFTAGID_GPS_IMGDIRECTION_REF \
+  CONSTRUCT_TAGID(GPS_IMGDIRECTION_REF, _ID_GPS_IMGDIRECTION_REF)
+#define EXIFTAGTYPE_GPS_IMGDIRECTION_REF EXIF_ASCII
+// Direction of image
+// Use EXIFTAGTYPE_GPS_IMGDIRECTION as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_GPS_IMGDIRECTION 0x0011
+#define EXIFTAGID_GPS_IMGDIRECTION \
+  CONSTRUCT_TAGID(GPS_IMGDIRECTION, _ID_GPS_IMGDIRECTION)
+#define EXIFTAGTYPE_GPS_IMGDIRECTION EXIF_RATIONAL
+// Geodetic survey data used
+// Use EXIFTAGTYPE_GPS_MAPDATUM as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_GPS_MAPDATUM 0x0012
+#define EXIFTAGID_GPS_MAPDATUM CONSTRUCT_TAGID(GPS_MAPDATUM, _ID_GPS_MAPDATUM)
+#define EXIFTAGTYPE_GPS_MAPDATUM EXIF_ASCII
+// Reference for latitude of destination
+// Use EXIFTAGTYPE_GPS_DESTLATITUDE_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "N" - North latitude
+// "S" - South latitude
+// Other - Reserved
+#define _ID_GPS_DESTLATITUDE_REF 0x0013
+#define EXIFTAGID_GPS_DESTLATITUDE_REF \
+  CONSTRUCT_TAGID(GPS_DESTLATITUDE_REF, _ID_GPS_DESTLATITUDE_REF)
+#define EXIFTAGTYPE_GPS_DESTLATITUDE_REF EXIF_ASCII
+// Latitude of destination
+// Use EXIFTAGTYPE_GPS_DESTLATITUDE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_GPS_DESTLATITUDE 0x0014
+#define EXIFTAGID_GPS_DESTLATITUDE \
+  CONSTRUCT_TAGID(GPS_DESTLATITUDE, _ID_GPS_DESTLATITUDE)
+#define EXIFTAGTYPE_GPS_DESTLATITUDE EXIF_RATIONAL
+// Reference for longitude of destination
+// Use EXIFTAGTYPE_GPS_DESTLONGITUDE_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "E" - East longitude
+// "W" - West longitude
+// Other - Reserved
+#define _ID_GPS_DESTLONGITUDE_REF 0x0015
+#define EXIFTAGID_GPS_DESTLONGITUDE_REF \
+  CONSTRUCT_TAGID(GPS_DESTLONGITUDE_REF, _ID_GPS_DESTLONGITUDE_REF)
+#define EXIFTAGTYPE_GPS_DESTLONGITUDE_REF EXIF_ASCII
+// Longitude of destination
+// Use EXIFTAGTYPE_GPS_DESTLONGITUDE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_GPS_DESTLONGITUDE 0x0016
+#define EXIFTAGID_GPS_DESTLONGITUDE CONSTRUCT_TAGID(GPS_DESTLONGITUDE, _ID_GPS_DESTLONGITUDE)
+#define EXIFTAGTYPE_GPS_DESTLONGITUDE EXIF_RATIONAL
+// Reference for bearing of destination
+// Use EXIFTAGTYPE_GPS_DESTBEARING_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "T" - True direction
+// "M" - Magnetic direction
+// Other - Reserved
+#define _ID_GPS_DESTBEARING_REF 0x0017
+#define EXIFTAGID_GPS_DESTBEARING_REF \
+  CONSTRUCT_TAGID(GPS_DESTBEARING_REF, _ID_GPS_DESTBEARING_REF)
+#define EXIFTAGTYPE_GPS_DESTBEARING_REF EXIF_ASCII
+// Bearing of destination
+// Use EXIFTAGTYPE_GPS_DESTBEARING as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_GPS_DESTBEARING 0x0018
+#define EXIFTAGID_GPS_DESTBEARING \
+  CONSTRUCT_TAGID(GPS_DESTBEARING, _ID_GPS_DESTBEARING)
+#define EXIFTAGTYPE_GPS_DESTBEARING EXIF_RATIONAL
+// Reference for distance to destination
+// Use EXIFTAGTYPE_GPS_DESTDISTANCE_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "K" - Kilometers
+// "M" - Miles
+// "N" - Nautical miles (distance units per the EXIF spec, not speed)
+// Other - Reserved
+#define _ID_GPS_DESTDISTANCE_REF 0x0019
+#define EXIFTAGID_GPS_DESTDISTANCE_REF \
+  CONSTRUCT_TAGID(GPS_DESTDISTANCE_REF, _ID_GPS_DESTDISTANCE_REF)
+#define EXIFTAGTYPE_GPS_DESTDISTANCE_REF EXIF_ASCII
+// Distance to destination
+// Use EXIFTAGTYPE_GPS_DESTDISTANCE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_GPS_DESTDISTANCE 0x001a
+#define EXIFTAGID_GPS_DESTDISTANCE \
+  CONSTRUCT_TAGID(GPS_DESTDISTANCE, _ID_GPS_DESTDISTANCE)
+#define EXIFTAGTYPE_GPS_DESTDISTANCE EXIF_RATIONAL
+// Name of GPS processing method
+// Use EXIFTAGTYPE_GPS_PROCESSINGMETHOD as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_GPS_PROCESSINGMETHOD 0x001b
+#define EXIFTAGID_GPS_PROCESSINGMETHOD \
+  CONSTRUCT_TAGID(GPS_PROCESSINGMETHOD, _ID_GPS_PROCESSINGMETHOD)
+#define EXIFTAGTYPE_GPS_PROCESSINGMETHOD EXIF_UNDEFINED
+// Name of GPS area
+// Use EXIFTAGTYPE_GPS_AREAINFORMATION as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_GPS_AREAINFORMATION 0x001c
+#define EXIFTAGID_GPS_AREAINFORMATION \
+  CONSTRUCT_TAGID(GPS_AREAINFORMATION, _ID_GPS_AREAINFORMATION)
+#define EXIFTAGTYPE_GPS_AREAINFORMATION EXIF_UNDEFINED
+// GPS date
+// Use EXIFTAGTYPE_GPS_DATESTAMP as the exif_tag_type (EXIF_ASCII)
+// It should be 11 characters long including the null-terminating character.
+#define _ID_GPS_DATESTAMP 0x001d
+#define EXIFTAGID_GPS_DATESTAMP \
+  CONSTRUCT_TAGID(GPS_DATESTAMP, _ID_GPS_DATESTAMP)
+#define EXIFTAGTYPE_GPS_DATESTAMP EXIF_ASCII
+// GPS differential correction
+// Use EXIFTAGTYPE_GPS_DIFFERENTIAL as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+// 0 - Measurement without differential correction
+// 1 - Differential correction applied
+// Other - Reserved
+#define _ID_GPS_DIFFERENTIAL 0x001e
+#define EXIFTAGID_GPS_DIFFERENTIAL \
+  CONSTRUCT_TAGID(GPS_DIFFERENTIAL, _ID_GPS_DIFFERENTIAL)
+#define EXIFTAGTYPE_GPS_DIFFERENTIAL EXIF_SHORT
+// Image width
+// Use EXIFTAGTYPE_IMAGE_WIDTH as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_IMAGE_WIDTH 0x0100
+#define EXIFTAGID_IMAGE_WIDTH CONSTRUCT_TAGID(IMAGE_WIDTH, _ID_IMAGE_WIDTH)
+#define EXIFTAGTYPE_IMAGE_WIDTH EXIF_LONG
+// Image height
+// Use EXIFTAGTYPE_IMAGE_LENGTH as the exif_tag_type (EXIF_LONG)
+// (TIFF 6.0 allows SHORT or LONG for ImageLength; this header pins it to LONG,
+// so the comment matches the EXIFTAGTYPE_IMAGE_LENGTH define below.)
+// Count should be 1
+#define _ID_IMAGE_LENGTH 0x0101
+#define EXIFTAGID_IMAGE_LENGTH CONSTRUCT_TAGID(IMAGE_LENGTH, _ID_IMAGE_LENGTH)
+#define EXIFTAGTYPE_IMAGE_LENGTH EXIF_LONG
+// Number of bits per component
+// Use EXIFTAGTYPE_BITS_PER_SAMPLE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_BITS_PER_SAMPLE 0x0102
+#define EXIFTAGID_BITS_PER_SAMPLE \
+  CONSTRUCT_TAGID(BITS_PER_SAMPLE, _ID_BITS_PER_SAMPLE)
+#define EXIFTAGTYPE_BITS_PER_SAMPLE EXIF_SHORT
+// Compression scheme
+// Use EXIFTAGTYPE_COMPRESSION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_COMPRESSION 0x0103
+#define EXIFTAGID_COMPRESSION CONSTRUCT_TAGID(COMPRESSION, _ID_COMPRESSION)
+#define EXIFTAGTYPE_COMPRESSION EXIF_SHORT
+// Pixel composition
+// Use EXIFTAGTYPE_PHOTOMETRIC_INTERPRETATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_PHOTOMETRIC_INTERPRETATION 0x0106
+#define EXIFTAGID_PHOTOMETRIC_INTERPRETATION \
+  CONSTRUCT_TAGID(PHOTOMETRIC_INTERPRETATION, _ID_PHOTOMETRIC_INTERPRETATION)
+#define EXIFTAGTYPE_PHOTOMETRIC_INTERPRETATION EXIF_SHORT
+
+// Thresholding
+// Use EXIFTAGTYPE_THRESH_HOLDING as the exif_tag_type (EXIF_SHORT)
+//
+//1 = No dithering or halftoning
+//2 = Ordered dither or halftone
+//3 = Randomized dither
+#define _ID_THRESH_HOLDING 0x0107
+#define EXIFTAGID_THRESH_HOLDING \
+  CONSTRUCT_TAGID(THRESH_HOLDING, _ID_THRESH_HOLDING)
+#define EXIFTAGTYPE_THRESH_HOLDING EXIF_SHORT
+
+// Cell Width
+// Use EXIFTAGTYPE_CELL_WIDTH as the exif_tag_type (EXIF_SHORT)
+//
+#define _ID_CELL_WIDTH 0x0108
+#define EXIFTAGID_CELL_WIDTH CONSTRUCT_TAGID(CELL_WIDTH, _ID_CELL_WIDTH)
+#define EXIFTAGTYPE_CELL_WIDTH EXIF_SHORT
+// Cell Height
+// Use EXIFTAGTYPE_CELL_HEIGHT as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_CELL_HEIGHT 0x0109
+#define EXIFTAGID_CELL_HEIGHT CONSTRUCT_TAGID(CELL_HEIGHT, _ID_CELL_HEIGHT)
+#define EXIFTAGTYPE_CELL_HEIGHT EXIF_SHORT
+// Fill Order
+// Use EXIFTAGTYPE_FILL_ORDER as the exif_tag_type (EXIF_SHORT)
+// 	1 = Normal
+//  2 = Reversed
+#define _ID_FILL_ORDER 0x010A
+#define EXIFTAGID_FILL_ORDER CONSTRUCT_TAGID(FILL_ORDER, _ID_FILL_ORDER)
+#define EXIFTAGTYPE_FILL_ORDER EXIF_SHORT
+
+// DOCUMENT NAME
+// Use EXIFTAGTYPE_DOCUMENT_NAME as the exif_tag_type (EXIF_ASCII)
+//
+#define _ID_DOCUMENT_NAME 0x010D
+#define EXIFTAGID_DOCUMENT_NAME CONSTRUCT_TAGID(DOCUMENT_NAME, _ID_DOCUMENT_NAME)
+#define EXIFTAGTYPE_DOCUMENT_NAME EXIF_ASCII
+
+// Image title
+// Use EXIFTAGTYPE_IMAGE_DESCRIPTION as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_IMAGE_DESCRIPTION 0x010e
+#define EXIFTAGID_IMAGE_DESCRIPTION \
+  CONSTRUCT_TAGID(IMAGE_DESCRIPTION, _ID_IMAGE_DESCRIPTION)
+#define EXIFTAGTYPE_IMAGE_DESCRIPTION EXIF_ASCII
+// Image input equipment manufacturer
+// Use EXIFTAGTYPE_MAKE as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_MAKE 0x010f
+#define EXIFTAGID_MAKE CONSTRUCT_TAGID(MAKE, _ID_MAKE)
+#define EXIFTAGTYPE_MAKE EXIF_ASCII
+// Image input equipment model
+// Use EXIFTAGTYPE_MODEL as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_MODEL 0x0110
+#define EXIFTAGID_MODEL CONSTRUCT_TAGID(MODEL, _ID_MODEL)
+#define EXIFTAGTYPE_MODEL EXIF_ASCII
+// Image data location
+// Use EXIFTAGTYPE_STRIP_OFFSETS as the exif_tag_type (EXIF_LONG)
+// Count = StripsPerImage                    when PlanarConfiguration = 1
+//       = SamplesPerPixel * StripsPerImage  when PlanarConfiguration = 2
+#define _ID_STRIP_OFFSETS 0x0111
+#define EXIFTAGID_STRIP_OFFSETS \
+  CONSTRUCT_TAGID(STRIP_OFFSETS, _ID_STRIP_OFFSETS)
+#define EXIFTAGTYPE_STRIP_OFFSETS EXIF_LONG
+// Orientation of image
+// Use EXIFTAGTYPE_ORIENTATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_ORIENTATION 0x0112
+#define EXIFTAGID_ORIENTATION CONSTRUCT_TAGID(ORIENTATION, _ID_ORIENTATION)
+#define EXIFTAGTYPE_ORIENTATION EXIF_SHORT
+// Number of components
+// Use EXIFTAGTYPE_SAMPLES_PER_PIXEL as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_SAMPLES_PER_PIXEL 0x0115
+#define EXIFTAGID_SAMPLES_PER_PIXEL \
+  CONSTRUCT_TAGID(SAMPLES_PER_PIXEL, _ID_SAMPLES_PER_PIXEL)
+#define EXIFTAGTYPE_SAMPLES_PER_PIXEL EXIF_SHORT
+// Number of rows per strip
+// Use EXIFTAGTYPE_ROWS_PER_STRIP as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_ROWS_PER_STRIP 0x0116
+#define EXIFTAGID_ROWS_PER_STRIP \
+  CONSTRUCT_TAGID(ROWS_PER_STRIP, _ID_ROWS_PER_STRIP)
+#define EXIFTAGTYPE_ROWS_PER_STRIP EXIF_LONG
+// Bytes per compressed strip
+// Use EXIFTAGTYPE_STRIP_BYTE_COUNTS as the exif_tag_type (EXIF_LONG)
+// Count = StripsPerImage                    when PlanarConfiguration = 1
+//       = SamplesPerPixel * StripsPerImage  when PlanarConfiguration = 2
+#define _ID_STRIP_BYTE_COUNTS 0x0117
+#define EXIFTAGID_STRIP_BYTE_COUNTS \
+  CONSTRUCT_TAGID(STRIP_BYTE_COUNTS, _ID_STRIP_BYTE_COUNTS)
+#define EXIFTAGTYPE_STRIP_BYTE_COUNTS EXIF_LONG
+// MinSampleValue
+// Use EXIFTAGTYPE_MIN_SAMPLE_VALUE as the exif_tag_type (EXIF_SHORT)
+#define _ID_MIN_SAMPLE_VALUE 0x0118
+#define EXIFTAGID_MIN_SAMPLE_VALUE  \
+  CONSTRUCT_TAGID(MIN_SAMPLE_VALUE, _ID_MIN_SAMPLE_VALUE)
+#define EXIFTAGTYPE_MIN_SAMPLE_VALUE EXIF_SHORT
+// MaxSampleValue
+// Use EXIFTAGTYPE_MAX_SAMPLE_VALUE as the exif_tag_type (EXIF_SHORT)
+#define _ID_MAX_SAMPLE_VALUE 0x0119
+#define EXIFTAGID_MAX_SAMPLE_VALUE CONSTRUCT_TAGID(MAX_SAMPLE_VALUE, _ID_MAX_SAMPLE_VALUE)
+#define EXIFTAGTYPE_MAX_SAMPLE_VALUE EXIF_SHORT
+
+// Image resolution in width direction
+// Use EXIFTAGTYPE_X_RESOLUTION as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_X_RESOLUTION 0x011a
+#define EXIFTAGID_X_RESOLUTION \
+  CONSTRUCT_TAGID(X_RESOLUTION, _ID_X_RESOLUTION)
+#define EXIFTAGTYPE_X_RESOLUTION EXIF_RATIONAL
+// Image resolution in height direction
+// Use EXIFTAGTYPE_Y_RESOLUTION as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_Y_RESOLUTION 0x011b
+#define EXIFTAGID_Y_RESOLUTION \
+  CONSTRUCT_TAGID(Y_RESOLUTION, _ID_Y_RESOLUTION)
+#define EXIFTAGTYPE_Y_RESOLUTION EXIF_RATIONAL
+// Image data arrangement
+// Use EXIFTAGTYPE_PLANAR_CONFIGURATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_PLANAR_CONFIGURATION 0x011c
+#define EXIFTAGID_PLANAR_CONFIGURATION \
+  CONSTRUCT_TAGID(PLANAR_CONFIGURATION, _ID_PLANAR_CONFIGURATION)
+#define EXIFTAGTYPE_PLANAR_CONFIGURATION EXIF_SHORT
+// PageName
+// Use EXIFTAGTYPE_PAGE_NAME as the exif_tag_type (EXIF_ASCII)
+// Count should be 1
+#define _ID_PAGE_NAME 0x011d
+#define EXIFTAGID_PAGE_NAME CONSTRUCT_TAGID(PAGE_NAME, _ID_PAGE_NAME)
+#define EXIFTAGTYPE_PAGE_NAME EXIF_ASCII
+// XPosition
+// Use EXIFTAGTYPE_X_POSITION as the exif_tag_type (EXIF_RATIONAL)
+//
+#define _ID_X_POSITION 0x011e
+#define EXIFTAGID_X_POSITION CONSTRUCT_TAGID(X_POSITION, _ID_X_POSITION)
+#define EXIFTAGTYPE_X_POSITION EXIF_RATIONAL
+// YPosition
+// Use EXIFTAGTYPE_Y_POSITION as the exif_tag_type (EXIF_RATIONAL)
+//
+#define _ID_Y_POSITION 0x011f
+#define EXIFTAGID_Y_POSITION CONSTRUCT_TAGID(Y_POSITION, _ID_Y_POSITION)
+#define EXIFTAGTYPE_Y_POSITION EXIF_RATIONAL
+
+// FREE_OFFSET
+// Use EXIFTAGTYPE_FREE_OFFSET as the exif_tag_type (EXIF_LONG)
+//
+#define _ID_FREE_OFFSET 0x0120
+#define EXIFTAGID_FREE_OFFSET CONSTRUCT_TAGID(FREE_OFFSET, _ID_FREE_OFFSET)
+#define EXIFTAGTYPE_FREE_OFFSET EXIF_LONG
+// FREE_BYTE_COUNTS
+// Use EXIFTAGTYPE_FREE_BYTE_COUNTS as the exif_tag_type (EXIF_LONG)
+//
+#define _ID_FREE_BYTE_COUNTS 0x0121
+#define EXIFTAGID_FREE_BYTE_COUNTS \
+  CONSTRUCT_TAGID(FREE_BYTE_COUNTS, _ID_FREE_BYTE_COUNTS)
+#define EXIFTAGTYPE_FREE_BYTE_COUNTS EXIF_LONG
+
+// GrayResponseUnit
+// Use EXIFTAGTYPE_GRAY_RESPONSE_UNIT as the exif_tag_type (EXIF_SHORT)
+//
+#define _ID_GRAY_RESPONSE_UNIT 0x0122
+#define EXIFTAGID_GRAY_RESPONSE_UNIT \
+  CONSTRUCT_TAGID(GRAY_RESPONSE_UNIT, _ID_GRAY_RESPONSE_UNIT)
+#define EXIFTAGTYPE_GRAY_RESPONSE_UNIT EXIF_SHORT
+// GrayResponseCurve
+// Use EXIFTAGTYPE_GRAY_RESPONSE_CURVE  as the exif_tag_type (EXIF_SHORT)
+//
+#define _ID_GRAY_RESPONSE_CURVE 0x0123
+#define EXIFTAGID_GRAY_RESPONSE_CURVE \
+  CONSTRUCT_TAGID(GRAY_RESPONSE_CURVE , _ID_GRAY_RESPONSE_CURVE )
+#define EXIFTAGTYPE_GRAY_RESPONSE_CURVE EXIF_SHORT
+
+// T4_OPTION
+// Use EXIFTAGTYPE_T4_OPTION as the exif_tag_type (EXIF_LONG)
+//
+#define _ID_T4_OPTION  0x0124
+#define EXIFTAGID_T4_OPTION CONSTRUCT_TAGID(T4_OPTION, _ID_T4_OPTION)
+#define EXIFTAGTYPE_T4_OPTION EXIF_LONG
+// T6_OPTION
+// Use EXIFTAGTYPE_T6_OPTION as the exif_tag_type (EXIF_LONG)
+//
+#define _ID_T6_OPTION 0x0125
+#define EXIFTAGID_T6_OPTION CONSTRUCT_TAGID(T6_OPTION, _ID_T6_OPTION)
+#define EXIFTAGTYPE_T6_OPTION EXIF_LONG
+
+// Unit of X and Y resolution
+// Use EXIFTAGTYPE_RESOLUTION_UNIT as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_RESOLUTION_UNIT 0x0128
+#define EXIFTAGID_RESOLUTION_UNIT \
+  CONSTRUCT_TAGID(RESOLUTION_UNIT, _ID_RESOLUTION_UNIT)
+#define EXIFTAGTYPE_RESOLUTION_UNIT EXIF_SHORT
+
+// Page Number
+// Use EXIFTAGTYPE_PAGE_NUMBER  as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_PAGE_NUMBER 0x0129
+#define EXIFTAGID_PAGE_NUMBER CONSTRUCT_TAGID(PAGE_NUMBER, _ID_PAGE_NUMBER)
+#define EXIFTAGTYPE_PAGE_NUMBER EXIF_SHORT
+// Transfer function
+// Use EXIFTAGTYPE_TRANSFER_FUNCTION as the exif_tag_type (EXIF_SHORT)
+// Count should be 3*256
+#define _ID_TRANSFER_FUNCTION 0x012d
+#define EXIFTAGID_TRANSFER_FUNCTION \
+  CONSTRUCT_TAGID(TRANSFER_FUNCTION, _ID_TRANSFER_FUNCTION)
+#define EXIFTAGTYPE_TRANSFER_FUNCTION EXIF_SHORT
+// Software used
+// Use EXIFTAGTYPE_SOFTWARE as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_SOFTWARE 0x0131
+#define EXIFTAGID_SOFTWARE CONSTRUCT_TAGID(SOFTWARE, _ID_SOFTWARE)
+#define EXIFTAGTYPE_SOFTWARE EXIF_ASCII
+// File change date and time
+// Use EXIFTAGTYPE_DATE_TIME as the exif_tag_type (EXIF_ASCII)
+// Count should be 20
+#define _ID_DATE_TIME 0x0132
+#define EXIFTAGID_DATE_TIME CONSTRUCT_TAGID(DATE_TIME, _ID_DATE_TIME)
+#define EXIFTAGTYPE_DATE_TIME EXIF_ASCII
+// ARTIST, person who created this image
+// Use EXIFTAGTYPE_ARTIST as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_ARTIST 0x013b
+#define EXIFTAGID_ARTIST CONSTRUCT_TAGID(ARTIST, _ID_ARTIST)
+#define EXIFTAGTYPE_ARTIST EXIF_ASCII
+// Host Computer Name
+// Use EXIFTAGTYPE_HOST_COMPUTER as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_HOST_COMPUTER 0x013c
+#define EXIFTAGID_HOST_COMPUTER \
+  CONSTRUCT_TAGID(HOST_COMPUTER , _ID_HOST_COMPUTER )
+#define EXIFTAGTYPE_HOST_COMPUTER EXIF_ASCII
+// Predictor
+// Use EXIFTAGTYPE_PREDICTOR as the exif_tag_type (EXIF_SHORT)
+// Count can be any
+#define _ID_PREDICTOR 0x013d
+#define EXIFTAGID_PREDICTOR CONSTRUCT_TAGID(PREDICTOR , _ID_PREDICTOR )
+#define EXIFTAGTYPE_PREDICTOR EXIF_SHORT
+// White point chromaticity
+// Use EXIFTAGTYPE_WHITE_POINT as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 2
+#define _ID_WHITE_POINT 0x013e
+#define EXIFTAGID_WHITE_POINT CONSTRUCT_TAGID(WHITE_POINT, _ID_WHITE_POINT)
+#define EXIFTAGTYPE_WHITE_POINT EXIF_RATIONAL
+// Chromaticities of primaries
+// Use EXIFTAGTYPE_PRIMARY_CHROMATICITIES as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 6
+#define _ID_PRIMARY_CHROMATICITIES 0x013f
+#define EXIFTAGID_PRIMARY_CHROMATICITIES \
+  CONSTRUCT_TAGID(PRIMARY_CHROMATICITIES, _ID_PRIMARY_CHROMATICITIES)
+#define EXIFTAGTYPE_PRIMARY_CHROMATICITIES EXIF_RATIONAL
+
+// COLOR_MAP
+// Use EXIFTAGTYPE_COLOR_MAP as the exif_tag_type (EXIF_SHORT)
+// Count should be 6
+#define _ID_COLOR_MAP 0x0140
+#define EXIFTAGID_COLOR_MAP CONSTRUCT_TAGID(COLOR_MAP, _ID_COLOR_MAP)
+#define EXIFTAGTYPE_COLOR_MAP EXIF_SHORT
+// HALFTONE_HINTS
+// Use EXIFTAGTYPE_HALFTONE_HINTS as the exif_tag_type (EXIF_SHORT)
+// Count should be 6
+#define _ID_HALFTONE_HINTS 0x0141
+#define EXIFTAGID_HALFTONE_HINTS \
+  CONSTRUCT_TAGID(HALFTONE_HINTS, _ID_HALFTONE_HINTS)
+#define EXIFTAGTYPE_HALFTONE_HINTS EXIF_SHORT
+
+// TILE_WIDTH
+// Use EXIFTAGTYPE_TILE_WIDTH as the exif_tag_type (EXIF_LONG)
+// Count should be 6
+#define _ID_TILE_WIDTH 0x0142
+#define EXIFTAGID_TILE_WIDTH CONSTRUCT_TAGID(TILE_WIDTH, _ID_TILE_WIDTH)
+#define EXIFTAGTYPE_TILE_WIDTH EXIF_LONG
+// TILE_LENGTH
+// Use EXIFTAGTYPE_TILE_LENGTH  as the exif_tag_type (EXIF_LONG)
+// Count should be 6
+#define _ID_TILE_LENGTH 0x0143
+#define EXIFTAGID_TILE_LENGTH CONSTRUCT_TAGID(TILE_LENGTH , _ID_TILE_LENGTH )
+#define EXIFTAGTYPE_TILE_LENGTH EXIF_LONG
+// TILE_OFFSET
+// Use EXIFTAGTYPE_TILE_OFFSET as the exif_tag_type (EXIF_LONG)
+//
+#define _ID_TILE_OFFSET 0x0144
+#define EXIFTAGID_TILE_OFFSET CONSTRUCT_TAGID(TILE_OFFSET , _ID_TILE_OFFSET )
+#define EXIFTAGTYPE_TILE_OFFSET EXIF_LONG
+// Tile Byte Counts
+// Use EXIFTAGTYPE_TILE_BYTE_COUNTS as the exif_tag_type (EXIF_LONG)
+// (Number of compressed bytes in each tile; one value per tile.)
+#define _ID_TILE_BYTE_COUNTS 0x0145
+#define EXIFTAGID_TILE_BYTE_COUNTS  \
+  CONSTRUCT_TAGID(TILE_BYTE_COUNTS  , _ID_TILE_BYTE_COUNTS  )
+#define EXIFTAGTYPE_TILE_BYTE_COUNTS EXIF_LONG
+
+// INK_SET
+// Use EXIFTAGTYPE_INK_SET as the exif_tag_type (EXIF_SHORT)
+// (Set of inks used in a separated image, e.g. CMYK.)
+#define _ID_INK_SET 0x014c
+#define EXIFTAGID_INK_SET CONSTRUCT_TAGID(INK_SET , _ID_INK_SET )
+#define EXIFTAGTYPE_INK_SET EXIF_SHORT
+// INK_NAMES
+// Use EXIFTAGTYPE_INK_NAMES  as the exif_tag_type (EXIF_ASCII)
+// Count should be 6
+#define _ID_INK_NAMES 0x014D
+#define EXIFTAGID_INK_NAMES CONSTRUCT_TAGID(INK_NAMES , _ID_INK_NAMES)
+#define EXIFTAGTYPE_INK_NAMES EXIF_ASCII
+// NUMBER_OF_INKS
+// Use EXIFTAGTYPE_NUMBER_OF_INKS  as the exif_tag_type (EXIF_SHORT)
+// Count should be 6
+#define _ID_NUMBER_OF_INKS 0x014e
+#define EXIFTAGID_NUMBER_OF_INKS \
+  CONSTRUCT_TAGID(NUMBER_OF_INKS , _ID_NUMBER_OF_INKS )
+#define EXIFTAGTYPE_NUMBER_OF_INKS EXIF_SHORT
+
+// DOT_RANGE
+// Use EXIFTAGTYPE_DOT_RANGE  as the exif_tag_type (EXIF_ASCII)
+// Count should be 6
+#define _ID_DOT_RANGE 0x0150
+#define EXIFTAGID_DOT_RANGE CONSTRUCT_TAGID(DOT_RANGE , _ID_DOT_RANGE )
+#define EXIFTAGTYPE_DOT_RANGE EXIF_ASCII
+
+// TARGET_PRINTER
+// Use EXIFTAGTYPE_TARGET_PRINTER  as the exif_tag_type (EXIF_ASCII)
+// Count should be 6
+#define _ID_TARGET_PRINTER 0x0151
+#define EXIFTAGID_TARGET_PRINTER \
+  CONSTRUCT_TAGID(TARGET_PRINTER , _ID_TARGET_PRINTER)
+#define EXIFTAGTYPE_TARGET_PRINTER EXIF_ASCII
+// EXTRA_SAMPLES
+// Use EXIFTAGTYPE_EXTRA_SAMPLES as the exif_tag_type (EXIF_SHORT)
+// Count should be 6
+#define _ID_EXTRA_SAMPLES 0x0152
+#define EXIFTAGID_EXTRA_SAMPLES \
+  CONSTRUCT_TAGID(EXTRA_SAMPLES , _ID_EXTRA_SAMPLES )
+#define EXIFTAGTYPE_EXTRA_SAMPLES EXIF_SHORT
+
+// SAMPLE_FORMAT
+// Use EXIFTAGTYPE_SAMPLE_FORMAT  as the exif_tag_type (EXIF_SHORT)
+// Count should be 6
+#define _ID_SAMPLE_FORMAT 0x0153
+#define EXIFTAGID_SAMPLE_FORMAT \
+  CONSTRUCT_TAGID(SAMPLE_FORMAT , _ID_SAMPLE_FORMAT )
+#define EXIFTAGTYPE_SAMPLE_FORMAT EXIF_SHORT
+
+// Table of values that extends the range of the transfer function.
+// Use EXIFTAGTYPE_TRANSFER_RANGE as the exif_tag_type (EXIF_SHORT)
+// Count should be 6
+#define _ID_TRANSFER_RANGE 0x0156
+#define EXIFTAGID_TRANSFER_RANGE \
+  CONSTRUCT_TAGID(TRANSFER_RANGE , _ID_TRANSFER_RANGE )
+#define EXIFTAGTYPE_TRANSFER_RANGE EXIF_SHORT
+
+// JPEG compression process.
+// Use EXIFTAGTYPE_JPEG_PROC as the exif_tag_type (EXIF_SHORT)
+//
+#define _ID_JPEG_PROC 0x0200
+#define EXIFTAGID_JPEG_PROC CONSTRUCT_TAGID(JPEG_PROC , _ID_JPEG_PROC )
+#define EXIFTAGTYPE_JPEG_PROC EXIF_SHORT
+
+
+// Offset to JPEG SOI
+// Use EXIFTAGTYPE_JPEG_INTERCHANGE_FORMAT as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_JPEG_INTERCHANGE_FORMAT 0x0201
+#define EXIFTAGID_JPEG_INTERCHANGE_FORMAT \
+  CONSTRUCT_TAGID(JPEG_INTERCHANGE_FORMAT, _ID_JPEG_INTERCHANGE_FORMAT)
+#define EXIFTAGTYPE_JPEG_INTERCHANGE_FORMAT EXIF_LONG
+// Bytes of JPEG data
+// Use EXIFTAGTYPE_JPEG_INTERCHANGE_FORMAT_LENGTH as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_JPEG_INTERCHANGE_FORMAT_LENGTH 0x0202
+#define EXIFTAGID_JPEG_INTERCHANGE_FORMAT_LENGTH \
+  CONSTRUCT_TAGID(JPEG_INTERCHANGE_FORMAT_LENGTH, \
+  _ID_JPEG_INTERCHANGE_FORMAT_LENGTH)
+#define EXIFTAGTYPE_JPEG_INTERCHANGE_FORMAT_LENGTH EXIF_LONG
+
+// Length of the restart interval.
+// Use EXIFTAGTYPE_JPEG_RESTART_INTERVAL as the exif_tag_type (EXIF_SHORT)
+// Count is undefined
+#define _ID_JPEG_RESTART_INTERVAL 0x0203
+#define EXIFTAGID_JPEG_RESTART_INTERVAL \
+  CONSTRUCT_TAGID(JPEG_RESTART_INTERVAL, _ID_JPEG_RESTART_INTERVAL)
+#define EXIFTAGTYPE_JPEG_RESTART_INTERVAL EXIF_SHORT
+
+// JPEGLosslessPredictors
+// Use EXIFTAGTYPE_JPEG_LOSSLESS_PREDICTORS as the exif_tag_type (EXIF_SHORT)
+// Count is undefined
+#define _ID_JPEG_LOSSLESS_PREDICTORS 0x0205
+#define EXIFTAGID_JPEG_LOSSLESS_PREDICTORS  \
+  CONSTRUCT_TAGID(JPEG_LOSSLESS_PREDICTORS, _ID_JPEG_LOSSLESS_PREDICTORS)
+#define EXIFTAGTYPE_JPEG_LOSSLESS_PREDICTORS EXIF_SHORT
+
+// JPEGPointTransforms
+// Use EXIFTAGTYPE_JPEG_POINT_TRANSFORMS as the exif_tag_type (EXIF_SHORT)
+// Count is undefined
+#define _ID_JPEG_POINT_TRANSFORMS 0x0206
+#define EXIFTAGID_JPEG_POINT_TRANSFORMS  \
+  CONSTRUCT_TAGID(JPEG_POINT_TRANSFORMS, _ID_JPEG_POINT_TRANSFORMS)
+#define EXIFTAGTYPE_JPEG_POINT_TRANSFORMS EXIF_SHORT
+
+// JPEG_Q_TABLES
+// Use EXIFTAGTYPE_JPEG_Q_TABLES as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_JPEG_Q_TABLES 0x0207
+#define EXIFTAGID_JPEG_Q_TABLES \
+  CONSTRUCT_TAGID(JPEG_Q_TABLES, _ID_JPEG_Q_TABLES)
+#define EXIFTAGTYPE_JPEG_Q_TABLES EXIF_LONG
+// JPEG_DC_TABLES
+// Use EXIFTAGTYPE_JPEG_DC_TABLES as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_JPEG_DC_TABLES 0x0208
+#define EXIFTAGID_JPEG_DC_TABLES \
+  CONSTRUCT_TAGID(JPEG_DC_TABLES, _ID_JPEG_DC_TABLES)
+#define EXIFTAGTYPE_JPEG_DC_TABLES EXIF_LONG
+// JPEG_AC_TABLES
+// Use EXIFTAGTYPE_JPEG_AC_TABLES as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_JPEG_AC_TABLES 0x0209
+#define EXIFTAGID_JPEG_AC_TABLES \
+  CONSTRUCT_TAGID(JPEG_AC_TABLES, _ID_JPEG_AC_TABLES)
+#define EXIFTAGTYPE_JPEG_AC_TABLES EXIF_LONG
+
+// Color space transformation matrix coefficients
+// Use EXIFTAGTYPE_YCBCR_COEFFICIENTS as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_YCBCR_COEFFICIENTS 0x0211
+#define EXIFTAGID_YCBCR_COEFFICIENTS \
+  CONSTRUCT_TAGID(YCBCR_COEFFICIENTS, _ID_YCBCR_COEFFICIENTS)
+#define EXIFTAGTYPE_YCBCR_COEFFICIENTS EXIF_RATIONAL
+// Subsampling ratio of Y to C
+// Use EXIFTAGTYPE_YCBCR_SUB_SAMPLING as the exif_tag_type (EXIF_SHORT)
+// Count should be 2
+#define _ID_YCBCR_SUB_SAMPLING 0x0212
+#define EXIFTAGID_YCBCR_SUB_SAMPLING  \
+  CONSTRUCT_TAGID(YCBCR_SUB_SAMPLING, _ID_YCBCR_SUB_SAMPLING)
+#define EXIFTAGTYPE_YCBCR_SUB_SAMPLING EXIF_SHORT
+// Y and C positioning
+// Use EXIFTAGTYPE_YCBCR_POSITIONING as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_YCBCR_POSITIONING 0x0213
+#define EXIFTAGID_YCBCR_POSITIONING  \
+  CONSTRUCT_TAGID(YCBCR_POSITIONING, _ID_YCBCR_POSITIONING)
+#define EXIFTAGTYPE_YCBCR_POSITIONING EXIF_SHORT
+// Pair of black and white reference values
+// Use EXIFTAGTYPE_REFERENCE_BLACK_WHITE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 6
+#define _ID_REFERENCE_BLACK_WHITE 0x0214
+#define EXIFTAGID_REFERENCE_BLACK_WHITE \
+  CONSTRUCT_TAGID(REFERENCE_BLACK_WHITE, _ID_REFERENCE_BLACK_WHITE)
+#define EXIFTAGTYPE_REFERENCE_BLACK_WHITE EXIF_RATIONAL
+// GAMMA
+// Use EXIFTAGTYPE_GAMMA as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 6
+#define _ID_GAMMA 0x0301
+#define EXIFTAGID_GAMMA CONSTRUCT_TAGID(GAMMA, _ID_GAMMA)
+#define EXIFTAGTYPE_GAMMA EXIF_RATIONAL
+// Null-terminated character string that identifies an ICC profile.
+// Use EXIFTAGTYPE_ICC_PROFILE_DESCRIPTOR as the exif_tag_type (EXIF_ASCII)
+// Count should be 6
+#define _ID_ICC_PROFILE_DESCRIPTOR 0x0302
+#define EXIFTAGID_ICC_PROFILE_DESCRIPTOR \
+  CONSTRUCT_TAGID(ICC_PROFILE_DESCRIPTOR, _ID_ICC_PROFILE_DESCRIPTOR)
+#define EXIFTAGTYPE_ICC_PROFILE_DESCRIPTOR EXIF_ASCII
+// SRGB_RENDERING_INTENT
+// Use EXIFTAGTYPE_SRGB_RENDERING_INTENT as the exif_tag_type (EXIF_BYTE)
+// Count should be 6
+#define _ID_SRGB_RENDERING_INTENT 0x0303
+#define EXIFTAGID_SRGB_RENDERING_INTENT \
+  CONSTRUCT_TAGID(SRGB_RENDERING_INTENT, _ID_SRGB_RENDERING_INTENT)
+#define EXIFTAGTYPE_SRGB_RENDERING_INTENT EXIF_BYTE
+
+// Null-terminated character string that specifies the title of the image.
+// Use EXIFTAGTYPE_IMAGE_TITLE as the exif_tag_type (EXIF_ASCII		)
+//
+#define _ID_IMAGE_TITLE 0x0320
+#define EXIFTAGID_IMAGE_TITLE CONSTRUCT_TAGID(IMAGE_TITLE, _ID_IMAGE_TITLE)
+#define EXIFTAGTYPE_IMAGE_TITLE EXIF_ASCII
+
+// Copyright holder
+// Use EXIFTAGTYPE_COPYRIGHT as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_COPYRIGHT 0x8298
+#define EXIFTAGID_COPYRIGHT CONSTRUCT_TAGID(COPYRIGHT, _ID_COPYRIGHT)
+#define EXIFTAGTYPE_COPYRIGHT EXIF_ASCII
+// New Subfile Type (TIFF 6.0 tag 0x00fe, NewSubfileType)
+// Use EXIFTAGTYPE_NEW_SUBFILE_TYPE as the exif_tag_type (EXIF_SHORT)
+// NOTE(review): TIFF 6.0 defines NewSubfileType as LONG, but this header
+// declares it EXIF_SHORT — confirm against the tag writer before changing.
+// Count can be any
+#define _ID_NEW_SUBFILE_TYPE 0x00fe
+#define EXIFTAGID_NEW_SUBFILE_TYPE \
+  CONSTRUCT_TAGID(NEW_SUBFILE_TYPE, _ID_NEW_SUBFILE_TYPE)
+#define EXIFTAGTYPE_NEW_SUBFILE_TYPE EXIF_SHORT
+
+// Old-style Subfile Type (TIFF 6.0 tag 0x00ff, SubfileType; deprecated in
+// favor of NewSubfileType above)
+// Use EXIFTAGTYPE_SUBFILE_TYPE as the exif_tag_type (EXIF_LONG)
+// NOTE(review): TIFF 6.0 defines SubfileType as SHORT, but this header
+// declares it EXIF_LONG — confirm against the tag writer before changing.
+// Count can be any
+#define _ID_SUBFILE_TYPE 0x00ff
+#define EXIFTAGID_SUBFILE_TYPE CONSTRUCT_TAGID(SUBFILE_TYPE, _ID_SUBFILE_TYPE)
+#define EXIFTAGTYPE_SUBFILE_TYPE EXIF_LONG
+
+// Image width (of thumbnail)
+// Use EXIFTAGTYPE_TN_IMAGE_WIDTH as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_TN_IMAGE_WIDTH 0x0100
+#define EXIFTAGID_TN_IMAGE_WIDTH \
+  CONSTRUCT_TAGID(TN_IMAGE_WIDTH, _ID_TN_IMAGE_WIDTH)
+#define EXIFTAGTYPE_TN_IMAGE_WIDTH EXIF_LONG
+// Image height (of thumbnail)
+// Use EXIFTAGTYPE_TN_IMAGE_LENGTH as the exif_tag_type (EXIF_LONG)
+// (TIFF 6.0 allows SHORT or LONG for ImageLength; this header pins it to
+// LONG, matching the EXIFTAGTYPE_TN_IMAGE_LENGTH define below.)
+// Count should be 1
+#define _ID_TN_IMAGE_LENGTH 0x0101
+#define EXIFTAGID_TN_IMAGE_LENGTH \
+  CONSTRUCT_TAGID(TN_IMAGE_LENGTH, _ID_TN_IMAGE_LENGTH)
+#define EXIFTAGTYPE_TN_IMAGE_LENGTH EXIF_LONG
+// Number of bits per component (of thumbnail)
+// Use EXIFTAGTYPE_TN_BITS_PER_SAMPLE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_BITS_PER_SAMPLE 0x0102
+#define EXIFTAGID_TN_BITS_PER_SAMPLE \
+  CONSTRUCT_TAGID(TN_BITS_PER_SAMPLE, _ID_TN_BITS_PER_SAMPLE)
+#define EXIFTAGTYPE_TN_BITS_PER_SAMPLE EXIF_SHORT
+// Compression scheme (of thumbnail)
+// Use EXIFTAGTYPE_TN_COMPRESSION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_COMPRESSION 0x0103
+#define EXIFTAGID_TN_COMPRESSION \
+  CONSTRUCT_TAGID(TN_COMPRESSION, _ID_TN_COMPRESSION)
+#define EXIFTAGTYPE_TN_COMPRESSION EXIF_SHORT
+// Pixel composition (of thumbnail)
+// Use EXIFTAGTYPE_TN_PHOTOMETRIC_INTERPRETATION as the
+// exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_PHOTOMETRIC_INTERPRETATION 0x0106
+#define EXIFTAGID_TN_PHOTOMETRIC_INTERPRETATION \
+  CONSTRUCT_TAGID(TN_PHOTOMETRIC_INTERPRETATION, \
+  _ID_TN_PHOTOMETRIC_INTERPRETATION)
+#define EXIFTAGTYPE_TN_PHOTOMETRIC_INTERPRETATION EXIF_SHORT
+// Image title (of thumbnail)
+// Use EXIFTAGTYPE_TN_IMAGE_DESCRIPTION as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TN_IMAGE_DESCRIPTION 0x010e
+#define EXIFTAGID_TN_IMAGE_DESCRIPTION \
+  CONSTRUCT_TAGID(TN_IMAGE_DESCRIPTION, _ID_TN_IMAGE_DESCRIPTION)
+#define EXIFTAGTYPE_TN_IMAGE_DESCRIPTION EXIF_ASCII
+// Image input equipment manufacturer (of thumbnail)
+// Use EXIFTAGTYPE_TN_MAKE as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TN_MAKE 0x010f
+#define EXIFTAGID_TN_MAKE CONSTRUCT_TAGID(TN_MAKE, _ID_TN_MAKE)
+#define EXIFTAGTYPE_TN_MAKE EXIF_ASCII
+// Image input equipment model (of thumbnail)
+// Use EXIFTAGTYPE_TN_MODEL as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TN_MODEL 0x0110
+#define EXIFTAGID_TN_MODEL CONSTRUCT_TAGID(TN_MODEL, _ID_TN_MODEL)
+#define EXIFTAGTYPE_TN_MODEL EXIF_ASCII
+// Image data location (of thumbnail)
+// Use EXIFTAGTYPE_TN_STRIP_OFFSETS as the exif_tag_type (EXIF_LONG)
+// Count = StripsPerImage                    when PlanarConfiguration = 1
+//       = SamplesPerPixel * StripsPerImage  when PlanarConfiguration = 2
+// NOTE(review): first CONSTRUCT_TAGID argument is STRIP_TN_OFFSETS, not
+// TN_STRIP_OFFSETS as the sibling TN_* tags use — presumably the enum
+// elsewhere is really named STRIP_TN_OFFSETS; verify before renaming.
+#define _ID_TN_STRIP_OFFSETS 0x0111
+#define EXIFTAGID_TN_STRIP_OFFSETS \
+  CONSTRUCT_TAGID(STRIP_TN_OFFSETS, _ID_TN_STRIP_OFFSETS)
+#define EXIFTAGTYPE_TN_STRIP_OFFSETS EXIF_LONG
+// Orientation of image (of thumbnail)
+// Use EXIFTAGTYPE_TN_ORIENTATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_ORIENTATION 0x0112
+#define EXIFTAGID_TN_ORIENTATION \
+  CONSTRUCT_TAGID(TN_ORIENTATION, _ID_TN_ORIENTATION)
+#define EXIFTAGTYPE_TN_ORIENTATION EXIF_SHORT
+// Number of components (of thumbnail)
+// Use EXIFTAGTYPE_TN_SAMPLES_PER_PIXEL as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_SAMPLES_PER_PIXEL 0x0115
+#define EXIFTAGID_TN_SAMPLES_PER_PIXEL \
+  CONSTRUCT_TAGID(TN_SAMPLES_PER_PIXEL, _ID_TN_SAMPLES_PER_PIXEL)
+#define EXIFTAGTYPE_TN_SAMPLES_PER_PIXEL EXIF_SHORT
+// Number of rows per strip (of thumbnail)
+// Use EXIFTAGTYPE_TN_ROWS_PER_STRIP as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_TN_ROWS_PER_STRIP 0x0116
+#define EXIFTAGID_TN_ROWS_PER_STRIP \
+  CONSTRUCT_TAGID(TN_ROWS_PER_STRIP, _ID_TN_ROWS_PER_STRIP)
+#define EXIFTAGTYPE_TN_ROWS_PER_STRIP EXIF_LONG
+// Bytes per compressed strip (of thumbnail)
+// Use EXIFTAGTYPE_TN_STRIP_BYTE_COUNTS as the exif_tag_type (EXIF_LONG)
+// Count = StripsPerImage                    when PlanarConfiguration = 1
+//       = SamplesPerPixel * StripsPerImage  when PlanarConfiguration = 2
+#define _ID_TN_STRIP_BYTE_COUNTS 0x0117
+#define EXIFTAGID_TN_STRIP_BYTE_COUNTS \
+  CONSTRUCT_TAGID(TN_STRIP_BYTE_COUNTS, _ID_TN_STRIP_BYTE_COUNTS)
+#define EXIFTAGTYPE_TN_STRIP_BYTE_COUNTS EXIF_LONG
+// Image resolution in width direction (of thumbnail)
+// Use EXIFTAGTYPE_TN_X_RESOLUTION as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_TN_X_RESOLUTION 0x011a
+#define EXIFTAGID_TN_X_RESOLUTION \
+  CONSTRUCT_TAGID(TN_X_RESOLUTION, _ID_TN_X_RESOLUTION)
+#define EXIFTAGTYPE_TN_X_RESOLUTION EXIF_RATIONAL
+// Image resolution in height direction  (of thumbnail)
+// Use EXIFTAGTYPE_TN_Y_RESOLUTION as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_TN_Y_RESOLUTION 0x011b
+#define EXIFTAGID_TN_Y_RESOLUTION \
+  CONSTRUCT_TAGID(TN_Y_RESOLUTION, _ID_TN_Y_RESOLUTION)
+#define EXIFTAGTYPE_TN_Y_RESOLUTION EXIF_RATIONAL
+// Image data arrangement (of thumbnail)
+// Use EXIFTAGTYPE_TN_PLANAR_CONFIGURATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_PLANAR_CONFIGURATION 0x011c
+#define EXIFTAGID_TN_PLANAR_CONFIGURATION \
+  CONSTRUCT_TAGID(TN_PLANAR_CONFIGURATION, _ID_TN_PLANAR_CONFIGURATION)
+#define EXIFTAGTYPE_TN_PLANAR_CONFIGURATION EXIF_SHORT
+// Unit of X and Y resolution (of thumbnail)
+// Use EXIFTAGTYPE_TN_RESOLUTION_UNIT as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+// (0x128 == 0x0128; only the 4-digit hex style of the rest of this header
+// is missing here — the value itself is correct.)
+#define _ID_TN_RESOLUTION_UNIT 0x128
+#define EXIFTAGID_TN_RESOLUTION_UNIT \
+  CONSTRUCT_TAGID(TN_RESOLUTION_UNIT, _ID_TN_RESOLUTION_UNIT)
+#define EXIFTAGTYPE_TN_RESOLUTION_UNIT EXIF_SHORT
+// Transfer function (of thumbnail)
+// Use EXIFTAGTYPE_TN_TRANSFER_FUNCTION as the exif_tag_type (EXIF_SHORT)
+// Count should be 3*256
+#define _ID_TN_TRANSFER_FUNCTION 0x012d
+#define EXIFTAGID_TN_TRANSFER_FUNCTION \
+  CONSTRUCT_TAGID(TN_TRANSFER_FUNCTION, _ID_TN_TRANSFER_FUNCTION)
+#define EXIFTAGTYPE_TN_TRANSFER_FUNCTION EXIF_SHORT
+// Software used (of thumbnail)
+// Use EXIFTAGTYPE_TN_SOFTWARE as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TN_SOFTWARE 0x0131
+#define EXIFTAGID_TN_SOFTWARE CONSTRUCT_TAGID(TN_SOFTWARE, _ID_TN_SOFTWARE)
+#define EXIFTAGTYPE_TN_SOFTWARE EXIF_ASCII
+// File change date and time (of thumbnail)
+// Use EXIFTAGTYPE_TN_DATE_TIME as the exif_tag_type (EXIF_ASCII)
+// Count should be 20
+#define _ID_TN_DATE_TIME 0x0132
+#define EXIFTAGID_TN_DATE_TIME CONSTRUCT_TAGID(TN_DATE_TIME, _ID_TN_DATE_TIME)
+#define EXIFTAGTYPE_TN_DATE_TIME EXIF_ASCII
+// ARTIST, person who created this image (of thumbnail)
+// Use EXIFTAGTYPE_TN_ARTIST as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TN_ARTIST 0x013b
+#define EXIFTAGID_TN_ARTIST CONSTRUCT_TAGID(TN_ARTIST, _ID_TN_ARTIST)
+#define EXIFTAGTYPE_TN_ARTIST EXIF_ASCII
+// White point chromaticity (of thumbnail)
+// Use EXIFTAGTYPE_TN_WHITE_POINT as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 2
+#define _ID_TN_WHITE_POINT 0x013e
+#define EXIFTAGID_TN_WHITE_POINT \
+  CONSTRUCT_TAGID(TN_WHITE_POINT, _ID_TN_WHITE_POINT)
+#define EXIFTAGTYPE_TN_WHITE_POINT EXIF_RATIONAL
+// Chromaticities of primaries (of thumbnail)
+// Use EXIFTAGTYPE_TN_PRIMARY_CHROMATICITIES as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 6
+#define _ID_TN_PRIMARY_CHROMATICITIES 0x013f
+#define EXIFTAGID_TN_PRIMARY_CHROMATICITIES \
+  CONSTRUCT_TAGID(TN_PRIMARY_CHROMATICITIES, _ID_TN_PRIMARY_CHROMATICITIES)
+#define EXIFTAGTYPE_TN_PRIMARY_CHROMATICITIES EXIF_RATIONAL
+// Offset to JPEG SOI (of thumbnail)
+// Use EXIFTAGTYPE_TN_JPEGINTERCHANGE_FORMAT as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_TN_JPEGINTERCHANGE_FORMAT 0x0201
+#define EXIFTAGID_TN_JPEGINTERCHANGE_FORMAT \
+  CONSTRUCT_TAGID(TN_JPEGINTERCHANGE_FORMAT, _ID_TN_JPEGINTERCHANGE_FORMAT)
+#define EXIFTAGTYPE_TN_JPEGINTERCHANGE_FORMAT EXIF_LONG
+// Bytes of JPEG data (of thumbnail)
+// Use EXIFTAGTYPE_TN_JPEGINTERCHANGE_FORMAT_L as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_TN_JPEGINTERCHANGE_FORMAT_L 0x0202
+#define EXIFTAGID_TN_JPEGINTERCHANGE_FORMAT_L \
+  CONSTRUCT_TAGID(TN_JPEGINTERCHANGE_FORMAT_L, _ID_TN_JPEGINTERCHANGE_FORMAT_L)
+#define EXIFTAGTYPE_TN_JPEGINTERCHANGE_FORMAT_L EXIF_LONG
+// Color space transformation matrix coefficients (of thumbnail)
+// Use EXIFTAGTYPE_TN_YCBCR_COEFFICIENTS as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_TN_YCBCR_COEFFICIENTS 0x0211
+#define EXIFTAGID_TN_YCBCR_COEFFICIENTS \
+  CONSTRUCT_TAGID(TN_YCBCR_COEFFICIENTS, _ID_TN_YCBCR_COEFFICIENTS)
+#define EXIFTAGTYPE_TN_YCBCR_COEFFICIENTS EXIF_RATIONAL
+// Subsampling ratio of Y to C (of thumbnail)
+// Use EXIFTAGTYPE_TN_YCBCR_SUB_SAMPLING as the exif_tag_type (EXIF_SHORT)
+// Count should be 2
+#define _ID_TN_YCBCR_SUB_SAMPLING 0x0212
+#define EXIFTAGID_TN_YCBCR_SUB_SAMPLING \
+  CONSTRUCT_TAGID(TN_YCBCR_SUB_SAMPLING, _ID_TN_YCBCR_SUB_SAMPLING)
+#define EXIFTAGTYPE_TN_YCBCR_SUB_SAMPLING EXIF_SHORT
+// Y and C positioning (of thumbnail)
+// Use EXIFTAGTYPE_TN_YCBCR_POSITIONING as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_YCBCR_POSITIONING 0x0213
+#define EXIFTAGID_TN_YCBCR_POSITIONING \
+  CONSTRUCT_TAGID(TN_YCBCR_POSITIONING, _ID_TN_YCBCR_POSITIONING)
+#define EXIFTAGTYPE_TN_YCBCR_POSITIONING    EXIF_SHORT
+// Pair of black and white reference values (of thumbnail)
+// Use EXIFTAGTYPE_TN_REFERENCE_BLACK_WHITE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 6
+#define _ID_TN_REFERENCE_BLACK_WHITE 0x0214
+#define EXIFTAGID_TN_REFERENCE_BLACK_WHITE \
+  CONSTRUCT_TAGID(TN_REFERENCE_BLACK_WHITE, _ID_TN_REFERENCE_BLACK_WHITE)
+#define EXIFTAGTYPE_TN_REFERENCE_BLACK_WHITE EXIF_RATIONAL
+// Copyright holder (of thumbnail)
+// Use EXIFTAGTYPE_TN_COPYRIGHT as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TN_COPYRIGHT 0x8298
+#define EXIFTAGID_TN_COPYRIGHT CONSTRUCT_TAGID(TN_COPYRIGHT, _ID_TN_COPYRIGHT)
+#define EXIFTAGTYPE_TN_COPYRIGHT EXIF_ASCII
+// Exposure time
+// Use EXIFTAGTYPE_EXPOSURE_TIME as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_EXPOSURE_TIME 0x829a
+#define EXIFTAGID_EXPOSURE_TIME \
+  CONSTRUCT_TAGID(EXPOSURE_TIME, _ID_EXPOSURE_TIME)
+#define EXIFTAGTYPE_EXPOSURE_TIME EXIF_RATIONAL
+// F number
+// Use EXIFTAGTYPE_F_NUMBER as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_F_NUMBER 0x829d
+#define EXIFTAGID_F_NUMBER \
+  CONSTRUCT_TAGID(F_NUMBER, _ID_F_NUMBER)
+#define EXIFTAGTYPE_F_NUMBER EXIF_RATIONAL
+// Exif IFD pointer (NOT INTENDED to be accessible to user)
+#define _ID_EXIF_IFD_PTR 0x8769
+#define EXIFTAGID_EXIF_IFD_PTR \
+  CONSTRUCT_TAGID(EXIF_IFD, _ID_EXIF_IFD_PTR)
+#define EXIFTAGTYPE_EXIF_IFD_PTR EXIF_LONG
+
+// ICC_PROFILE (NOT INTENDED to be accessible to user)
+#define _ID_ICC_PROFILE 0x8773
+#define EXIFTAGID_ICC_PROFILE CONSTRUCT_TAGID(ICC_PROFILE, _ID_ICC_PROFILE)
+#define EXIFTAGTYPE_ICC_PROFILE EXIF_LONG
+// Exposure program
+// Use EXIFTAGTYPE_EXPOSURE_PROGRAM as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_EXPOSURE_PROGRAM 0x8822
+#define EXIFTAGID_EXPOSURE_PROGRAM \
+  CONSTRUCT_TAGID(EXPOSURE_PROGRAM, _ID_EXPOSURE_PROGRAM)
+#define EXIFTAGTYPE_EXPOSURE_PROGRAM EXIF_SHORT
+// Spectral sensitivity
+// Use EXIFTAGTYPE_SPECTRAL_SENSITIVITY as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_SPECTRAL_SENSITIVITY 0x8824
+#define EXIFTAGID_SPECTRAL_SENSITIVITY \
+  CONSTRUCT_TAGID(SPECTRAL_SENSITIVITY, _ID_SPECTRAL_SENSITIVITY)
+#define EXIFTAGTYPE_SPECTRAL_SENSITIVITY EXIF_ASCII
+// GPS IFD pointer (NOT INTENDED to be accessible to user)
+#define _ID_GPS_IFD_PTR 0x8825
+#define EXIFTAGID_GPS_IFD_PTR \
+  CONSTRUCT_TAGID(GPS_IFD, _ID_GPS_IFD_PTR)
+#define EXIFTAGTYPE_GPS_IFD_PTR EXIF_LONG
+// ISO Speed Rating
+// Use EXIFTAGTYPE_ISO_SPEED_RATING as the exif_tag_type (EXIF_SHORT)
+// Count can be any
+#define _ID_ISO_SPEED_RATING 0x8827
+#define EXIFTAGID_ISO_SPEED_RATING \
+  CONSTRUCT_TAGID(ISO_SPEED_RATING, _ID_ISO_SPEED_RATING)
+#define EXIFTAGTYPE_ISO_SPEED_RATING EXIF_SHORT
+// Optoelectric conversion factor
+// Use EXIFTAGTYPE_OECF as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_OECF 0x8828
+#define EXIFTAGID_OECF CONSTRUCT_TAGID(OECF, _ID_OECF)
+#define EXIFTAGTYPE_OECF EXIF_UNDEFINED
+// Exif version
+// Use EXIFTAGTYPE_EXIF_VERSION as the exif_tag_type (EXIF_UNDEFINED)
+// Count should be 4
+#define _ID_EXIF_VERSION 0x9000
+#define EXIFTAGID_EXIF_VERSION \
+  CONSTRUCT_TAGID(EXIF_VERSION, _ID_EXIF_VERSION)
+#define EXIFTAGTYPE_EXIF_VERSION EXIF_UNDEFINED
+// Date and time of original data generation
+// Use EXIFTAGTYPE_EXIF_DATE_TIME_ORIGINAL as the exif_tag_type (EXIF_ASCII)
+// It should be 20 characters long including the null-terminating character.
+#define _ID_EXIF_DATE_TIME_ORIGINAL 0x9003
+#define EXIFTAGID_EXIF_DATE_TIME_ORIGINAL \
+  CONSTRUCT_TAGID(EXIF_DATE_TIME_ORIGINAL, _ID_EXIF_DATE_TIME_ORIGINAL)
+#define EXIFTAGTYPE_EXIF_DATE_TIME_ORIGINAL EXIF_ASCII
+// Date and time of digital data generation
+// Use EXIFTAGTYPE_EXIF_DATE_TIME_DIGITIZED as the exif_tag_type (EXIF_ASCII)
+// It should be 20 characters long including the null-terminating character.
+#define _ID_EXIF_DATE_TIME_DIGITIZED 0x9004
+#define EXIFTAGID_EXIF_DATE_TIME_DIGITIZED \
+  CONSTRUCT_TAGID(EXIF_DATE_TIME_DIGITIZED, _ID_EXIF_DATE_TIME_DIGITIZED)
+#define EXIFTAGTYPE_EXIF_DATE_TIME_DIGITIZED EXIF_ASCII
+// Meaning of each component
+// Use EXIFTAGTYPE_EXIF_COMPONENTS_CONFIG as the exif_tag_type (EXIF_UNDEFINED)
+// Count should be 4
+#define _ID_EXIF_COMPONENTS_CONFIG 0x9101
+#define EXIFTAGID_EXIF_COMPONENTS_CONFIG \
+  CONSTRUCT_TAGID(EXIF_COMPONENTS_CONFIG, _ID_EXIF_COMPONENTS_CONFIG)
+#define EXIFTAGTYPE_EXIF_COMPONENTS_CONFIG EXIF_UNDEFINED
+// Meaning of Image compression mode
+// Use EXIFTAGTYPE_EXIF_COMPRESSED_BITS_PER_PIXEL as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_EXIF_COMPRESSED_BITS_PER_PIXEL 0x9102
+#define EXIFTAGID_EXIF_COMPRESSED_BITS_PER_PIXEL \
+  CONSTRUCT_TAGID(EXIF_COMPRESSED_BITS_PER_PIXEL, _ID_EXIF_COMPRESSED_BITS_PER_PIXEL)
+#define EXIFTAGTYPE_EXIF_COMPRESSED_BITS_PER_PIXEL EXIF_RATIONAL
+// Shutter speed
+// Use EXIFTAGTYPE_SHUTTER_SPEED as the exif_tag_type (EXIF_SRATIONAL)
+// Count should be 1
+#define _ID_SHUTTER_SPEED 0x9201
+#define EXIFTAGID_SHUTTER_SPEED \
+  CONSTRUCT_TAGID(SHUTTER_SPEED, _ID_SHUTTER_SPEED)
+#define EXIFTAGTYPE_SHUTTER_SPEED EXIF_SRATIONAL
+// Aperture
+// Use EXIFTAGTYPE_APERTURE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_APERTURE 0x9202
+#define EXIFTAGID_APERTURE CONSTRUCT_TAGID(APERTURE, _ID_APERTURE)
+#define EXIFTAGTYPE_APERTURE EXIF_RATIONAL
+// Brightness
+// Use EXIFTAGTYPE_BRIGHTNESS as the exif_tag_type (EXIF_SRATIONAL)
+// Count should be 1
+#define _ID_BRIGHTNESS 0x9203
+#define EXIFTAGID_BRIGHTNESS CONSTRUCT_TAGID(BRIGHTNESS, _ID_BRIGHTNESS)
+#define EXIFTAGTYPE_BRIGHTNESS EXIF_SRATIONAL
+// Exposure bias
+// Use EXIFTAGTYPE_EXPOSURE_BIAS_VALUE as the exif_tag_type (EXIF_SRATIONAL)
+// Count should be 1
+#define _ID_EXPOSURE_BIAS_VALUE 0x9204
+#define EXIFTAGID_EXPOSURE_BIAS_VALUE \
+  CONSTRUCT_TAGID(EXPOSURE_BIAS_VALUE, _ID_EXPOSURE_BIAS_VALUE)
+#define EXIFTAGTYPE_EXPOSURE_BIAS_VALUE EXIF_SRATIONAL
+// Maximum lens aperture
+// Use EXIFTAGTYPE_MAX_APERTURE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_MAX_APERTURE 0x9205
+#define EXIFTAGID_MAX_APERTURE CONSTRUCT_TAGID(MAX_APERTURE, _ID_MAX_APERTURE)
+#define EXIFTAGTYPE_MAX_APERTURE EXIF_RATIONAL
+// Subject distance
+// Use EXIFTAGTYPE_SUBJECT_DISTANCE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_SUBJECT_DISTANCE 0x9206
+#define EXIFTAGID_SUBJECT_DISTANCE \
+  CONSTRUCT_TAGID(SUBJECT_DISTANCE, _ID_SUBJECT_DISTANCE)
+#define EXIFTAGTYPE_SUBJECT_DISTANCE EXIF_RATIONAL
+// Metering mode
+// Use EXIFTAGTYPE_METERING_MODE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_METERING_MODE 0x9207
+#define EXIFTAGID_METERING_MODE \
+  CONSTRUCT_TAGID(METERING_MODE, _ID_METERING_MODE)
+#define EXIFTAGTYPE_METERING_MODE EXIF_SHORT
+// Light source
+// Use EXIFTAGTYPE_LIGHT_SOURCE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_LIGHT_SOURCE 0x9208
+#define EXIFTAGID_LIGHT_SOURCE CONSTRUCT_TAGID(LIGHT_SOURCE, _ID_LIGHT_SOURCE)
+#define EXIFTAGTYPE_LIGHT_SOURCE EXIF_SHORT
+// Flash
+// Use EXIFTAGTYPE_FLASH as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_FLASH 0x9209
+#define EXIFTAGID_FLASH CONSTRUCT_TAGID(FLASH, _ID_FLASH)
+#define EXIFTAGTYPE_FLASH EXIF_SHORT
+// Lens focal length
+// Use EXIFTAGTYPE_FOCAL_LENGTH as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_FOCAL_LENGTH 0x920a
+#define EXIFTAGID_FOCAL_LENGTH CONSTRUCT_TAGID(FOCAL_LENGTH, _ID_FOCAL_LENGTH)
+#define EXIFTAGTYPE_FOCAL_LENGTH EXIF_RATIONAL
+// Subject area
+// Use EXIFTAGTYPE_SUBJECT_AREA as exif_tag_type (EXIF_SHORT)
+// Count should be 2 or 3 or 4
+#define _ID_SUBJECT_AREA 0x9214
+#define EXIFTAGID_SUBJECT_AREA CONSTRUCT_TAGID(SUBJECT_AREA, _ID_SUBJECT_AREA)
+#define EXIFTAGTYPE_SUBJECT_AREA EXIF_SHORT
+// Maker note
+// Use EXIFTAGTYPE_EXIF_MAKER_NOTE as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_EXIF_MAKER_NOTE 0x927c
+#define EXIFTAGID_EXIF_MAKER_NOTE \
+  CONSTRUCT_TAGID(EXIF_MAKER_NOTE, _ID_EXIF_MAKER_NOTE)
+#define EXIFTAGTYPE_EXIF_MAKER_NOTE EXIF_UNDEFINED
+// User comments
+// Use EXIFTAGTYPE_EXIF_USER_COMMENT as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_EXIF_USER_COMMENT 0x9286
+#define EXIFTAGID_EXIF_USER_COMMENT \
+  CONSTRUCT_TAGID(EXIF_USER_COMMENT, _ID_EXIF_USER_COMMENT)
+#define EXIFTAGTYPE_EXIF_USER_COMMENT EXIF_UNDEFINED
+// Date time sub-seconds
+// Use EXIFTAGTYPE_SUBSEC_TIME as the exif_tag_type (EXIF_ASCII)
+// Count could be any
+#define _ID_SUBSEC_TIME 0x9290
+#define EXIFTAGID_SUBSEC_TIME CONSTRUCT_TAGID(SUBSEC_TIME, _ID_SUBSEC_TIME)
+#define EXIFTAGTYPE_SEBSEC_TIME EXIF_ASCII
+// Date time original sub-seconds
+// use EXIFTAGTYPE_SUBSEC_TIME_ORIGINAL as the exif_tag_type (EXIF_ASCII)
+// Count could be any
+#define _ID_SUBSEC_TIME_ORIGINAL 0x9291
+#define EXIFTAGID_SUBSEC_TIME_ORIGINAL \
+  CONSTRUCT_TAGID(SUBSEC_TIME_ORIGINAL, _ID_SUBSEC_TIME_ORIGINAL)
+#define EXIFTAGTYPE_SUBSEC_TIME_ORIGINAL EXIF_ASCII
+// Date time digitized sub-seconds
+// use EXIFTAGTYPE_SUBSEC_TIME_DIGITIZED as the exif_tag_type (EXIF_ASCII)
+// Count could be any
+#define _ID_SUBSEC_TIME_DIGITIZED 0x9292
+#define EXIFTAGID_SUBSEC_TIME_DIGITIZED \
+  CONSTRUCT_TAGID(SUBSEC_TIME_DIGITIZED, _ID_SUBSEC_TIME_DIGITIZED)
+#define EXIFTAGTYPE_SUBSEC_TIME_DIGITIZED EXIF_ASCII
+// Supported Flashpix version
+// Use EXIFTAGTYPE_EXIF_FLASHPIX_VERSION as the exif_tag_type (EXIF_UNDEFINED)
+// Count should be 4
+#define _ID_EXIF_FLASHPIX_VERSION 0xa000
+#define EXIFTAGID_EXIF_FLASHPIX_VERSION \
+  CONSTRUCT_TAGID(EXIF_FLASHPIX_VERSION, _ID_EXIF_FLASHPIX_VERSION)
+#define EXIFTAGTYPE_EXIF_FLASHPIX_VERSION EXIF_UNDEFINED
+//  Color space information
+// Use EXIFTAGTYPE_EXIF_COLOR_SPACE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_EXIF_COLOR_SPACE 0xa001
+#define EXIFTAGID_EXIF_COLOR_SPACE \
+  CONSTRUCT_TAGID(EXIF_COLOR_SPACE, _ID_EXIF_COLOR_SPACE)
+#define EXIFTAGTYPE_EXIF_COLOR_SPACE EXIF_SHORT
+//  Valid image width
+// Use EXIFTAGTYPE_EXIF_PIXEL_X_DIMENSION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_EXIF_PIXEL_X_DIMENSION 0xa002
+#define EXIFTAGID_EXIF_PIXEL_X_DIMENSION \
+  CONSTRUCT_TAGID(EXIF_PIXEL_X_DIMENSION, _ID_EXIF_PIXEL_X_DIMENSION)
+#define EXIFTAGTYPE_EXIF_PIXEL_X_DIMENSION EXIF_SHORT
+// Valid image height
+// Use EXIFTAGTYPE_EXIF_PIXEL_Y_DIMENSION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_EXIF_PIXEL_Y_DIMENSION 0xa003
+#define EXIFTAGID_EXIF_PIXEL_Y_DIMENSION \
+  CONSTRUCT_TAGID(EXIF_PIXEL_Y_DIMENSION, _ID_EXIF_PIXEL_Y_DIMENSION)
+#define EXIFTAGTYPE_EXIF_PIXEL_Y_DIMENSION  EXIF_SHORT
+// Related audio file
+// Use EXIFTAGTYPE_EXIF_RELATED_SOUND_FILE as the exif_tag_type (EXIF_ASCII)
+// Count should be 13
+#define _ID_RELATED_SOUND_FILE 0xa004
+#define EXIFTAGID_RELATED_SOUND_FILE \
+  CONSTRUCT_TAGID(RELATED_SOUND_FILE, _ID_RELATED_SOUND_FILE)
+#define EXIFTAGTYPE_RELATED_SOUND_FILE EXIF_ASCII
+// Interop IFD pointer (NOT INTENDED to be accessible to user)
+#define _ID_INTEROP_IFD_PTR 0xa005
+#define EXIFTAGID_INTEROP_IFD_PTR CONSTRUCT_TAGID(INTEROP, _ID_INTEROP_IFD_PTR)
+#define EXIFTAGTYPE_INTEROP_IFD_PTR EXIF_LONG
+// Flash energy
+// Use EXIFTAGTYPE_EXIF_FLASH_ENERGY as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_FLASH_ENERGY 0xa20b
+#define EXIFTAGID_FLASH_ENERGY CONSTRUCT_TAGID(FLASH_ENERGY, _ID_FLASH_ENERGY)
+#define EXIFTAGTYPE_FLASH_ENERGY EXIF_RATIONAL
+// Spatial frequency response
+// Use EXIFTAGTYPE_SPATIAL_FREQ_RESPONSE as exif_tag_type (EXIF_UNDEFINED)
+// Count would be any
+#define _ID_SPATIAL_FREQ_RESPONSE 0xa20c
+#define EXIFTAGID_SPATIAL_FREQ_RESPONSE \
+  CONSTRUCT_TAGID(SPATIAL_FREQ_RESPONSE, _ID_SPATIAL_FREQ_RESPONSE)
+#define EXIFTAGTYPE_SPATIAL_FREQ_RESPONSE EXIF_UNDEFINED
+// Focal plane x resolution
+// Use EXIFTAGTYPE_FOCAL_PLANE_X_RESOLUTION as exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_FOCAL_PLANE_X_RESOLUTION 0xa20e
+#define EXIFTAGID_FOCAL_PLANE_X_RESOLUTION \
+  CONSTRUCT_TAGID(FOCAL_PLANE_X_RESOLUTION, _ID_FOCAL_PLANE_X_RESOLUTION)
+#define EXIFTAGTYPE_FOCAL_PLANE_X_RESOLUTION EXIF_RATIONAL
+// Focal plane y resolution
+// Use EXIFTAGTYPE_FOCAL_PLANE_Y_RESOLUTION as exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_FOCAL_PLANE_Y_RESOLUTION 0xa20f
+#define EXIFTAGID_FOCAL_PLANE_Y_RESOLUTION \
+  CONSTRUCT_TAGID(FOCAL_PLANE_Y_RESOLUTION, _ID_FOCAL_PLANE_Y_RESOLUTION)
+#define EXIFTAGTYPE_FOCAL_PLANE_Y_RESOLUTION EXIF_RATIONAL
+// Focal plane resolution unit
+// Use EXIFTAGTYPE_FOCAL_PLANE_RESOLUTION_UNIT as exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_FOCAL_PLANE_RESOLUTION_UNIT 0xa210
+#define EXIFTAGID_FOCAL_PLANE_RESOLUTION_UNIT \
+  CONSTRUCT_TAGID(FOCAL_PLANE_RESOLUTION_UNIT, _ID_FOCAL_PLANE_RESOLUTION_UNIT)
+#define EXIFTAGTYPE_FOCAL_PLANE_RESOLUTION_UNIT EXIF_SHORT
+// Subject location
+// Use EXIFTAGTYPE_SUBJECT_LOCATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 2
+#define _ID_SUBJECT_LOCATION 0xa214
+#define EXIFTAGID_SUBJECT_LOCATION \
+  CONSTRUCT_TAGID(SUBJECT_LOCATION, _ID_SUBJECT_LOCATION)
+#define EXIFTAGTYPE_SUBJECT_LOCATION EXIF_SHORT
+// Exposure index
+// Use EXIFTAGTYPE_EXPOSURE_INDEX as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_EXPOSURE_INDEX 0xa215
+#define EXIFTAGID_EXPOSURE_INDEX \
+  CONSTRUCT_TAGID(EXPOSURE_INDEX, _ID_EXPOSURE_INDEX)
+#define EXIFTAGTYPE_EXPOSURE_INDEX EXIF_RATIONAL
+// Sensing method
+// Use EXIFTAGTYPE_SENSING_METHOD as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_SENSING_METHOD 0xa217
+#define EXIFTAGID_SENSING_METHOD \
+  CONSTRUCT_TAGID(SENSING_METHOD, _ID_SENSING_METHOD)
+#define EXIFTAGTYPE_SENSING_METHOD EXIF_SHORT
+// File source
+// Use EXIFTAGTYPE_FILE_SOURCE as the exif_tag_type (EXIF_UNDEFINED)
+// Count should be 1
+#define _ID_FILE_SOURCE 0xa300
+#define EXIFTAGID_FILE_SOURCE CONSTRUCT_TAGID(FILE_SOURCE, _ID_FILE_SOURCE)
+#define EXIFTAGTYPE_FILE_SOURCE EXIF_UNDEFINED
+// Scene type
+// Use EXIFTAGTYPE_SCENE_TYPE as the exif_tag_type (EXIF_UNDEFINED)
+// Count should be 1
+#define _ID_SCENE_TYPE 0xa301
+#define EXIFTAGID_SCENE_TYPE CONSTRUCT_TAGID(SCENE_TYPE, _ID_SCENE_TYPE)
+#define EXIFTAGTYPE_SCENE_TYPE EXIF_UNDEFINED
+// CFA pattern
+// Use EXIFTAGTYPE_CFA_PATTERN as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_CFA_PATTERN 0xa302
+#define EXIFTAGID_CFA_PATTERN CONSTRUCT_TAGID(CFA_PATTERN, _ID_CFA_PATTERN)
+#define EXIFTAGTYPE_CFA_PATTERN EXIF_UNDEFINED
+// Custom image processing
+// Use EXIFTAGTYPE_CUSTOM_RENDERED as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_CUSTOM_RENDERED 0xa401
+#define EXIFTAGID_CUSTOM_RENDERED \
+  CONSTRUCT_TAGID(CUSTOM_RENDERED, _ID_CUSTOM_RENDERED)
+#define EXIFTAGTYPE_CUSTOM_RENDERED EXIF_SHORT
+// Exposure mode
+// Use EXIFTAGTYPE_EXPOSURE_MODE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_EXPOSURE_MODE 0xa402
+#define EXIFTAGID_EXPOSURE_MODE \
+  CONSTRUCT_TAGID(EXPOSURE_MODE, _ID_EXPOSURE_MODE)
+#define EXIFTAGTYPE_EXPOSURE_MODE EXIF_SHORT
+// White balance
+// Use EXIFTAGTYPE_WHITE_BALANCE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_WHITE_BALANCE 0xa403
+#define EXIFTAGID_WHITE_BALANCE \
+  CONSTRUCT_TAGID(WHITE_BALANCE, _ID_WHITE_BALANCE)
+#define EXIFTAGTYPE_WHITE_BALANCE EXIF_SHORT
+// Digital zoom ratio
+// Use EXIFTAGTYPE_DIGITAL_ZOOM_RATIO as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_DIGITAL_ZOOM_RATIO 0xa404
+#define EXIFTAGID_DIGITAL_ZOOM_RATIO \
+  CONSTRUCT_TAGID(DIGITAL_ZOOM_RATIO, _ID_DIGITAL_ZOOM_RATIO)
+#define EXIFTAGTYPE_DIGITAL_ZOOM_RATIO EXIF_RATIONAL
+// Focal length in 35mm film
+// Use EXIFTAGTYPE_FOCAL_LENGTH_35MM as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_FOCAL_LENGTH_35MM 0xa405
+#define EXIFTAGID_FOCAL_LENGTH_35MM CONSTRUCT_TAGID(FOCAL_LENGTH_35MM, _ID_FOCAL_LENGTH_35MM)
+#define EXIFTAGTYPE_FOCAL_LENGTH_35MM EXIF_SHORT
+// Scene capture type
+// Use EXIFTAGTYPE_SCENE_CAPTURE_TYPE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_SCENE_CAPTURE_TYPE 0xa406
+#define EXIFTAGID_SCENE_CAPTURE_TYPE \
+  CONSTRUCT_TAGID(SCENE_CAPTURE_TYPE, _ID_SCENE_CAPTURE_TYPE)
+#define EXIFTAGTYPE_SCENE_CAPTURE_TYPE EXIF_SHORT
+// Gain control
+// Use EXIFTAGTYPE_GAIN_CONTROL as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_GAIN_CONTROL 0xa407
+#define EXIFTAGID_GAIN_CONTROL CONSTRUCT_TAGID(GAIN_CONTROL, _ID_GAIN_CONTROL)
+#define EXIFTAGTYPE_GAIN_CONTROL EXIF_SHORT
+// Contrast
+// Use EXIFTAGTYPE_CONTRAST as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_CONTRAST 0xa408
+#define EXIFTAGID_CONTRAST CONSTRUCT_TAGID(CONTRAST, _ID_CONTRAST)
+#define EXIFTAGTYPE_CONTRAST EXIF_SHORT
+// Saturation
+// Use EXIFTAGTYPE_SATURATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_SATURATION  0xa409
+#define EXIFTAGID_SATURATION CONSTRUCT_TAGID(SATURATION, _ID_SATURATION)
+#define EXIFTAGTYPE_SATURATION EXIF_SHORT
+// Sharpness
+// Use EXIFTAGTYPE_SHARPNESS as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_SHARPNESS 0xa40a
+#define EXIFTAGID_SHARPNESS CONSTRUCT_TAGID(SHARPNESS, _ID_SHARPNESS)
+#define EXIFTAGTYPE_SHARPNESS EXIF_SHORT
+// Device settings description
+// Use EXIFTAGTYPE_DEVIC_SETTIGNS_DESCRIPTION as exif_tag_type (EXIF_UNDEFINED)
+// Count could be any
+#define _ID_DEVICE_SETTINGS_DESCRIPTION 0xa40b
+#define EXIFTAGID_DEVICE_SETTINGS_DESCRIPTION \
+  CONSTRUCT_TAGID(DEVICE_SETTINGS_DESCRIPTION, _ID_DEVICE_SETTINGS_DESCRIPTION)
+#define EXIFTAGTYPE_DEVIC_SETTIGNS_DESCRIPTION EXIF_UNDEFINED
+// Subject distance range
+// Use EXIFTAGTYPE_SUBJECT_DISTANCE_RANGE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_SUBJECT_DISTANCE_RANGE 0xa40c
+#define EXIFTAGID_SUBJECT_DISTANCE_RANGE \
+  CONSTRUCT_TAGID(SUBJECT_DISTANCE_RANGE, _ID_SUBJECT_DISTANCE_RANGE)
+#define EXIFTAGTYPE_SUBJECT_DISTANCE_RANGE EXIF_SHORT
+// Unique image id
+// Use EXIFTAGTYPE_IMAGE_UID as the exif_tag_type (EXIF_ASCII)
+// Count should be 33
+#define _ID_IMAGE_UID 0xa420
+#define EXIFTAGID_IMAGE_UID CONSTRUCT_TAGID(IMAGE_UID, _ID_IMAGE_UID)
+#define EXIFTAGTYPE_IMAGE_UID EXIF_ASCII
+// PIM tag
+// Use EXIFTAGTYPE_PIM_TAG as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_PIM 0xc4a5
+#define EXIFTAGID_PIM_TAG CONSTRUCT_TAGID(PIM, _ID_PIM)
+#define EXIFTAGTYPE_PIM_TAG EXIF_UNDEFINED
+#endif // __QEXIF_H__
+
diff --git a/msmcobalt/mm-image-codec/qexif/qmpo.h b/msmcobalt/mm-image-codec/qexif/qmpo.h
new file mode 100644
index 0000000..bcfd59b
--- /dev/null
+++ b/msmcobalt/mm-image-codec/qexif/qmpo.h
@@ -0,0 +1,150 @@
+/*Copyright (c) 2015, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of The Linux Foundation nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.*/
+
+#ifndef __QMPO_H__
+#define __QMPO_H__
+
+#include <stdio.h>
+#include <qexif.h>
+
+//Length of MPO header fields
+#define MP_APP2_FIELD_LENGTH_BYTES 2
+#define MP_FORMAT_IDENTIFIER_BYTES 4
+#define MP_ENDIAN_BYTES 4
+#define MP_HEADER_OFFSET_TO_FIRST_IFD_BYTES 4
+#define MP_INDEX_COUNT_BYTES 2
+#define MP_INDEX_VERSION_BYTES 12
+#define MP_INDEX_NUMBER_OF_IMAGES_BYTES 12
+#define MP_INDEX_ENTRY_BYTES 12
+#define MP_INDEX_IMAGE_UNIQUE_ID_LIST_BYTES 12
+#define MP_INDEX_TOTAL_CAPURED_FRAMES 12
+#define MP_INDEX_OFFSET_OF_NEXT_IFD_BYTES 4
+#define MP_INDEX_ENTRY_VALUE_BYTES 16
+#define MP_INDEX_ENTRY_INDIVIDUAL_IMAGE_ATTRIBUTE_BYTES 4
+#define MP_INDEX_ENTRY_INDIVIDUAL_IMAGE_SIZE_BYTES 4
+#define MP_INDEX_ENTRY_INDIVIDUAL_IMAGE_DATA_OFFSET_BYTES 4
+#define MP_ATTRIBUTE_COUNT_BYTES 2
+#define MP_ATTRIBUTE_OFFSET_OF_NEXT_IFD_BYTES 4
+#define MP_TAG_BYTES 12
+#define MP_INDIVIDUAL_IMAGE_ID_BYTES 33
+#define MP_INDEX_IFD_START 2
+
+#define MPO_BIG_ENDIAN 0x4D4D002A
+#define MPO_LITTLE_ENDIAN 0x49492A00
+
+/* MPO Dependent Type */
+typedef enum
+{
+   NON_DEPENDENT_IMAGE    = 0x00000000,   // Non dependent image
+   DEPENDENT_CHILD_IMAGE  = 0x40000000,   // Dependent child image flag
+   DEPENDENT_PARENT_IMAGE = 0x80000000,   // Dependent parent image flag
+   DEPENDENT_MASK         = 0xc0000000,   // Dependent mask
+   DEPENDENT_MAX,
+} qmpo_dependent_t;
+
+/* MPO Representative Type */
+typedef enum
+{
+  NOT_REPRESENTATIVE_IMAGE = 0x00000000,   // Not a representative image
+  REPRESENTATIVE_IMAGE     = 0x20000000,   // Representative image flag
+  REPRESENTATIVE_MASK      = 0x20000000,   // Representative mask
+  REPRESENTATIVE_MAX,
+} qmpo_representative_t;
+
+/* MPO Image Data Format Type */
+typedef enum
+{
+  JPEG                   = 0x00000000,   // Image is in JPEG format
+  NON_JPEG               = 0x07000000,   // Image is not JPEG
+  IMAGE_DATA_FORMAT_MASK = 0x07000000,   // Image mask
+  IMAGE_DATA_FORMAT_MAX,
+} qmpo_image_data_format_t;
+
+/* MPO Type */
+typedef enum
+{
+  UNDEFINED              = 0x00000000,   // MP types undefined
+  LARGE_TN_CLASS_1       = 0x00010001,   // Large thumbnail class 1 image
+  LARGE_TN_CLASS_2       = 0x00010002,   // Large thumbnail class 2 image
+  MULTI_VIEW_PANORAMA    = 0x00020001,   // Multi-view Panorama image
+  MULTI_VIEW_DISPARITY   = 0x00020002,   // Multi-view Disparity image
+  MULTI_VIEW_MULTI_ANGLE = 0x00020003,   // Multi-view Multi-angle image
+  BASELINE_PRIMARY       = 0x00030000,   // Baseline MP Primary image
+  TYPE_MASK              = 0x00ffffff,   // Type mask
+  TYPE_MAX,
+} qmpo_type_t;
+
+// MP Format Version
+// Use MPOTAGTYPE_MP_F_VERSION as the exif_tag_type (EXIF_UNDEFINED)
+// Count should be 4
+#define _ID_MP_F_VERSION_FIRST           0xb000
+#define MPOTAGID_MP_F_VERSION_FIRST      CONSTRUCT_TAGID(MP_F_VERSION_FIRST, _ID_MP_F_VERSION_FIRST)
+#define MPOTAGTYPE_MP_F_VERSION_FIRST    EXIF_UNDEFINED
+
+// Number of Images
+// Use MPOTAGTYPE_NUMBER_OF_IMAGES as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_NUMBER_OF_IMAGES             0xb001
+#define MPOTAGID_NUMBER_OF_IMAGES        CONSTRUCT_TAGID(NUMBER_OF_IMAGES, _ID_NUMBER_OF_IMAGES)
+#define MPOTAGTYPE_NUMBER_OF_IMAGES      EXIF_LONG
+
+// MP Entry
+// Use MPOTAGTYPE_MP_ENTRY as the exif_tag_type (EXIF_UNDEFINED)
+// Count should be 16 x NumberOfImages
+#define _ID_MP_ENTRY                     0xb002
+#define MPOTAGID_MP_ENTRY                CONSTRUCT_TAGID(MP_ENTRY, _ID_MP_ENTRY)
+#define MPOTAGTYPE_MP_ENTRY              EXIF_UNDEFINED
+
+// Individual Image Unique ID List
+// Use MPOTAGTYPE_IMAGE_UID_LIST as the exif_tag_type (EXIF_UNDEFINED)
+// Count should be 33 x NumberOfImages
+#define _ID_IMAGE_UID_LIST               0xb003
+#define MPOTAGID_IMAGE_UID_LIST          CONSTRUCT_TAGID(IMAGE_UID_LIST, _ID_IMAGE_UID_LIST)
+#define MPOTAGTYPE_IMAGE_UID_LIST        EXIF_UNDEFINED
+
+// Total Number of Captured Frames
+// Use MPOTAGTYPE_TOTAL_FRAMES as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_TOTAL_FRAMES                 0xb004
+#define MPOTAGID_TOTAL_FRAMES            CONSTRUCT_TAGID(TOTAL_FRAMES, _ID_TOTAL_FRAMES)
+#define MPOTAGTYPE_TOTAL_FRAMES          EXIF_LONG
+
+// MP Format Version
+// Use MPOTAGTYPE_MP_F_VERSION as the exif_tag_type (EXIF_UNDEFINED)
+// Count should be 4
+#define _ID_MP_F_VERSION                 0xb000
+#define MPOTAGID_MP_F_VERSION            CONSTRUCT_TAGID(MP_F_VERSION, _ID_MP_F_VERSION)
+#define MPOTAGTYPE_MP_F_VERSION          EXIF_UNDEFINED
+
+// MP Individual Image Number
+// Use MPOTAGTYPE_MP_INDIVIDUAL_NUM as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_MP_INDIVIDUAL_NUM            0xb101
+#define MPOTAGID_MP_INDIVIDUAL_NUM       CONSTRUCT_TAGID(MP_INDIVIDUAL_NUM, _ID_MP_INDIVIDUAL_NUM)
+#define MPOTAGTYPE_MP_INDIVIDUAL_NUM     EXIF_LONG
+
+#endif
diff --git a/msmcobalt/mm-image-codec/qomx_core/Android.mk b/msmcobalt/mm-image-codec/qomx_core/Android.mk
new file mode 100644
index 0000000..fa7242b
--- /dev/null
+++ b/msmcobalt/mm-image-codec/qomx_core/Android.mk
@@ -0,0 +1,26 @@
+LOCAL_PATH := $(call my-dir)
+
+# ------------------------------------------------------------------------------
+#                Make the shared library (libqomx_core)
+# ------------------------------------------------------------------------------
+
+include $(CLEAR_VARS)
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_CFLAGS := -Werror \
+                   -g -O0
+
+LOCAL_C_INCLUDES := frameworks/native/include/media/openmax \
+                    $(LOCAL_PATH)/../qexif
+
+LOCAL_INC_FILES := qomx_core.h \
+                   QOMX_JpegExtensions.h
+
+LOCAL_SRC_FILES := qomx_core.c
+
+LOCAL_MODULE           := libqomx_core
+LOCAL_PRELINK_MODULE   := false
+LOCAL_SHARED_LIBRARIES := libcutils libdl
+
+LOCAL_32_BIT_ONLY := true
+include $(BUILD_SHARED_LIBRARY)
diff --git a/msmcobalt/mm-image-codec/qomx_core/QOMX_JpegExtensions.h b/msmcobalt/mm-image-codec/qomx_core/QOMX_JpegExtensions.h
new file mode 100644
index 0000000..315723b
--- /dev/null
+++ b/msmcobalt/mm-image-codec/qomx_core/QOMX_JpegExtensions.h
@@ -0,0 +1,365 @@
+/*Copyright (c) 2012-2015, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of The Linux Foundation nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.*/
+
+#ifndef __QOMX_EXTENSIONS_H__
+#define __QOMX_EXTENSIONS_H__
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <OMX_Image.h>
+#include <qexif.h>
+
/** QOMX_IMAGE_EXT_EVENTS
*  Qcom specific events extended from OMX_EVENT
*  @ OMX_EVENT_THUMBNAIL_DROPPED - Indicates that the thumbnail
*                                 size is too big to be included
*                                 in the exif and will be
*                                 dropped
**/
typedef enum {
 OMX_EVENT_THUMBNAIL_DROPPED = OMX_EventVendorStartUnused+1
} QOMX_IMAGE_EXT_EVENTS;

/**
*  The following macros define the strings to be used for
*  getting the extension indices (each name corresponds to one
*  entry of QOMX_IMAGE_EXT_INDEXTYPE below).
**/
#define QOMX_IMAGE_EXT_EXIF_NAME                  "OMX.QCOM.image.exttype.exif"
#define QOMX_IMAGE_EXT_THUMBNAIL_NAME        "OMX.QCOM.image.exttype.thumbnail"
#define QOMX_IMAGE_EXT_BUFFER_OFFSET_NAME "OMX.QCOM.image.exttype.bufferOffset"
#define QOMX_IMAGE_EXT_MOBICAT_NAME            "OMX.QCOM.image.exttype.mobicat"
#define QOMX_IMAGE_EXT_ENCODING_MODE_NAME        "OMX.QCOM.image.encoding.mode"
#define QOMX_IMAGE_EXT_WORK_BUFFER_NAME      "OMX.QCOM.image.exttype.workbuffer"
#define QOMX_IMAGE_EXT_METADATA_NAME      "OMX.QCOM.image.exttype.metadata"
#define QOMX_IMAGE_EXT_META_ENC_KEY_NAME      "OMX.QCOM.image.exttype.metaEncKey"
#define QOMX_IMAGE_EXT_MEM_OPS_NAME      "OMX.QCOM.image.exttype.mem_ops"
#define QOMX_IMAGE_EXT_JPEG_SPEED_NAME      "OMX.QCOM.image.exttype.jpeg.speed"
#define QOMX_IMAGE_EXT_MULTI_IMAGE_NAME  "OMX.QCOM.image.exttype.multi.image"
+
/** QOMX_IMAGE_EXT_INDEXTYPE
*  This enum is an extension of the OMX_INDEXTYPE enum and
*  specifies Qcom supported extension indexes. These indexes are
*  associated with the extension names and can be used as
*  indexes in the SetParameter and GetParameter functions to set
*  or get values from qcom specific data structures.
*  Fix(review): JPEG_SPEED / MULTI_IMAGE previously read
*  0x07F000B / 0x07F000C (only seven hex digits), which breaks
*  the 0x07F000xx numbering used by every other entry; corrected
*  to 0x07F0000B / 0x07F0000C so they follow MEM_OPS (0x07F0000A).
**/
typedef enum {
  //Name: OMX.QCOM.image.exttype.exif
  QOMX_IMAGE_EXT_EXIF = 0x07F00000,

  //Name: OMX.QCOM.image.exttype.thumbnail
  QOMX_IMAGE_EXT_THUMBNAIL = 0x07F00001,

  //Name: OMX.QCOM.image.exttype.bufferOffset
  QOMX_IMAGE_EXT_BUFFER_OFFSET = 0x07F00002,

  //Name: OMX.QCOM.image.exttype.mobicat
  QOMX_IMAGE_EXT_MOBICAT = 0x07F00003,

  //Name: OMX.QCOM.image.encoding.approach
  QOMX_IMAGE_EXT_ENCODING_MODE = 0x07F00004,

  //Name: OMX.QCOM.image.exttype.workbuffer
  QOMX_IMAGE_EXT_WORK_BUFFER = 0x07F00005,

  //Name: OMX.QCOM.image.exttype.metadata
  QOMX_IMAGE_EXT_METADATA = 0x07F00008,

  //Name: OMX.QCOM.image.exttype.metaEncKey
  QOMX_IMAGE_EXT_META_ENC_KEY = 0x07F00009,

  //Name: OMX.QCOM.image.exttype.memOps
  QOMX_IMAGE_EXT_MEM_OPS = 0x07F0000A,

  //Name: OMX.QCOM.image.exttype.jpeg.speed
  QOMX_IMAGE_EXT_JPEG_SPEED = 0x07F0000B,

  //Name: OMX.QCOM.image.exttype.multi.image
  QOMX_IMAGE_EXT_MULTI_IMAGE = 0x07F0000C,

} QOMX_IMAGE_EXT_INDEXTYPE;
+
/** QOMX_BUFFER_INFO
*  The structure specifies information
*   associated with the buffers and should be passed as appData
*   in UseBuffer calls to the OMX component with buffer specific
*   data. @fd - FD of the buffer allocated. If the buffer is
*          allocated on the heap, it can be zero.
*   @offset - Buffer offset
**/

typedef struct {
  OMX_U32 fd;
  OMX_U32 offset;
} QOMX_BUFFER_INFO;

/** QEXIF_INFO_DATA
*   The basic exif structure used to construct
*   information for a single exif tag.
*   @tag_entry - value(s) of the tag (see qexif.h)
*   @tag_id - identifier of the exif tag
**/
typedef struct{
  exif_tag_entry_t tag_entry;
  exif_tag_id_t tag_id;
} QEXIF_INFO_DATA;

/** QEXTN_DATA
*   The structure used to carry additional payload
*   meant to be in EXIF Appx marker fields.
*   @sw_3a_version - software 3A version, four 16-bit values
**/
typedef struct {
  uint16_t sw_3a_version[4];
} QEXTN_DATA;

/**QOMX_EXIF_INFO
*  The structure contains an array of exif tag
*  structures (QEXIF_INFO_DATA) and should be passed to the OMX
*  layer by the OMX client using the extension index.
*  @exif_data - Array of exif tags
*  @numOfEntries - Number of exif tag entries being passed in
*                  the array
*  @debug_data - specific debug information for internal use
**/
typedef struct {
  QEXIF_INFO_DATA *exif_data;
  OMX_U32 numOfEntries;
  QEXTN_DATA debug_data;
} QOMX_EXIF_INFO;
+
/**QOMX_YUV_FRAME_INFO
*  The structure contains all the offsets
*  associated with the Y and CbCr buffers.
*  @yOffset - Offset within the Y buffer
*  @cbcrOffset - Offset within the Cb/Cr buffer. The array
*                should be populated in order depending on Cb
*                first or Cr first in case of planar data. For
*                pseudo-planar, only the first array element
*                needs to be filled and the second element
*                should be set to zero.
*  @cbcrStartOffset - Start offset of the Cb/Cr buffer
*                     starting from the Y buffer. The array
*                     should be populated in order depending on
*                     Cb first or Cr first in case of planar
*                     data. For pseudo-planar, only the first
*                     array element needs to be filled and the
*                     second element should be set to zero.
**/
typedef struct {
  OMX_U32 yOffset;
  OMX_U32 cbcrOffset[2];
  OMX_U32 cbcrStartOffset[2];
} QOMX_YUV_FRAME_INFO;

/** QOMX_THUMBNAIL_INFO
*  Includes all information associated with the thumbnail
*  @input_width - Width of the input thumbnail buffer
*  @input_height - Height of the input thumbnail buffer
*  @scaling_enabled - Flag indicating if thumbnail scaling is
*  enabled.
*  @quality - JPEG Q factor value in the range of 1-100. A factor of 1
 *               produces the smallest, worst quality images, and a factor
 *               of 100 produces the largest, best quality images.  A
 *               typical default is 75 for small good quality images.
*  @crop_info - Includes the crop width, crop height,
*               horizontal and vertical offsets.
*  @output_width - Output width of the thumbnail. This is
*                the width after scaling if scaling is enabled,
*                or width after cropping if only cropping is
*                enabled, or the same as the input width otherwise
*  @output_height - Output height of the thumbnail. This is
*                the height after scaling if scaling is enabled,
*                or height after cropping if only cropping is
*                enabled, or the same as the input height otherwise
*  @tmbOffset - Y/CbCr plane offsets of the thumbnail buffer
*  @rotation - rotation to apply to the thumbnail
*              (NOTE(review): presumably degrees; confirm)
**/
typedef struct {
  OMX_U32 input_width;
  OMX_U32 input_height;
  OMX_U8 scaling_enabled;
  OMX_U32 quality;
  OMX_CONFIG_RECTTYPE crop_info;
  OMX_U32 output_width;
  OMX_U32 output_height;
  QOMX_YUV_FRAME_INFO tmbOffset;
  OMX_U32 rotation;
} QOMX_THUMBNAIL_INFO;
+
/**QOMX_MOBICAT
*  Mobicat data passed to the OMX layer
*  @mobicatData - Mobicat payload
*  @mobicatDataLength - length of the mobicat data in bytes
**/
typedef struct {
  OMX_U8 *mobicatData;
  OMX_U32 mobicatDataLength;
} QOMX_MOBICAT;

/**QOMX_WORK_BUFFER
*  Ion buffer to be used for the H/W encoder
*  @fd - FD of the buffer allocated
*  @vaddr - Buffer virtual address
*  @length - Buffer length in bytes
**/
typedef struct {
  int fd;
  uint8_t *vaddr;
  uint32_t length;
} QOMX_WORK_BUFFER;

/**QOMX_METADATA
 *
 * meta data to be set in EXIF
 * @metadata: Dynamic metadata associated with each image
 * @metaPayloadSize : Size of dynamic metadata
 * @mobicat_mask : Mobicat mask
 * @static_metadata: Static metadata associated with each image
 */
typedef struct {
  OMX_U8  *metadata;
  OMX_U32 metaPayloadSize;
  OMX_U8 mobicat_mask;
  OMX_U8 *static_metadata;
} QOMX_METADATA;

/**QOMX_META_ENC_KEY
 *
 * meta data encryption key
 * @metaKey: key bytes
 * @keyLen: key length in bytes
 */
typedef struct {
  OMX_U8  *metaKey;
  OMX_U32 keyLen;
} QOMX_META_ENC_KEY;

/** QOMX_IMG_COLOR_FORMATTYPE
*  This enum is an extension of the OMX_COLOR_FORMATTYPE enum.
*  It specifies Qcom supported color formats.
**/
typedef enum QOMX_IMG_COLOR_FORMATTYPE {
  OMX_QCOM_IMG_COLOR_FormatYVU420SemiPlanar = OMX_COLOR_FormatVendorStartUnused + 0x300,
  OMX_QCOM_IMG_COLOR_FormatYVU422SemiPlanar,
  OMX_QCOM_IMG_COLOR_FormatYVU422SemiPlanar_h1v2,
  OMX_QCOM_IMG_COLOR_FormatYUV422SemiPlanar_h1v2,
  OMX_QCOM_IMG_COLOR_FormatYVU444SemiPlanar,
  OMX_QCOM_IMG_COLOR_FormatYUV444SemiPlanar,
  OMX_QCOM_IMG_COLOR_FormatYVU420Planar,
  OMX_QCOM_IMG_COLOR_FormatYVU422Planar,
  OMX_QCOM_IMG_COLOR_FormatYVU422Planar_h1v2,
  OMX_QCOM_IMG_COLOR_FormatYUV422Planar_h1v2,
  OMX_QCOM_IMG_COLOR_FormatYVU444Planar,
  OMX_QCOM_IMG_COLOR_FormatYUV444Planar
} QOMX_IMG_COLOR_FORMATTYPE;

/** QOMX_ENCODING_MODE
*  This enum is used to select parallel encoding
*  or sequential encoding for the thumbnail and
*  main image
**/
typedef enum {
  OMX_Serial_Encoding,
  OMX_Parallel_Encoding
} QOMX_ENCODING_MODE;
+
+
/**omx_jpeg_ouput_buf_t
*  Structure describing jpeg output buffer
*  @handle - Handle to the containing class
*  @mem_hdl - Handle to camera memory struct
*  @isheap - non-zero for a heap-allocated buffer
*            (NOTE(review): inferred from the name; confirm)
*  @size - Buffer size (input)
*  @vaddr - Buffer address
*  @fd - file descriptor
**/
typedef struct {
  void *handle;
  void *mem_hdl;
  int8_t isheap;
  size_t size; /*input*/
  void *vaddr;
  int fd;
} omx_jpeg_ouput_buf_t;

/** QOMX_MEM_OPS
* Structure holding the function pointers to
* buffer memory operations
* @get_memory - function to allocate buffer memory; fills
*               p_out_buf and receives the jpeg session pointer
* @psession - reference to jpeg session ptr
**/
typedef struct {
  int (*get_memory)( omx_jpeg_ouput_buf_t *p_out_buf, void *p_jpeg_session);
  void *psession;
} QOMX_MEM_OPS;

/** QOMX_JPEG_SPEED_MODE
* Enum specifying the values for the jpeg
* speed mode setting
**/
typedef enum {
  QOMX_JPEG_SPEED_MODE_NORMAL,
  QOMX_JPEG_SPEED_MODE_HIGH
} QOMX_JPEG_SPEED_MODE;

/** QOMX_JPEG_SPEED
* Structure used to set the jpeg speed mode
* parameter
* @speedMode - jpeg speed mode
**/
typedef struct {
  QOMX_JPEG_SPEED_MODE speedMode;
} QOMX_JPEG_SPEED;

/** OMX_IMAGE_TYPE
* Enum specifying the values for the jpeg
* image type setting (plain JPEG or MPO)
**/
typedef enum {
  QOMX_JPEG_IMAGE_TYPE_JPEG,
  QOMX_JPEG_IMAGE_TYPE_MPO
} OMX_IMAGE_TYPE;

/** QOMX_JPEG_MULTI_IMAGE_INFO
* Struct specifying the parameters for a
* sequence of jpeg images
* @image_type - jpeg image type
* @is_primary_image - Flag indicating if the image is the
*                     primary image in the sequence
* @num_of_images - Number of images in the sequence
* @enable_metadata - Flag indicating if multi image
*                    metadata need to be added to the image
**/
typedef struct {
  OMX_IMAGE_TYPE image_type;
  OMX_U8 is_primary_image;
  OMX_U32 num_of_images;
  OMX_U8 enable_metadata;
} QOMX_JPEG_MULTI_IMAGE_INFO;
+
+#ifdef __cplusplus
+ }
+#endif
+
+#endif
diff --git a/msmcobalt/mm-image-codec/qomx_core/qomx_core.c b/msmcobalt/mm-image-codec/qomx_core/qomx_core.c
new file mode 100644
index 0000000..ef2685d
--- /dev/null
+++ b/msmcobalt/mm-image-codec/qomx_core/qomx_core.c
@@ -0,0 +1,375 @@
+/*Copyright (c) 2012-2014, 2016, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of The Linux Foundation nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.*/
+
#define LOG_NDEBUG 0
#define LOG_NIDEBUG 0
#define LOG_TAG "qomx_image_core"

// System dependencies
#include <dlfcn.h>
#include <malloc.h>
#include <string.h>
#include <utils/Log.h>

// OpenMAX dependencies
#include "qomx_core.h"

// NOTE(review): only consumer was an unused local buffer in OMX_GetHandle
#define BUFF_SIZE 255

// Singleton core object, its user reference count, and the lock
// guarding both (file scope only).
static omx_core_t *g_omxcore;
static pthread_mutex_t g_omxcore_lock = PTHREAD_MUTEX_INITIALIZER;
static int g_omxcore_cnt = 0;

//Map the component name to the library (.so) that implements it
static const comp_info_t g_comp_info[] =
{
  { "OMX.qcom.image.jpeg.encoder", "libqomx_jpegenc.so" },
  { "OMX.qcom.image.jpeg.decoder", "libqomx_jpegdec.so" },
  { "OMX.qcom.image.jpeg.encoder_pipeline", "libqomx_jpegenc_pipe.so" }
};

static int get_idx_from_handle(OMX_IN OMX_HANDLETYPE *ahComp, int *acompIndex,
  int *ainstanceIndex);
+
+/*==============================================================================
+* Function : OMX_Init
+* Parameters: None
+* Description: This is the first call that is made to the OMX Core
+* and initializes the OMX IL core
+==============================================================================*/
+OMX_API OMX_ERRORTYPE OMX_APIENTRY OMX_Init()
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  int i = 0;
+  int comp_cnt = sizeof(g_comp_info)/sizeof(g_comp_info[0]);
+
+  pthread_mutex_lock(&g_omxcore_lock);
+
+  /* check if core is created */
+  if (g_omxcore) {
+    g_omxcore_cnt++;
+    pthread_mutex_unlock(&g_omxcore_lock);
+    return rc;
+  }
+
+  if (comp_cnt > OMX_COMP_MAX_NUM) {
+    ALOGE("%s:%d] cannot exceed max number of components",
+      __func__, __LINE__);
+    pthread_mutex_unlock(&g_omxcore_lock);
+    return OMX_ErrorUndefined;
+  }
+  /* create new global object */
+  g_omxcore = malloc(sizeof(omx_core_t));
+  if (g_omxcore) {
+    memset(g_omxcore, 0x0, sizeof(omx_core_t));
+
+    /* populate the library name and component name */
+    for (i = 0; i < comp_cnt; i++) {
+      g_omxcore->component[i].comp_name = g_comp_info[i].comp_name;
+      g_omxcore->component[i].lib_name = g_comp_info[i].lib_name;
+    }
+    g_omxcore->comp_cnt = comp_cnt;
+    g_omxcore_cnt++;
+  } else {
+    rc = OMX_ErrorInsufficientResources;
+  }
+  pthread_mutex_unlock(&g_omxcore_lock);
+  ALOGI("%s:%d] Complete %d", __func__, __LINE__, comp_cnt);
+  return rc;
+}
+
+/*==============================================================================
+* Function : OMX_Deinit
+* Parameters: None
+* Return Value : OMX_ERRORTYPE
+* Description: Deinit all the OMX components
+==============================================================================*/
+OMX_API OMX_ERRORTYPE OMX_APIENTRY OMX_Deinit()
+{
+  pthread_mutex_lock(&g_omxcore_lock);
+
+  if (g_omxcore_cnt == 1) {
+    if (g_omxcore) {
+      free(g_omxcore);
+      g_omxcore = NULL;
+    }
+  }
+  if (g_omxcore_cnt) {
+    g_omxcore_cnt--;
+  }
+
+  ALOGI("%s:%d] Complete", __func__, __LINE__);
+  pthread_mutex_unlock(&g_omxcore_lock);
+  return OMX_ErrorNone;
+}
+
+/*==============================================================================
+* Function : get_comp_from_list
+* Parameters: componentName
+* Return Value : component_index
+* Description: If the componnt is already present in the list, return the
+* component index. If not return the next index to create the component.
+==============================================================================*/
+static int get_comp_from_list(char *comp_name)
+{
+  int index = -1, i = 0;
+
+  if (NULL == comp_name)
+    return -1;
+
+  for (i = 0; i < g_omxcore->comp_cnt; i++) {
+    if (!strcmp(g_omxcore->component[i].comp_name, comp_name)) {
+      index = i;
+      break;
+    }
+  }
+  return index;
+}
+
+/*==============================================================================
+* Function : get_free_inst_idx
+* Parameters: p_comp
+* Return Value : The next instance index if available
+* Description: Get the next available index for to store the new instance of the
+*            component being created.
+*============================================================================*/
+static int get_free_inst_idx(omx_core_component_t *p_comp)
+{
+  int idx = -1, i = 0;
+
+  for (i = 0; i < OMX_COMP_MAX_INSTANCES; i++) {
+    if (NULL == p_comp->handle[i]) {
+      idx = i;
+      break;
+    }
+  }
+  return idx;
+}
+
+/*==============================================================================
+* Function : OMX_GetHandle
+* Parameters: handle, componentName, appData, callbacks
+* Return Value : OMX_ERRORTYPE
+* Description: Construct and load the requested omx library
+==============================================================================*/
+OMX_API OMX_ERRORTYPE OMX_APIENTRY OMX_GetHandle(
+  OMX_OUT OMX_HANDLETYPE* handle,
+  OMX_IN OMX_STRING componentName,
+  OMX_IN OMX_PTR appData,
+  OMX_IN OMX_CALLBACKTYPE* callBacks)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  int comp_idx = 0, inst_idx = 0;
+  char libName[BUFF_SIZE] = {0};
+  void *p_obj = NULL;
+  OMX_COMPONENTTYPE *p_comp = NULL;
+  omx_core_component_t *p_core_comp = NULL;
+  OMX_BOOL close_handle = OMX_FALSE;
+
+  if (NULL == handle) {
+    ALOGE("%s:%d] Error invalid input ", __func__, __LINE__);
+    return OMX_ErrorBadParameter;
+  }
+
+  pthread_mutex_lock(&g_omxcore_lock);
+
+  comp_idx = get_comp_from_list(componentName);
+  if (comp_idx < 0) {
+    ALOGE("%s:%d] Cannot find the component", __func__, __LINE__);
+    pthread_mutex_unlock(&g_omxcore_lock);
+    return OMX_ErrorInvalidComponent;
+  }
+  p_core_comp = &g_omxcore->component[comp_idx];
+
+  *handle = NULL;
+
+  //If component already present get the instance index
+  inst_idx = get_free_inst_idx(p_core_comp);
+  if (inst_idx < 0) {
+    ALOGE("%s:%d] Cannot alloc new instance", __func__, __LINE__);
+    rc = OMX_ErrorInvalidComponent;
+    goto error;
+  }
+
+  if (FALSE == p_core_comp->open) {
+    /* load the library */
+    p_core_comp->lib_handle = dlopen(p_core_comp->lib_name, RTLD_NOW);
+    if (NULL == p_core_comp->lib_handle) {
+      ALOGE("%s:%d] Cannot load the library", __func__, __LINE__);
+      rc = OMX_ErrorInvalidComponent;
+      goto error;
+    }
+
+    p_core_comp->open = TRUE;
+    /* Init the component and get component functions */
+    p_core_comp->create_comp_func = dlsym(p_core_comp->lib_handle,
+      "create_component_fns");
+    p_core_comp->get_instance = dlsym(p_core_comp->lib_handle, "getInstance");
+
+    close_handle = OMX_TRUE;
+    if (!p_core_comp->create_comp_func || !p_core_comp->get_instance) {
+      ALOGE("%s:%d] Cannot maps the symbols", __func__, __LINE__);
+      rc = OMX_ErrorInvalidComponent;
+      goto error;
+    }
+  }
+
+  /* Call the function from the address to create the obj */
+  p_obj = (*p_core_comp->get_instance)();
+  ALOGI("%s:%d] get instance pts is %p", __func__, __LINE__, p_obj);
+  if (NULL == p_obj) {
+    ALOGE("%s:%d] Error cannot create object", __func__, __LINE__);
+    rc = OMX_ErrorInvalidComponent;
+    goto error;
+  }
+
+  /* Call the function from the address to get the func ptrs */
+  p_comp = (*p_core_comp->create_comp_func)(p_obj);
+  if (NULL == p_comp) {
+    ALOGE("%s:%d] Error cannot create component", __func__, __LINE__);
+    rc = OMX_ErrorInvalidComponent;
+    goto error;
+  }
+
+  *handle = p_core_comp->handle[inst_idx] = (OMX_HANDLETYPE)p_comp;
+
+  ALOGD("%s:%d] handle = %p Instanceindex = %d,"
+    "comp_idx %d g_ptr %p", __func__, __LINE__,
+    p_core_comp->handle[inst_idx], inst_idx,
+    comp_idx, g_omxcore);
+
+  p_comp->SetCallbacks(p_comp, callBacks, appData);
+  pthread_mutex_unlock(&g_omxcore_lock);
+  ALOGI("%s:%d] Success", __func__, __LINE__);
+  return OMX_ErrorNone;
+
+error:
+
+  if (OMX_TRUE == close_handle) {
+    dlclose(p_core_comp->lib_handle);
+    p_core_comp->lib_handle = NULL;
+  }
+  pthread_mutex_unlock(&g_omxcore_lock);
+  ALOGE("%s:%d] Error %d", __func__, __LINE__, rc);
+  return rc;
+}
+
+/*==============================================================================
+* Function : getIndexFromComponent
+* Parameters: handle,
+* Return Value : Component present - true or false, Instance Index, Component
+* Index
+* Description: Check if the handle is present in the list and get the component
+* index and instance index for the component handle.
+==============================================================================*/
+static int get_idx_from_handle(OMX_IN OMX_HANDLETYPE *ahComp, int *aCompIdx,
+  int *aInstIdx)
+{
+  int i = 0, j = 0;
+  for (i = 0; i < g_omxcore->comp_cnt; i++) {
+    for (j = 0; j < OMX_COMP_MAX_INSTANCES; j++) {
+      if ((OMX_COMPONENTTYPE *)g_omxcore->component[i].handle[j] ==
+        (OMX_COMPONENTTYPE *)ahComp) {
+        ALOGD("%s:%d] comp_idx %d inst_idx %d", __func__, __LINE__, i, j);
+        *aCompIdx = i;
+        *aInstIdx = j;
+        return TRUE;
+      }
+    }
+  }
+  return FALSE;
+}
+
+/*==============================================================================
+* Function : is_comp_active
+* Parameters: p_core_comp
+* Return Value : int
+* Description: Check if the component has any active instances
+==============================================================================*/
+static uint8_t is_comp_active(omx_core_component_t *p_core_comp)
+{
+  uint8_t i = 0;
+  for (i = 0; i < OMX_COMP_MAX_INSTANCES; i++) {
+    if (NULL != p_core_comp->handle[i]) {
+      return TRUE;
+    }
+  }
+  return FALSE;
+}
+
+/*==============================================================================
+* Function : OMX_FreeHandle
+* Parameters: hComp
+* Return Value : OMX_ERRORTYPE
+* Description: Deinit the omx component and remove it from the global list
+==============================================================================*/
+OMX_API OMX_ERRORTYPE OMX_APIENTRY OMX_FreeHandle(
+  OMX_IN OMX_HANDLETYPE hComp)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  int comp_idx, inst_idx;
+  OMX_COMPONENTTYPE *p_comp = NULL;
+  omx_core_component_t *p_core_comp = NULL;
+
+  ALOGV("%s:%d] ", __func__, __LINE__);
+  if (hComp == NULL) {
+    return OMX_ErrorBadParameter;
+  }
+
+  pthread_mutex_lock(&g_omxcore_lock);
+
+  p_comp = (OMX_COMPONENTTYPE *)hComp;
+  if (FALSE == get_idx_from_handle(hComp, &comp_idx, &inst_idx)) {
+    ALOGE("%s:%d] Error invalid component", __func__, __LINE__);
+    pthread_mutex_unlock(&g_omxcore_lock);
+    return OMX_ErrorInvalidComponent;
+  }
+
+
+  //Deinit the component;
+  rc = p_comp->ComponentDeInit(hComp);
+  if (rc != OMX_ErrorNone) {
+    /* Remove the handle from the comp structure */
+    ALOGE("%s:%d] Error comp deinit failed", __func__, __LINE__);
+    pthread_mutex_unlock(&g_omxcore_lock);
+    return OMX_ErrorInvalidComponent;
+  }
+  p_core_comp = &g_omxcore->component[comp_idx];
+  p_core_comp->handle[inst_idx] = NULL;
+  if (!is_comp_active(p_core_comp)) {
+    rc = dlclose(p_core_comp->lib_handle);
+    p_core_comp->lib_handle = NULL;
+    p_core_comp->get_instance = NULL;
+    p_core_comp->create_comp_func = NULL;
+    p_core_comp->open = FALSE;
+  } else {
+    ALOGI("%s:%d] Error Component is still Active", __func__, __LINE__);
+  }
+  pthread_mutex_unlock(&g_omxcore_lock);
+  ALOGV("%s:%d] Success", __func__, __LINE__);
+  return rc;
+}
diff --git a/msmcobalt/mm-image-codec/qomx_core/qomx_core.h b/msmcobalt/mm-image-codec/qomx_core/qomx_core.h
new file mode 100644
index 0000000..8cac80f
--- /dev/null
+++ b/msmcobalt/mm-image-codec/qomx_core/qomx_core.h
@@ -0,0 +1,97 @@
+/*Copyright (c) 2012, 2016, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of The Linux Foundation nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.*/
+
#ifndef QOMX_CORE_H
#define QOMX_CORE_H

// To remove (after PanoNative is updated)
#include <string.h>

// System dependencies
#include <pthread.h>

// OpenMAX dependencies
#include "OMX_Component.h"

#define TRUE 1
#define FALSE 0
/* Maximum concurrent instances of a single component */
#define OMX_COMP_MAX_INSTANCES 3
/* Maximum roles a component may advertise */
#define OMX_CORE_MAX_ROLES 1
/* Maximum number of components registered with the core */
#define OMX_COMP_MAX_NUM 3
/* OMX spec version packed into 32 bits (0x00000101) */
#define OMX_SPEC_VERSION 0x00000101

/* Factory: returns a new component instance object ("getInstance" symbol) */
typedef void *(*get_instance_t)(void);
/* Binds an instance object to an OMX function table
   ("create_component_fns" symbol) */
typedef void *(*create_comp_func_t)(OMX_PTR aobj);

/** comp_info_t: Structure containing the mapping
*    between the component name and the corresponding .so name
*    @comp_name: name of the component
*    @lib_name: name of the .so library implementing it
**/
typedef struct comp_info_t {
  char *comp_name;
  char *lib_name;
} comp_info_t;

/** omx_core_component_t: OMX Component structure
*    @handle: per-instance handles (a NULL slot is free)
*    @roles: array of roles played by the component
*    @name: component name -- NOTE(review): lookups use @comp_name
*      below; this member appears unused
*    @open: non-zero once the library has been dlopen'd
*    @lib_handle: library handle returned by dlopen
*    @get_instance: resolved "getInstance" symbol
*    @create_comp_func: resolved "create_component_fns" symbol
*    @comp_name: name of the component
*    @lib_name: name of the .so library
**/
typedef struct _omx_core_component_t {
  OMX_HANDLETYPE *handle[OMX_COMP_MAX_INSTANCES];  //Instance handle
  char *roles[OMX_CORE_MAX_ROLES];  //Roles played by the component
  char *name;  //Component Name
  uint8_t open;  //Is component active
  void *lib_handle;
  get_instance_t get_instance;
  create_comp_func_t create_comp_func;
  char *comp_name;
  char *lib_name;
} omx_core_component_t;

/** omx_core_t: Global structure that contains all the active
*   components
*    @component: array of components
*    @comp_cnt: number of valid entries in @component
*    @core_lock: lock to synchronize omx core operations --
*      NOTE(review): qomx_core.c guards the core with a file-scope
*      mutex instead; confirm whether this member is still needed
**/
typedef struct _omx_core_t {
  omx_core_component_t component[OMX_COMP_MAX_NUM];  //Array of pointers to components
  int comp_cnt;
  pthread_mutex_t core_lock;
} omx_core_t;

#endif
diff --git a/msmcobalt/usbcamcore/inc/QCameraMjpegDecode.h b/msmcobalt/usbcamcore/inc/QCameraMjpegDecode.h
new file mode 100755
index 0000000..b04182b
--- /dev/null
+++ b/msmcobalt/usbcamcore/inc/QCameraMjpegDecode.h
@@ -0,0 +1,49 @@
+/* Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
#ifndef __QCAMERA_MJPEG_DECODE_H
#define __QCAMERA_MJPEG_DECODE_H

/* Return type shared by all MJPEG decoder entry points;
 * MJPEGD_NO_ERROR (0) indicates success. */
typedef int MJPEGD_ERR;
#define MJPEGD_NO_ERROR          0
#define MJPEGD_ERROR            -1
#define MJPEGD_INSUFFICIENT_MEM -2

/* Create a decoder context and return it through the out-parameter. */
MJPEGD_ERR mjpegDecoderInit(void**);

/* Release a decoder context previously obtained from mjpegDecoderInit. */
MJPEGD_ERR mjpegDecoderDestroy(void* mjpegd);

/* Decode one MJPEG frame into caller-provided Y and UV planes.
 * @mjpegd          decoder context from mjpegDecoderInit
 * @mjpegBuffer     compressed MJPEG input
 * @mjpegBufferSize size of mjpegBuffer in bytes
 * @outputYptr      destination for the luma plane
 * @outputUVptr     destination for the chroma plane(s)
 * @outputFormat    output pixel layout -- NOTE(review): legal values are
 *                  defined by the implementation; confirm against the .c
 */
MJPEGD_ERR mjpegDecode(
            void*   mjpegd,
            char*   mjpegBuffer,
            int     mjpegBufferSize,
            char*   outputYptr,
            char*   outputUVptr,
            int     outputFormat);

#endif /* __QCAMERA_MJPEG_DECODE_H */
diff --git a/msmcobalt/usbcamcore/inc/QCameraUsbParm.h b/msmcobalt/usbcamcore/inc/QCameraUsbParm.h
new file mode 100755
index 0000000..595bf42
--- /dev/null
+++ b/msmcobalt/usbcamcore/inc/QCameraUsbParm.h
@@ -0,0 +1,178 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef ANDROID_HARDWARE_QCAMERA_USB_PARM_H
+#define ANDROID_HARDWARE_QCAMERA_USB_PARM_H
+
+
+#include <utils/threads.h>
+#include <hardware/camera.h>
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <utils/threads.h>
+#include <cutils/properties.h>
+#include <camera/Camera.h>
+#include <camera/QCameraParameters.h>
+#include <system/window.h>
+#include <system/camera.h>
+#include <hardware/camera.h>
+#include <gralloc_priv.h>
+#include <hardware/power.h>
+
+extern "C" {
+#include <linux/android_pmem.h>
+#include <linux/msm_ion.h>
+#include <camera.h>
+#include <camera_defs_i.h>
+} //extern C
+
+//Error codes
+#define NOT_FOUND       -1
+
+/******************************************************************************
+* Macro definitions
+******************************************************************************/
+/* enum definitions for picture formats */
+static const int PICTURE_FORMAT_JPEG = 1;
+static const int PICTURE_FORMAT_RAW = 2;
+
+/* Default preview width in pixels */
+#define DEFAULT_USBCAM_PRVW_WD  1280//640
+
+/* Default preview height in pixels */
+#define DEFAULT_USBCAM_PRVW_HT  720//480
+
+/* Default picture format */
+#define DEFAULT_USBCAM_PICT_FMT     PICTURE_FORMAT_JPEG
+
+/* Default picture width in pixels */
+#define DEFAULT_USBCAM_PICT_WD  640
+
+/* Default picture height in pixels */
+#define DEFAULT_USBCAM_PICT_HT  480
+
+/* Default picture JPEG quality 0-100 */
+#define DEFAULT_USBCAM_PICT_QLTY  85
+
+/* Default thumbnail width in pixels */
+#define DEFAULT_USBCAM_THUMBNAIL_WD    432
+
+/* Default thumbnail height in pixels */
+#define DEFAULT_USBCAM_THUMBNAIL_HT    288
+
+/* Default thumbnail JPEG quality 0-100 */
+#define DEFAULT_USBCAM_THUMBNAIL_QLTY  85
+
+/* Default preview format */
+#define DEFAULT_USBCAM_PRVW_FMT HAL_PIXEL_FORMAT_YCrCb_420_SP
+
+/* minimum of the default preview fps range in milli-Hz */
+#define MIN_PREV_FPS            5000
+
+/* maximum of the default preview fps range in milli-Hz */
+#define MAX_PREV_FPS            121000
+
+//for histogram stats
+#define HISTOGRAM_STATS_SIZE 257
+#define NUM_HISTOGRAM_BUFFERS 3
+
+namespace android {
+
+/******************************************************************************
+* Structure definitions
+******************************************************************************/
+typedef struct {
+    uint32_t aspect_ratio;
+    uint32_t width;
+    uint32_t height;
+} thumbnail_size_type;
+
+/******************************************************************************
+ * Function: usbCamInitDefaultParameters
+ * Description: This function sets default parameters to camera HAL context
+ *
+ * Input parameters:
+ *   camHal              - camera HAL handle
+ *
+ * Return values:
+ *      0   No error
+ *      -1  Error
+ *
+ * Notes: none
+ *****************************************************************************/
+int usbCamInitDefaultParameters(camera_hardware_t *camHal);
+
+/******************************************************************************
+ * Function: usbCamSetParameters
+ * Description: This function parses the parameter string and stores the
+ *              parameters in the camera HAL handle
+ *
+ * Input parameters:
+ *  camHal              - camera HAL handle
+ *  params              - pointer to parameter string
+ *
+ * Return values:
+ *      0   Success
+ *      -1  Error
+ * Notes: none
+ *****************************************************************************/
+int usbCamSetParameters(camera_hardware_t *camHal, const char *params);
+
+/******************************************************************************
+ * Function: usbCamGetParameters
+ * Description: This function allocates memory for parameter string,
+ *              composes and returns the parameter string
+ *
+ * Input parameters:
+ *   camHal             - camera HAL handle
+ *
+ * Return values:
+ *      Address to the parameter string
+ *
+ * Notes: none
+ *****************************************************************************/
+char* usbCamGetParameters(camera_hardware_t *camHal);
+
+/******************************************************************************
+ * Function: usbCamPutParameters
+ * Description: This function frees the memory allocated for parameter string
+ *
+ * Input parameters:
+ *  camHal              - camera HAL handle
+ *  parms               - Parameter string
+ *
+ * Return values:
+ *      0   Success
+ *      -1  Error
+ * Notes: none
+ *****************************************************************************/
+void usbCamPutParameters(camera_hardware_t *camHal, char *parms);
+
+}; // namespace android
+
+#endif /* ANDROID_HARDWARE_QCAMERA_USB_PARM_H */
diff --git a/msmcobalt/usbcamcore/inc/QCameraUsbPriv.h b/msmcobalt/usbcamcore/inc/QCameraUsbPriv.h
new file mode 100755
index 0000000..9b60c11
--- /dev/null
+++ b/msmcobalt/usbcamcore/inc/QCameraUsbPriv.h
@@ -0,0 +1,202 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef ANDROID_HARDWARE_QCAMERA_USB_PRIV_H
+#define ANDROID_HARDWARE_QCAMERA_USB_PRIV_H
+
+namespace android {
+
+/* File name length in number of characters */
+#define FILENAME_LENGTH     (256)
+
+/* Number of display buffers (in addition to minimum number of undequed buffers */
+#define PRVW_DISP_BUF_CNT   2
+
+/* Number of V4L2 capture  buffers. */
+#define PRVW_CAP_BUF_CNT    4
+
+/* Maximum buffer size for JPEG output in number of bytes */
+#define MAX_JPEG_BUFFER_SIZE    (1024 * 1024)
+
+/* Preview loop commands */
+#define USB_CAM_PREVIEW_EXIT    (0x100)
+#define USB_CAM_PREVIEW_PAUSE   (0x101)
+#define USB_CAM_PREVIEW_TAKEPIC (0x200)
+
+/******************************************************************************
+ * Macro function to input validate device handle
+ *****************************************************************************/
+#define VALIDATE_DEVICE_HDL(camHal, device, ret_err_code)     {\
+    if(device && device->priv){\
+        camHal = (camera_hardware_t *)device->priv;\
+    }else{\
+        ALOGE("%s: Null device or device->priv", __func__);\
+        return ret_err_code;\
+    }\
+}\
+
+/******************************************************************************
+ * Macro function to check return status of a function, log and exit the thread
+ *****************************************************************************/
+#define ERROR_CHECK_EXIT_THREAD(rc, string)    {\
+    if(rc < 0) {\
+        ALOGE("%s: Error %s", __func__, string);\
+        return (void *)-1;\
+    }\
+}
+
+/******************************************************************************
+ * Macro function to check return status of a function, log and exit
+ *****************************************************************************/
+#define ERROR_CHECK_EXIT(rc, string)    {\
+    if(rc < 0) {\
+        ALOGE("%s: Error %s", __func__, string);\
+        return -1;\
+    }\
+}
+
+/******************************************************************************
+* Macro function to Print the parameter string 1000 characters at a time
+******************************************************************************/
+#define PRINT_PARAM_STR(parms)    {\
+        char temp[1001] = {0};\
+        int n=0;\
+        while(1) {\
+            strlcpy(temp,parms+n,1000);\
+            ALOGD("parms = %s", temp);\
+            if (strlen(temp) < 1000) break;\
+            n += 1000;\
+        }\
+    }\
+
+/******************************************************************************
+ * Macro function to open camera
+ *****************************************************************************/
+/* NOTE(review): open(2) returns -1 on failure, and 0 is a valid file
+ * descriptor, so the original "if(!camHal->fd)" test could never detect
+ * an error.  Check for a negative return instead, and normalize a failed
+ * open back to the fd == 0 "not open" sentinel that USB_CAM_CLOSE and the
+ * rest of this HAL rely on. */
+#define USB_CAM_OPEN(camHal)    {\
+        camHal->fd = open(camHal->dev_name, O_RDWR | O_NONBLOCK, 0);\
+        if(camHal->fd < 0){\
+            ALOGE("%s: Error in open", __func__);\
+            camHal->fd = 0;\
+        }\
+        else\
+            ALOGD("%s: Successfully opened", __func__);\
+        }\
+
+/******************************************************************************
+ * Macro function to close camera
+ *****************************************************************************/
+/* Closes camHal->fd when it is non-zero and resets it to 0 on success.
+ * NOTE(review): this macro treats fd == 0 as the "not open" sentinel
+ * (fd 0 itself is a valid descriptor); see USB_CAM_OPEN for the matching
+ * open-side convention. */
+#define USB_CAM_CLOSE(camHal) {\
+        int rc;\
+        if(camHal->fd){\
+            rc = close(camHal->fd);\
+            if(0 > rc){\
+                ALOGE("%s: close failed ", __func__);\
+            }\
+            else{\
+                camHal->fd = 0;\
+                ALOGD("%s: close successful", __func__);\
+            }\
+        }\
+    }\
+
+struct bufObj {
+    void    *data;
+    int     len;
+};
+
+typedef struct {
+    camera_device                       hw_dev;
+    Mutex                               lock;
+    int                                 previewEnabledFlag;
+    int                                 prvwStoppedForPicture;
+    int                                 msgEnabledFlag;
+    volatile int                        prvwCmdPending;
+    volatile int                        prvwCmd;
+    pthread_t                           previewThread;
+    pthread_t                           takePictureThread;
+
+    camera_notify_callback              notify_cb;
+    camera_data_callback                data_cb;
+    camera_data_timestamp_callback      data_cb_timestamp;
+    camera_request_memory               get_memory;
+    void*                               cb_ctxt;
+
+    /* capture related members */
+    /* prevFormat is pixel format of preview buffers that are exported */
+    int                                 prevFormat;
+    int                                 prevFps;
+    int                                 prevWidth;
+    int                                 prevHeight;
+    /* captureFormat is internal setting for USB camera buffers */
+    int                                 captureFormat;
+    char                                dev_name[FILENAME_LENGTH];
+    int                                 fd;
+    unsigned int                        n_buffers;
+    struct v4l2_buffer                  curCaptureBuf;
+    struct bufObj                       *buffers;
+
+    /* Display related members */
+    preview_stream_ops*                 window;
+    QCameraHalMemory_t                  previewMem;
+    /* dispFormat is preview display format.Same as preview buffer format*/
+    int                                 dispFormat;
+    int                                 dispWidth;
+    int                                 dispHeight;
+
+    /* MJPEG decoder related members */
+    /* MJPEG decoder object */
+    void*                               mjpegd;
+
+    /* JPEG picture and thumbnail related members */
+    int                                 pictFormat;
+    int                                 pictWidth;
+    int                                 pictHeight;
+    int                                 pictJpegQlty;
+    int                                 thumbnailWidth;
+    int                                 thumbnailHeight;
+    int                                 thumbnailJpegQlty;
+    QCameraHalMemory_t                  pictMem;
+    int                                 takePictInProgress;
+    int                                 jpegEncInProgress;
+    pthread_mutex_t                     jpegEncMutex;
+    pthread_cond_t                      jpegEncCond;
+
+    /* */
+    QCameraParameters                   qCamParams;
+    String8                             prevSizeValues;
+    String8                             pictSizeValues;
+    String8                             thumbnailSizeValues;
+    String8                             vidSizeValues;
+    String8                             pictFormatValues;
+    String8                             prevFormatValues;
+    String8                             prevFpsRangesValues;
+
+} camera_hardware_t;
+
+
+}; // namespace android
+
+#endif /* ANDROID_HARDWARE_QCAMERA_USB_PRIV_H */
diff --git a/msmcobalt/usbcamcore/inc/QualcommUsbCamera.h b/msmcobalt/usbcamcore/inc/QualcommUsbCamera.h
new file mode 100755
index 0000000..e389c76
--- /dev/null
+++ b/msmcobalt/usbcamcore/inc/QualcommUsbCamera.h
@@ -0,0 +1,243 @@
+/* Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef ANDROID_HARDWARE_QUALCOMM_CAMERA_USBCAM_H
+#define ANDROID_HARDWARE_QUALCOMM_CAMERA_USBCAM_H
+
+extern "C" {
+#include <sys/time.h>
+}
+
+#include "QCameraHWI.h"
+
+extern "C" {
+
+/*#include <hardware/camera.h>*/
+
+    int usbcam_get_number_of_cameras();
+    int usbcam_get_camera_info(int camera_id, struct camera_info *info);
+
+    int usbcam_camera_device_open(const struct hw_module_t* module, const char* id,
+          struct hw_device_t** device);
+
+    hw_device_t * usbcam_open_camera_device(int cameraId);
+
+    int usbcam_close_camera_device( hw_device_t *);
+
+namespace android {
+
+    /** Set the ANativeWindow to which preview frames are sent */
+    int usbcam_set_preview_window(struct camera_device *,
+          struct preview_stream_ops *window);
+
+    /** Set the notification and data callbacks */
+    void usbcam_set_CallBacks(struct camera_device *,
+          camera_notify_callback notify_cb,
+          camera_data_callback data_cb,
+          camera_data_timestamp_callback data_cb_timestamp,
+          camera_request_memory get_memory,
+          void *user);
+
+    /**
+     * The following three functions all take a msg_type, which is a bitmask of
+     * the messages defined in include/ui/Camera.h
+     */
+
+    /**
+     * Enable a message, or set of messages.
+     */
+    void usbcam_enable_msg_type(struct camera_device *, int32_t msg_type);
+
+    /**
+     * Disable a message, or a set of messages.
+     *
+     * Once received a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), camera
+     * HAL should not rely on its client to call releaseRecordingFrame() to
+     * release video recording frames sent out by the cameral HAL before and
+     * after the disableMsgType(CAMERA_MSG_VIDEO_FRAME) call. Camera HAL
+     * clients must not modify/access any video recording frame after calling
+     * disableMsgType(CAMERA_MSG_VIDEO_FRAME).
+     */
+    void usbcam_disable_msg_type(struct camera_device *, int32_t msg_type);
+
+    /**
+     * Query whether a message, or a set of messages, is enabled.  Note that
+     * this is operates as an AND, if any of the messages queried are off, this
+     * will return false.
+     */
+    int usbcam_msg_type_enabled(struct camera_device *, int32_t msg_type);
+
+    /**
+     * Start preview mode.
+     */
+    int usbcam_start_preview(struct camera_device *);
+
+    /**
+     * Stop a previously started preview.
+     */
+    void usbcam_stop_preview(struct camera_device *);
+
+    /**
+     * Returns true if preview is enabled.
+     */
+    int usbcam_preview_enabled(struct camera_device *);
+
+    /**
+     * Request the camera HAL to store meta data or real YUV data in the video
+     * buffers sent out via CAMERA_MSG_VIDEO_FRAME for a recording session. If
+     * it is not called, the default camera HAL behavior is to store real YUV
+     * data in the video buffers.
+     *
+     * This method should be called before startRecording() in order to be
+     * effective.
+     *
+     * If meta data is stored in the video buffers, it is up to the receiver of
+     * the video buffers to interpret the contents and to find the actual frame
+     * data with the help of the meta data in the buffer. How this is done is
+     * outside of the scope of this method.
+     *
+     * Some camera HALs may not support storing meta data in the video buffers,
+     * but all camera HALs should support storing real YUV data in the video
+     * buffers. If the camera HAL does not support storing the meta data in the
+     * video buffers when it is requested to do do, INVALID_OPERATION must be
+     * returned. It is very useful for the camera HAL to pass meta data rather
+     * than the actual frame data directly to the video encoder, since the
+     * amount of the uncompressed frame data can be very large if video size is
+     * large.
+     *
+     * @param enable if true to instruct the camera HAL to store
+     *        meta data in the video buffers; false to instruct
+     *        the camera HAL to store real YUV data in the video
+     *        buffers.
+     *
+     * @return OK on success.
+     */
+    int usbcam_store_meta_data_in_buffers(struct camera_device *, int enable);
+
+    /**
+     * Start record mode. When a record image is available, a
+     * CAMERA_MSG_VIDEO_FRAME message is sent with the corresponding
+     * frame. Every record frame must be released by a camera HAL client via
+     * releaseRecordingFrame() before the client calls
+     * disableMsgType(CAMERA_MSG_VIDEO_FRAME). After the client calls
+     * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's
+     * responsibility to manage the life-cycle of the video recording frames,
+     * and the client must not modify/access any video recording frames.
+     */
+    int usbcam_start_recording(struct camera_device *);
+
+    /**
+     * Stop a previously started recording.
+     */
+    void usbcam_stop_recording(struct camera_device *);
+
+    /**
+     * Returns true if recording is enabled.
+     */
+    int usbcam_recording_enabled(struct camera_device *);
+
+    /**
+     * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
+     *
+     * It is camera HAL client's responsibility to release video recording
+     * frames sent out by the camera HAL before the camera HAL receives a call
+     * to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives the call to
+     * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's
+     * responsibility to manage the life-cycle of the video recording frames.
+     */
+    void usbcam_release_recording_frame(struct camera_device *,
+                  const void *opaque);
+
+    /**
+     * Start auto focus, the notification callback routine is called with
+     * CAMERA_MSG_FOCUS once when focusing is complete. autoFocus() will be
+     * called again if another auto focus is needed.
+     */
+    int usbcam_auto_focus(struct camera_device *);
+
+    /**
+     * Cancels auto-focus function. If the auto-focus is still in progress,
+     * this function will cancel it. Whether the auto-focus is in progress or
+     * not, this function will return the focus position to the default.  If
+     * the camera does not support auto-focus, this is a no-op.
+     */
+    int usbcam_cancel_auto_focus(struct camera_device *);
+
+    /**
+     * Take a picture.
+     */
+    int usbcam_take_picture(struct camera_device *);
+
+    /**
+     * Cancel a picture that was started with takePicture. Calling this method
+     * when no picture is being taken is a no-op.
+     */
+    int usbcam_cancel_picture(struct camera_device *);
+
+    /**
+     * Set the camera parameters. This returns BAD_VALUE if any parameter is
+     * invalid or not supported.
+     */
+    int usbcam_set_parameters(struct camera_device *, const char *parms);
+
+    //status_t setParameters(const QCameraParameters& params);
+    /** Retrieve the camera parameters.  The buffer returned by the camera HAL
+        must be returned back to it with put_parameters, if put_parameters
+        is not NULL.
+     */
+    char* usbcam_get_parameters(struct camera_device *);
+
+    /** The camera HAL uses its own memory to pass us the parameters when we
+        call get_parameters.  Use this function to return the memory back to
+        the camera HAL, if put_parameters is not NULL.  If put_parameters
+        is NULL, then you have to use free() to release the memory.
+    */
+    void usbcam_put_parameters(struct camera_device *, char *);
+
+    /**
+     * Send command to camera driver.
+     */
+    int usbcam_send_command(struct camera_device *,
+              int32_t cmd, int32_t arg1, int32_t arg2);
+
+    /**
+     * Release the hardware resources owned by this object.  Note that this is
+     * *not* done in the destructor.
+     */
+    void usbcam_release(struct camera_device *);
+
+    /**
+     * Dump state of the camera hardware
+     */
+    int usbcam_dump(struct camera_device *, int fd);
+
+}; // namespace android
+
+} //extern "C"
+
+#endif
diff --git a/msmcobalt/usbcamcore/src/QCameraMjpegDecode.cpp b/msmcobalt/usbcamcore/src/QCameraMjpegDecode.cpp
new file mode 100755
index 0000000..dcf0aa2
--- /dev/null
+++ b/msmcobalt/usbcamcore/src/QCameraMjpegDecode.cpp
@@ -0,0 +1,701 @@
+/* Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+//#define ALOG_NDEBUG 0
+#define ALOG_NIDEBUG 0
+#define LOG_TAG "QCameraMjpegDecode"
+#include <utils/Log.h>
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <pthread.h>
+
+extern "C" {
+#include "jpeg_buffer.h"
+#include "jpeg_common.h"
+#include "jpegd.h"
+}
+
+#include "QCameraMjpegDecode.h"
+
+/* TBDJ: Can be removed */
+#define MIN(a,b)  (((a) < (b)) ? (a) : (b))
+
+// Abstract the return type of the function to be run as a thread
+#define OS_THREAD_FUNC_RET_T            void *
+
+// Abstract the argument type to the thread function
+#define OS_THREAD_FUNC_ARG_T            void *
+
+// Helpful constants for return values in the thread functions
+#define OS_THREAD_FUNC_RET_SUCCEEDED    (OS_THREAD_FUNC_RET_T)0
+#define OS_THREAD_FUNC_RET_FAILED       (OS_THREAD_FUNC_RET_T)1
+
+// Abstract the function modifier placed in the beginning of the thread
+// declaration (empty for Linux)
+#define OS_THREAD_FUNC_MODIFIER
+
+#define os_mutex_init(a) pthread_mutex_init(a, NULL)
+#define os_cond_init(a)  pthread_cond_init(a, NULL)
+#define os_mutex_lock    pthread_mutex_lock
+#define os_mutex_unlock  pthread_mutex_unlock
+#define os_cond_wait     pthread_cond_wait
+#define os_cond_signal   pthread_cond_signal
+
+/* Human-readable names for jpeg_event_t codes, indexed by event value.
+ * NOTE(review): sized for 4 rows but only 3 initializers are provided;
+ * the fourth row is zero-filled -- confirm against the jpeg_event_t enum
+ * in jpeg_common.h. */
+const char event_to_string[4][14] =
+{
+    "EVENT_DONE",
+    "EVENT_WARNING",
+    "EVENT_ERROR",
+};
+
+typedef struct
+{
+    uint32_t   width;
+    uint32_t   height;
+    uint32_t   format;
+    uint32_t   preference;
+    uint32_t   abort_time;
+    uint16_t   back_to_back_count;
+    /* TBDJ: Is this required */
+    int32_t    rotation;
+    /* TBDJ: Is this required */
+    jpeg_rectangle_t region;
+    /* TBDJ: Is this required */
+    jpegd_scale_type_t scale_factor;
+    uint32_t   hw_rotation;
+
+    char*       inputMjpegBuffer;
+    int         inputMjpegBufferSize;
+    char*       outputYptr;
+    char*       outputUVptr;
+
+} test_args_t;
+
+typedef struct
+{
+    int                   tid;
+    pthread_t             thread;
+    jpegd_obj_t           decoder;
+    uint8_t               decoding;
+    uint8_t               decode_success;
+    pthread_mutex_t       mutex;
+    pthread_cond_t        cond;
+    test_args_t           *p_args;
+    jpegd_output_buf_t    *p_whole_output_buf;
+
+} thread_ctrl_blk_t;
+
+OS_THREAD_FUNC_RET_T OS_THREAD_FUNC_MODIFIER decoder_test(OS_THREAD_FUNC_ARG_T p_thread_args);
+void decoder_event_handler(void        *p_user_data,
+                           jpeg_event_t event,
+                           void        *p_arg);
+int decoder_output_handler(void               *p_user_data,
+                           jpegd_output_buf_t *p_output_buffer,
+                           uint32_t            first_row_id,
+                           uint8_t             is_last_buffer);
+uint32_t decoder_input_req_handler(void           *p_user_data,
+                                   jpeg_buffer_t   buffer,
+                                   uint32_t        start_offset,
+                                   uint32_t        length);
+static void* insertHuffmanTable(void *p, int size);
+
+static int mjpegd_timer_start(timespec *p_timer);
+static int mjpegd_timer_get_elapsed(timespec *p_timer, int *elapsed_in_ms, uint8_t reset_start);
+static int mjpegd_cond_timedwait(pthread_cond_t *p_cond, pthread_mutex_t *p_mutex, uint32_t ms);
+
+// Global variables
+/* TBDJ: can be removed */
+thread_ctrl_blk_t *thread_ctrl_blks = NULL;
+
+/*
+ * This function initializes the mjpeg decoder and returns the object
+ */
+/*
+ * This function initializes the mjpeg decoder and returns the object
+ *
+ * Allocates a zero-filled test_args_t context, fills in default decode
+ * settings, and hands the opaque pointer back through mjpegd_obj.
+ * Returns MJPEGD_NO_ERROR on success, MJPEGD_INSUFFICIENT_MEM if the
+ * allocation fails (mjpegd_obj is left untouched in that case).
+ * Ownership of the context passes to the caller.
+ */
+MJPEGD_ERR mjpegDecoderInit(void** mjpegd_obj)
+{
+    test_args_t* mjpegd;
+
+    ALOGD("%s: E", __func__);
+
+    mjpegd = (test_args_t *)malloc(sizeof(test_args_t));
+    if(!mjpegd)
+        return MJPEGD_INSUFFICIENT_MEM;
+
+    memset(mjpegd, 0, sizeof(test_args_t));
+
+    /* Defaults */
+    /* Due to current limitation, s/w decoder is selected always */
+    /* NOTE(review): the comment above says s/w is always selected, but the
+     * value below requests HW_ACCELERATED_PREFERRED -- confirm which is
+     * intended. */
+    mjpegd->preference          = JPEG_DECODER_PREF_HW_ACCELERATED_PREFERRED;
+    mjpegd->back_to_back_count  = 1;
+    mjpegd->rotation            = 0;
+    mjpegd->hw_rotation         = 0;
+    mjpegd->scale_factor        = (jpegd_scale_type_t)1;
+
+    /* TBDJ: can be removed */
+    /* Placeholder dimensions; the real frame size comes from the stream. */
+    mjpegd->width                 = 640;
+    mjpegd->height                = 480;
+    mjpegd->abort_time            = 0;
+
+    *mjpegd_obj = (void *)mjpegd;
+
+    ALOGD("%s: X", __func__);
+    return  MJPEGD_NO_ERROR;
+}
+
+/*
+ * Decodes one MJPEG frame into the caller-supplied Y and UV planes.
+ *
+ * Input parameters:
+ *   mjpegd_obj           - decoder handle from mjpegDecoderInit()
+ *   inputMjpegBuffer     - compressed MJPEG frame
+ *   inputMjpegBufferSize - size of the compressed frame in bytes
+ *   outputYptr           - destination luma plane
+ *   outputUVptr          - destination chroma plane
+ *   outputFormat         - requested output pixel format
+ *
+ * Returns 0 on success, non-zero on failure.
+ *
+ * BUGFIX(review): the original indexed thread_ctrl_blks[i] with an
+ * uninitialized `i` (leftover from a removed multi-thread test loop) --
+ * undefined behavior.  Only one control block is ever allocated, so
+ * index 0 is used.  The control block was also leaked on every call; it
+ * is now released before returning.  The unused locals `c`/`i` are gone.
+ */
+MJPEGD_ERR mjpegDecode(
+            void*   mjpegd_obj,
+            char*   inputMjpegBuffer,
+            int     inputMjpegBufferSize,
+            char*   outputYptr,
+            char*   outputUVptr,
+            int     outputFormat)
+{
+    int rc;
+    test_args_t* mjpegd;
+    test_args_t  test_args;
+
+    ALOGD("%s: E", __func__);
+    /* store input arguments in the context */
+    mjpegd = (test_args_t*) mjpegd_obj;
+    mjpegd->inputMjpegBuffer        = inputMjpegBuffer;
+    mjpegd->inputMjpegBufferSize    = inputMjpegBufferSize;
+    mjpegd->outputYptr              = outputYptr;
+    mjpegd->outputUVptr             = outputUVptr;
+    mjpegd->format                  = outputFormat;
+
+    /* TBDJ: can be removed */
+    memcpy(&test_args, mjpegd, sizeof(test_args_t));
+
+    /* These line-packed formats are only produced by the h/w path; reject
+     * them unless h/w-only decode was requested. */
+    if (((test_args.format == YCRCBLP_H1V2) || (test_args.format == YCBCRLP_H1V2) ||
+      (test_args.format == YCRCBLP_H1V1) || (test_args.format == YCBCRLP_H1V1)) &&
+      !(test_args.preference == JPEG_DECODER_PREF_HW_ACCELERATED_ONLY)) {
+        ALOGE("%s:These formats are not supported by SW format %d", __func__, test_args.format);
+        return 1;
+    }
+
+    /* Create the single thread control block (one decode at a time). */
+    thread_ctrl_blks = (thread_ctrl_blk_t *)malloc( sizeof(thread_ctrl_blk_t));
+    if (!thread_ctrl_blks)
+    {
+        ALOGE("%s: decoder_test failed: insufficient memory in creating thread control blocks", __func__);
+        return 1;
+    }
+    memset(thread_ctrl_blks, 0, sizeof(thread_ctrl_blk_t));
+    /* Initialize the block and run the decode synchronously. */
+    thread_ctrl_blks[0].tid = 0;
+    thread_ctrl_blks[0].p_args = &test_args;
+    os_mutex_init(&thread_ctrl_blks[0].mutex);
+    os_cond_init(&thread_ctrl_blks[0].cond);
+
+    rc = (int)decoder_test(&thread_ctrl_blks[0]);
+
+    if (!rc)
+        ALOGD("%s: decoder_test finished successfully ", __func__);
+    else
+        ALOGE("%s: decoder_test failed",__func__);
+
+    /* Release per-call resources (previously leaked on every frame). */
+    pthread_mutex_destroy(&thread_ctrl_blks[0].mutex);
+    pthread_cond_destroy(&thread_ctrl_blks[0].cond);
+    free(thread_ctrl_blks);
+    thread_ctrl_blks = NULL;
+
+    ALOGD("%s: X rc: %d", __func__, rc);
+
+    return rc;
+}
+
+/*
+ * Runs one complete decode session on the calling thread:
+ *  - initializes the jpegd decoder with the event/output/input callbacks,
+ *  - sets up two streaming input buffers and the destination description,
+ *  - decodes p_args->back_to_back_count frames in a loop, waiting on the
+ *    condition variable that decoder_event_handler signals (optionally
+ *    issuing jpegd_abort after p_args->abort_time ms),
+ *  - destroys all buffers and the decoder before returning.
+ *
+ * arg is a thread_ctrl_blk_t*. Returns OS_THREAD_FUNC_RET_SUCCEEDED when
+ * the last decode succeeded, OS_THREAD_FUNC_RET_FAILED otherwise.
+ */
+OS_THREAD_FUNC_RET_T OS_THREAD_FUNC_MODIFIER decoder_test(OS_THREAD_FUNC_ARG_T arg)
+{
+    int rc, i;
+    jpegd_obj_t         decoder;
+    jpegd_src_t         source;
+    jpegd_dst_t         dest;
+    jpegd_cfg_t         config;
+    jpeg_hdr_t          header;
+    jpegd_output_buf_t  p_output_buffers;
+    uint32_t            output_buffers_count = 1; // currently only 1 buffer a time is supported
+    uint8_t use_pmem = true;
+    timespec os_timer;
+    thread_ctrl_blk_t *p_thread_arg = (thread_ctrl_blk_t *)arg;
+    test_args_t *p_args = p_thread_arg->p_args;
+    uint32_t            output_width;
+    uint32_t            output_height;
+    uint32_t total_time = 0;
+
+    ALOGD("%s: E", __func__);
+
+    // Determine whether pmem should be used (useful for pc environment testing where
+    // pmem is not available)
+    if ((jpegd_preference_t)p_args->preference == JPEG_DECODER_PREF_SOFTWARE_PREFERRED ||
+        (jpegd_preference_t)p_args->preference == JPEG_DECODER_PREF_SOFTWARE_ONLY) {
+        use_pmem = false;
+    }
+
+    /* NOTE(review): this only logs "Setting scale factor to 1x" but never
+     * actually writes p_args->scale_factor — confirm whether the assignment
+     * was lost in a port. */
+    if (((jpegd_preference_t)p_args->preference !=
+      JPEG_DECODER_PREF_HW_ACCELERATED_ONLY) &&
+      ((jpegd_preference_t)p_args->scale_factor > 0)) {
+        ALOGI("%s: Setting scale factor to 1x", __func__);
+    }
+
+    ALOGD("%s: before jpegd_init p_thread_arg: %p", __func__, p_thread_arg);
+
+    // Initialize decoder
+    rc = jpegd_init(&decoder,
+                    &decoder_event_handler,
+                    &decoder_output_handler,
+                    p_thread_arg);
+
+    if (JPEG_FAILED(rc)) {
+        ALOGE("%s: decoder_test: jpegd_init failed", __func__);
+        goto fail;
+    }
+    p_thread_arg->decoder = decoder;
+
+    // Set source information
+    source.p_input_req_handler = &decoder_input_req_handler;
+    source.total_length        = p_args->inputMjpegBufferSize & 0xffffffff;
+
+    // Two 0xA000-byte input buffers are initialized/allocated; the input
+    // request handler fills them on demand from inputMjpegBuffer.
+    rc = jpeg_buffer_init(&source.buffers[0]);
+    if (JPEG_SUCCEEDED(rc)) {
+        /* TBDJ: why buffer [1] */
+        rc = jpeg_buffer_init(&source.buffers[1]);
+    }
+    if (JPEG_SUCCEEDED(rc)) {
+#if 1
+        rc = jpeg_buffer_allocate(source.buffers[0], 0xA000, use_pmem);
+#else
+        rc = jpeg_buffer_use_external_buffer(source.buffers[0],
+                                             (uint8_t *)p_args->inputMjpegBuffer,
+                                             p_args->inputMjpegBufferSize,
+                                             0);
+#endif
+        ALOGD("%s: source.buffers[0]:%p compressed buffer ptr = %p", __func__,
+              source.buffers[0], p_args->inputMjpegBuffer);
+    }
+    if (JPEG_SUCCEEDED(rc)) {
+#if 1
+        rc = jpeg_buffer_allocate(source.buffers[1], 0xA000, use_pmem);
+#else
+         rc = jpeg_buffer_use_external_buffer(source.buffers[1],
+                                             (uint8_t *)p_args->inputMjpegBuffer,
+                                             p_args->inputMjpegBufferSize,
+                                             0);
+#endif
+        ALOGD("%s: source.buffers[1]:%p compressed buffer ptr  = %p", __func__,
+              source.buffers[1], p_args->inputMjpegBuffer);
+   }
+    if (JPEG_FAILED(rc)) {
+        jpeg_buffer_destroy(&source.buffers[0]);
+        jpeg_buffer_destroy(&source.buffers[1]);
+        goto fail;
+    }
+
+   ALOGI("%s: *** Starting back-to-back decoding of %d frame(s)***\n",
+                 __func__, p_args->back_to_back_count);
+
+	 // Loop to perform n back-to-back decoding (to the same output file)
+    for(i = 0; i < p_args->back_to_back_count; i++) {
+        if(mjpegd_timer_start(&os_timer) < 0) {
+            ALOGE("%s: failed to get start time", __func__);
+        }
+
+        /* TBDJ: Every frame? */
+        ALOGD("%s: before jpegd_set_source source.p_arg:%p", __func__, source.p_arg);
+        rc = jpegd_set_source(decoder, &source);
+        if (JPEG_FAILED(rc))
+        {
+            ALOGE("%s: jpegd_set_source failed", __func__);
+            goto fail;
+        }
+
+        rc = jpegd_read_header(decoder, &header);
+        if (JPEG_FAILED(rc))
+        {
+            ALOGE("%s: jpegd_read_header failed", __func__);
+            goto fail;
+        }
+        // Dimensions come from the bitstream header, overriding the defaults
+        p_args->width = header.main.width;
+        p_args->height = header.main.height;
+        ALOGD("%s: main dimension: (%dx%d) subsampling: (%d)", __func__,
+                header.main.width, header.main.height, (int)header.main.subsampling);
+
+        // main image decoding:
+        // Set destination information
+        dest.width = (p_args->width) ? (p_args->width) : header.main.width;
+        dest.height = (p_args->height) ? (p_args->height) : header.main.height;
+        dest.output_format = (jpeg_color_format_t) p_args->format;
+        dest.region = p_args->region;
+
+        // if region is defined, re-assign the output width/height
+        output_width  = dest.width;
+        output_height = dest.height;
+
+        if (p_args->region.right || p_args->region.bottom)
+        {
+            if (0 == p_args->rotation || 180 == p_args->rotation)
+            {
+                output_width  = MIN((dest.width),
+                        (uint32_t)(dest.region.right  - dest.region.left + 1));
+                output_height = MIN((dest.height),
+                        (uint32_t)(dest.region.bottom - dest.region.top  + 1));
+            }
+            // Swap output width/height for 90/270 rotation cases
+            else if (90 == p_args->rotation || 270 == p_args->rotation)
+            {
+                output_height  = MIN((dest.height),
+                        (uint32_t)(dest.region.right  - dest.region.left + 1));
+                output_width   = MIN((dest.width),
+                        (uint32_t)(dest.region.bottom - dest.region.top  + 1));
+            }
+            // Unsupported rotation cases
+            else
+            {
+                goto fail;
+            }
+        }
+
+        // YUV formats use separate luma/chroma buffers; anything else is RGB
+        if (dest.output_format == YCRCBLP_H2V2 || dest.output_format == YCBCRLP_H2V2 ||
+            dest.output_format == YCRCBLP_H2V1 || dest.output_format == YCBCRLP_H2V1 ||
+            dest.output_format == YCRCBLP_H1V2 || dest.output_format == YCBCRLP_H1V2 ||
+            dest.output_format == YCRCBLP_H1V1 || dest.output_format == YCBCRLP_H1V1) {
+            jpeg_buffer_init(&p_output_buffers.data.yuv.luma_buf);
+            jpeg_buffer_init(&p_output_buffers.data.yuv.chroma_buf);
+        } else {
+            jpeg_buffer_init(&p_output_buffers.data.rgb.rgb_buf);
+
+        }
+
+        {
+            // Assign 0 to tile width and height
+            // to indicate that no tiling is requested.
+            p_output_buffers.tile_width  = 0;
+            p_output_buffers.tile_height = 0;
+        }
+        p_output_buffers.is_in_q = 0;
+
+        // Wrap the caller-provided output planes; only H2V2 is enabled here
+        switch (dest.output_format)
+        {
+        case YCRCBLP_H2V2:
+        case YCBCRLP_H2V2:
+//        case YCRCBLP_H2V1:
+//        case YCBCRLP_H2V1:
+//        case YCRCBLP_H1V2:
+//        case YCBCRLP_H1V2:
+//        case YCRCBLP_H1V1:
+//        case YCBCRLP_H1V1:
+            jpeg_buffer_use_external_buffer(
+               p_output_buffers.data.yuv.luma_buf,
+               (uint8_t*)p_args->outputYptr,
+               p_args->width * p_args->height * SQUARE(p_args->scale_factor),
+               0);
+            jpeg_buffer_use_external_buffer(
+                p_output_buffers.data.yuv.chroma_buf,
+                (uint8_t*)p_args->outputUVptr,
+                p_args->width * p_args->height / 2 * SQUARE(p_args->scale_factor),
+                0);
+            break;
+
+        default:
+            ALOGE("%s: decoder_test: unsupported output format", __func__);
+            goto fail;
+        }
+
+        // Set up configuration
+        memset(&config, 0, sizeof(jpegd_cfg_t));
+        config.preference = (jpegd_preference_t) p_args->preference;
+        config.decode_from = JPEGD_DECODE_FROM_AUTO;
+        config.rotation = p_args->rotation;
+        config.scale_factor = p_args->scale_factor;
+        config.hw_rotation = p_args->hw_rotation;
+        dest.back_to_back_count = p_args->back_to_back_count;
+
+        // Start decoding; decoder_event_handler clears `decoding` and
+        // signals `cond` when the decoder stops.
+        p_thread_arg->decoding = true;
+
+        rc = jpegd_start(decoder, &config, &dest, &p_output_buffers, output_buffers_count);
+        dest.back_to_back_count--;
+
+        if(JPEG_FAILED(rc)) {
+            ALOGE("%s: decoder_test: jpegd_start failed (rc=%d)\n",
+                    __func__, rc);
+            goto fail;
+        }
+
+        ALOGD("%s: decoder_test: jpegd_start succeeded", __func__);
+
+        // Do abort
+        if (p_args->abort_time) {
+            os_mutex_lock(&p_thread_arg->mutex);
+            while (p_thread_arg->decoding)
+            {
+                rc = mjpegd_cond_timedwait(&p_thread_arg->cond, &p_thread_arg->mutex, p_args->abort_time);
+                if (rc == JPEGERR_ETIMEDOUT)
+                {
+                    // Do abort
+                    os_mutex_unlock(&p_thread_arg->mutex);
+                    rc = jpegd_abort(decoder);
+                    if (rc)
+                    {
+                        ALOGE("%s: decoder_test: jpegd_abort failed: %d", __func__, rc);
+                        goto fail;
+                    }
+                    break;
+                }
+            }
+            if (p_thread_arg->decoding)
+                os_mutex_unlock(&p_thread_arg->mutex);
+        } else {
+            // Wait until decoding is done or stopped due to error
+            os_mutex_lock(&p_thread_arg->mutex);
+            while (p_thread_arg->decoding)
+            {
+                os_cond_wait(&p_thread_arg->cond, &p_thread_arg->mutex);
+            }
+            os_mutex_unlock(&p_thread_arg->mutex);
+        }
+
+        int diff;
+        // Display the time elapsed
+        if (mjpegd_timer_get_elapsed(&os_timer, &diff, 0) < 0) {
+            ALOGE("%s: decoder_test: failed to get elapsed time", __func__);
+        } else {
+            if(p_args->abort_time) {
+                if(p_thread_arg->decoding) {
+                    ALOGI("%s: decoder_test: decoding aborted successfully after %d ms", __func__, diff);
+                    goto buffer_clean_up;
+                }
+                else
+                {
+                    ALOGI("%s: decoder_test: decoding stopped before abort is issued. "
+                                    "decode time: %d ms", __func__, diff);
+                }
+            }
+            else {
+                if(p_thread_arg->decode_success) {
+                    total_time += diff;
+                    ALOGI("%s: decode time: %d ms (%d frame(s), total=%dms, avg=%dms/frame)",
+                            __func__, diff, i+1, total_time, total_time/(i+1));
+                }
+                else
+                {
+                    fprintf(stderr, "decoder_test: decode failed\n");
+                }
+            }
+        }
+    }
+
+    if(p_thread_arg->decode_success) {
+        ALOGD("%s: Frame(s) = %d, Total Time = %dms, Avg. decode time = %dms/frame)\n",
+                 __func__, p_args->back_to_back_count, total_time, total_time/p_args->back_to_back_count);
+    }
+
+buffer_clean_up:
+    // Clean up decoder and allocate buffers
+    jpeg_buffer_destroy(&source.buffers[0]);
+    jpeg_buffer_destroy(&source.buffers[1]);
+    switch (dest.output_format)
+    {
+    case YCRCBLP_H2V2:
+    case YCBCRLP_H2V2:
+    case YCRCBLP_H2V1:
+    case YCBCRLP_H2V1:
+    case YCRCBLP_H1V2:
+    case YCBCRLP_H1V2:
+    case YCRCBLP_H1V1:
+    case YCBCRLP_H1V1:
+        jpeg_buffer_destroy(&p_output_buffers.data.yuv.luma_buf);
+        jpeg_buffer_destroy(&p_output_buffers.data.yuv.chroma_buf);
+        break;
+    default:
+        break;
+    }
+    jpegd_destroy(&decoder);
+
+    if (!p_thread_arg->decode_success)
+    {
+        goto fail;
+    }
+
+    ALOGD("%s: X", __func__);
+    return OS_THREAD_FUNC_RET_SUCCEEDED;
+fail:
+
+    ALOGD("%s: X", __func__);
+    return OS_THREAD_FUNC_RET_FAILED;
+}
+
+/*
+ * Decoder event callback (runs on the decoder's thread).
+ * Records a successful decode on JPEG_EVENT_DONE; for any non-warning
+ * event the decoder has stopped, so clear the `decoding` flag and wake
+ * the thread blocked in decoder_test.
+ */
+void decoder_event_handler(void        *p_user_data,
+                           jpeg_event_t event,
+                           void        *p_arg)
+{
+    thread_ctrl_blk_t *p_ctrl = (thread_ctrl_blk_t *)p_user_data;
+
+    ALOGD("%s: E", __func__);
+
+    ALOGD("%s: Event: %s\n", __func__, event_to_string[event]);
+    if (event == JPEG_EVENT_DONE)
+    {
+        p_ctrl->decode_success = true;
+        ALOGD("%s: decode_success: %d\n", __func__, p_ctrl->decode_success);
+    }
+    // Warnings do not stop the decoder; anything else means it has
+    // stopped, so signal the waiting thread to clean up.
+    if (event != JPEG_EVENT_WARNING)
+    {
+        os_mutex_lock(&p_ctrl->mutex);
+        p_ctrl->decoding = false;
+        os_cond_signal(&p_ctrl->cond);
+        os_mutex_unlock(&p_ctrl->mutex);
+    }
+    ALOGD("%s: X", __func__);
+
+}
+
+// consumes the output buffer.
+/*TBDJ: Can be removed. Is this related to tiling */
+/*
+ * Output callback: invoked by the decoder with a filled output buffer.
+ * Only tile_height == 1 is supported; every buffer except the last one is
+ * re-queued to the decoder. Returns a JPEGERR_* status code.
+ */
+int decoder_output_handler(void *p_user_data,
+                           jpegd_output_buf_t *p_output_buffer,
+                           uint32_t first_row_id,
+                           uint8_t is_last_buffer)
+{
+    uint8_t* whole_output_buf_ptr, *tiling_buf_ptr;
+
+    ALOGD("%s: E", __func__);
+
+    thread_ctrl_blk_t *p_thread_arg = (thread_ctrl_blk_t *)p_user_data;
+
+    /* NOTE(review): the two addresses fetched below are never read in this
+     * function — presumably left over from a tile-copy path; confirm before
+     * removing the calls. */
+    jpeg_buffer_get_addr(p_thread_arg->p_whole_output_buf->data.rgb.rgb_buf, &whole_output_buf_ptr);
+    jpeg_buffer_get_addr(p_output_buffer->data.rgb.rgb_buf, &tiling_buf_ptr);
+
+    if (p_output_buffer->tile_height != 1)
+        return JPEGERR_EUNSUPPORTED;
+
+    // testing purpose only
+    // This is to simulate that the user needs to bail out when error happens
+    // in the middle of decoding
+    //if (first_row_id == 162)
+     //   return JPEGERR_EFAILED;
+
+    // do not enqueue any buffer if it reaches the last buffer
+    if (!is_last_buffer)
+    {
+        jpegd_enqueue_output_buf(p_thread_arg->decoder, p_output_buffer, 1);
+    }
+    ALOGD("%s: X", __func__);
+
+    return JPEGERR_SUCCESS;
+}
+
+//      p_reader->p_input_req_handler(p_reader->decoder,
+//                                    p_reader->p_input_buf,
+//                                    p_reader->next_byte_offset,
+//                                    MAX_BYTES_TO_FETCH);
+
+/*
+ * Input request callback: the decoder asks for up to `length` bytes of
+ * compressed data starting at `start_offset` of the stream; the bytes are
+ * copied into `buffer` (clamped to the buffer's capacity).
+ *
+ * Returns the number of bytes actually supplied.
+ */
+uint32_t decoder_input_req_handler(void           *p_user_data,
+                                   jpeg_buffer_t   buffer,
+                                   uint32_t        start_offset,
+                                   uint32_t        length)
+{
+    uint32_t buf_size;
+    uint8_t *buf_ptr;
+    int bytes_to_read, bytes_read;
+    /* A duplicate cast of p_user_data and an unused `rc` were removed here */
+    thread_ctrl_blk_t *thread_ctrl_blk = (thread_ctrl_blk_t *)p_user_data;
+    test_args_t*    mjpegd = (test_args_t*) thread_ctrl_blk->p_args;
+
+    ALOGD("%s: E", __func__);
+
+    jpeg_buffer_get_max_size(buffer, &buf_size);
+    jpeg_buffer_get_addr(buffer, &buf_ptr);
+    /* Never copy more than the destination buffer can hold */
+    bytes_to_read = (length < buf_size) ? length : buf_size;
+    bytes_read = 0;
+
+    ALOGD("%s: buf_ptr = %p, start_offset = %d, length = %d buf_size = %d bytes_to_read = %d", __func__, buf_ptr, start_offset, length, buf_size, bytes_to_read);
+    if (bytes_to_read)
+    {
+        /* TBDJ: Should avoid this Mem copy */
+#if 1
+        memcpy(buf_ptr, (char *)mjpegd->inputMjpegBuffer + start_offset, bytes_to_read);
+#else
+        if(JPEGERR_SUCCESS != jpeg_buffer_set_start_offset(buffer, start_offset))
+            ALOGE("%s: jpeg_buffer_set_start_offset failed", __func__);
+#endif
+        bytes_read = bytes_to_read;
+    }
+
+    ALOGD("%s: X", __func__);
+    return bytes_read;
+}
+
+/*
+ * Capture the current CLOCK_REALTIME time into *p_timer.
+ * Returns JPEGERR_SUCCESS, JPEGERR_ENULLPTR on a NULL argument, or
+ * JPEGERR_EFAILED if the clock read fails.
+ */
+static int mjpegd_timer_start(timespec *p_timer)
+{
+    if (NULL == p_timer)
+        return JPEGERR_ENULLPTR;
+
+    return clock_gettime(CLOCK_REALTIME, p_timer) ? JPEGERR_EFAILED
+                                                  : JPEGERR_SUCCESS;
+}
+
+/*
+ * Report the milliseconds elapsed since *p_timer was started, via
+ * *elapsed_in_ms. When reset_start is non-zero the timer is restarted at
+ * "now". Returns JPEGERR_SUCCESS or the failure code from the clock read.
+ */
+static int mjpegd_timer_get_elapsed(timespec *p_timer, int *elapsed_in_ms, uint8_t reset_start)
+{
+    timespec now;
+    int rc = mjpegd_timer_start(&now);
+
+    if (JPEG_FAILED(rc))
+        return rc;
+
+    long elapsed = (long)(now.tv_sec - p_timer->tv_sec) * 1000
+                 + (long)(now.tv_nsec - p_timer->tv_nsec) / 1000000;
+    *elapsed_in_ms = (int)elapsed;
+
+    if (reset_start)
+        *p_timer = now;
+
+    return JPEGERR_SUCCESS;
+}
+
+/*
+ * Wait on p_cond for at most `ms` milliseconds; p_mutex must be held on
+ * entry. Returns 0 when signalled, JPEGERR_ETIMEDOUT on timeout, or a
+ * negative value if the absolute deadline could not be computed.
+ */
+int mjpegd_cond_timedwait(pthread_cond_t *p_cond, pthread_mutex_t *p_mutex, uint32_t ms)
+{
+    struct timespec ts;
+    int rc = clock_gettime(CLOCK_REALTIME, &ts);
+    if (rc < 0) return rc;
+
+    /* Build the absolute deadline and normalize tv_nsec into [0, 1e9).
+     * The previous code could leave tv_nsec >= 1e9, which makes
+     * pthread_cond_timedwait fail immediately with EINVAL (POSIX requires
+     * a normalized timespec), so the wait would spin instead of sleeping. */
+    ts.tv_sec  += ms / 1000;
+    ts.tv_nsec += (long)(ms % 1000) * 1000000;
+    if (ts.tv_nsec >= 1000000000L) {
+        ts.tv_sec  += 1;
+        ts.tv_nsec -= 1000000000L;
+    }
+
+    rc = pthread_cond_timedwait(p_cond, p_mutex, &ts);
+    if (rc == ETIMEDOUT)
+    {
+        rc = JPEGERR_ETIMEDOUT;
+    }
+    return rc;
+}
+
diff --git a/msmcobalt/usbcamcore/src/QCameraUsbParm.cpp b/msmcobalt/usbcamcore/src/QCameraUsbParm.cpp
new file mode 100755
index 0000000..fda6e9d
--- /dev/null
+++ b/msmcobalt/usbcamcore/src/QCameraUsbParm.cpp
@@ -0,0 +1,683 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+//#define ALOG_NDEBUG 0
+#define ALOG_NIDEBUG 0
+#define LOG_TAG "QCameraUsbParm"
+#include <utils/Log.h>
+
+#include <utils/Errors.h>
+#include <utils/threads.h>
+#include <utils/String16.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <cutils/properties.h>
+#include <math.h>
+#if HAVE_ANDROID_OS
+#include <linux/android_pmem.h>
+#endif
+#include <linux/ioctl.h>
+#include <camera/QCameraParameters.h>
+#include <media/mediarecorder.h>
+#include <gralloc_priv.h>
+
+#include "linux/msm_mdp.h"
+#include <linux/fb.h>
+#include <limits.h>
+
+
+extern "C" {
+#include <fcntl.h>
+#include <time.h>
+#include <pthread.h>
+#include <stdio.h>
+#include <string.h>
+#include <unistd.h>
+#include <termios.h>
+#include <assert.h>
+#include <stdlib.h>
+#include <ctype.h>
+#include <signal.h>
+#include <errno.h>
+#include <sys/mman.h>
+#include <sys/system_properties.h>
+#include <sys/time.h>
+#include <stdlib.h>
+#include <linux/msm_ion.h>
+#include <camera.h>
+#include <cam_fifo.h>
+#include <jpege.h>
+
+} // extern "C"
+
+#include "QCameraHWI.h"
+#include "QualcommUsbCamera.h"
+#include "QCameraUsbPriv.h"
+#include "QCameraUsbParm.h"
+
+namespace android {
+
+/********************************************************************/
+/* Preview format strings advertised to the app, mapped to HAL pixel formats */
+static const str_map preview_formats[] = {
+    {QCameraParameters::PIXEL_FORMAT_YUV420SP, HAL_PIXEL_FORMAT_YCrCb_420_SP},
+};
+
+/* Per-format descriptors: HAL format, camera format, padding, plane count
+ * — field meaning assumed from preview_format_info_t; confirm in header */
+static const preview_format_info_t preview_format_info_list[] = {
+    {HAL_PIXEL_FORMAT_YV12, CAMERA_YUV_420_YV12, CAMERA_PAD_TO_WORD, 3}
+};
+
+/* Supported preview resolutions, largest first */
+static struct camera_size_type previewSizes[] = {
+    { 1920, 1088}, //1080p
+    { 1280, 720}, // 720P,
+    { 640, 480}, // VGA
+    { 512, 384},
+    { 480, 320},
+    { 320, 240}, // QVGA
+};
+
+// All fps ranges which can be supported. This list will be filtered according
+// to the min and max fps supported by hardware
+// this list must be sorted first by max_fps and then min_fps
+// fps values are multiplied by 1000
+static android::FPSRange prevFpsRanges[] = {
+    android::FPSRange(5000, 121000),
+};
+
+/* TBR: Is frame rate mode mandatory */
+static const str_map frame_rate_modes[] = {
+    {QCameraParameters::KEY_QC_PREVIEW_FRAME_RATE_AUTO_MODE, FPS_MODE_AUTO},
+    {QCameraParameters::KEY_QC_PREVIEW_FRAME_RATE_FIXED_MODE, FPS_MODE_FIXED}
+};
+
+/* Snapshot formats; RAW is currently disabled */
+static const str_map picture_formats[] = {
+    {QCameraParameters::PIXEL_FORMAT_JPEG, PICTURE_FORMAT_JPEG},
+    //{QCameraParameters::PIXEL_FORMAT_RAW, PICTURE_FORMAT_RAW}
+};
+
+/* Supported snapshot resolutions */
+static camera_size_type picture_sizes[] = {
+    { 1920, 1088}, //HD1080
+    { 1280, 720}, //HD720
+    { 640, 480}, // VGA
+    { 320, 240}, // QVGA
+};
+
+/* aspect ratio removed */
+static camera_size_type thumbnail_sizes[] = {
+    { 512, 288 }, //1.777778
+    { 480, 288 }, //1.666667
+    { 256, 154 }, //1.66233
+    { 432, 288 }, //1.5
+    { 512, 384 }, //1.333333
+    { 352, 288 }, //1.222222
+    { 320, 240 }, //1.33333
+    { 176, 144 }, //1.222222
+};
+
+/* Recording-hint parameter values mapped to booleans */
+static const str_map recording_Hints[] = {
+    {"false", FALSE},
+    {"true",  TRUE}
+};
+
+/* Static functions list */
+static String8 create_sizes_str(const camera_size_type *sizes, int len);
+static String8 create_values_str(const str_map *values, int len);
+static String8 create_fps_str(const android:: FPSRange* fps, int len);
+static String8 create_values_range_str(int min, int max);
+static int usbCamSetPrvwSize(   camera_hardware_t           *camHal,
+                                const QCameraParameters&    params);
+static int usbCamSetPictSize(   camera_hardware_t           *camHal,
+                                const QCameraParameters&    params);
+static int usbCamSetThumbnailSize(  camera_hardware_t           *camHal,
+                                    const QCameraParameters&    params);
+static int usbCamSetJpegQlty(   camera_hardware_t           *camHal,
+                                const QCameraParameters&    params);
+
+/******************************************************************************
+ * Function: usbCamInitDefaultParameters
+ * Description: This function sets default parameters to camera HAL context
+ *
+ * Input parameters:
+ *   camHal              - camera HAL handle
+ *
+ * Return values:
+ *      0   Success (always returns 0 today; int kept for error reporting)
+ *
+ * Notes: none
+ *****************************************************************************/
+int usbCamInitDefaultParameters(camera_hardware_t *camHal)
+{
+    ALOGD("%s: E", __func__);
+    int rc = 0;
+    char tempStr[FILENAME_LENGTH];
+
+    /* Default initializations */
+    camHal->prevFormat          = DEFAULT_USBCAM_PRVW_FMT;
+    camHal->prevWidth           = DEFAULT_USBCAM_PRVW_WD;
+    camHal->prevHeight          = DEFAULT_USBCAM_PRVW_HT;
+    camHal->dispFormat          = camHal->prevFormat;
+    camHal->dispWidth           = camHal->prevWidth;
+    camHal->dispHeight          = camHal->prevHeight;
+    camHal->pictFormat          = DEFAULT_USBCAM_PICT_FMT;
+    camHal->pictWidth           = DEFAULT_USBCAM_PICT_WD;
+    camHal->pictHeight          = DEFAULT_USBCAM_PICT_HT;
+    camHal->pictJpegQlty        = DEFAULT_USBCAM_PICT_QLTY;
+    camHal->thumbnailWidth      = DEFAULT_USBCAM_THUMBNAIL_WD;
+    camHal->thumbnailHeight     = DEFAULT_USBCAM_THUMBNAIL_HT;
+    camHal->thumbnailJpegQlty   = DEFAULT_USBCAM_THUMBNAIL_QLTY;
+    camHal->previewEnabledFlag  = 0;
+    camHal->prvwStoppedForPicture = 0;
+    camHal->prvwCmdPending      = 0;
+    camHal->takePictInProgress  = 0;
+
+    //Set picture size values
+    camHal->pictSizeValues = create_sizes_str(
+        picture_sizes, sizeof(picture_sizes) / sizeof(camera_size_type));
+    camHal->qCamParams.set(QCameraParameters::KEY_SUPPORTED_PICTURE_SIZES,
+        camHal->pictSizeValues.string());
+    camHal->qCamParams.setPictureSize(camHal->pictWidth, camHal->pictHeight);
+
+    //Set picture format
+    camHal->pictFormatValues = create_values_str(
+        picture_formats, sizeof(picture_formats) / sizeof(str_map));
+    camHal->qCamParams.set(QCameraParameters::KEY_SUPPORTED_PICTURE_FORMATS,
+                    camHal->pictFormatValues.string());
+    if(PICTURE_FORMAT_JPEG == camHal->pictFormat)
+        camHal->qCamParams.setPictureFormat(QCameraParameters::PIXEL_FORMAT_JPEG);
+
+    //Set picture quality (snprintf: bounded writes into tempStr)
+    snprintf(tempStr, sizeof(tempStr), "%d", camHal->pictJpegQlty);
+    camHal->qCamParams.set(QCameraParameters::KEY_JPEG_QUALITY, tempStr);
+
+    //Set Thumbnail size
+    camHal->thumbnailSizeValues = create_sizes_str(
+        thumbnail_sizes, sizeof(thumbnail_sizes) /sizeof(camera_size_type));
+    camHal->qCamParams.set(QCameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES,
+                    camHal->thumbnailSizeValues.string());
+    snprintf(tempStr, sizeof(tempStr), "%d", camHal->thumbnailWidth);
+    camHal->qCamParams.set(QCameraParameters::KEY_JPEG_THUMBNAIL_WIDTH,
+                                                tempStr);
+    snprintf(tempStr, sizeof(tempStr), "%d", camHal->thumbnailHeight);
+    camHal->qCamParams.set(QCameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT,
+                                                tempStr);
+
+    //Set Thumbnail quality
+    snprintf(tempStr, sizeof(tempStr), "%d", camHal->thumbnailJpegQlty);
+    camHal->qCamParams.set(QCameraParameters::KEY_JPEG_THUMBNAIL_QUALITY,
+                                                tempStr);
+
+    //Set Preview Format
+    camHal->prevFormatValues = create_values_str(
+        preview_formats, sizeof(preview_formats) / sizeof(str_map));
+    camHal->qCamParams.set(QCameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS,
+        camHal->prevFormatValues.string());
+    if(HAL_PIXEL_FORMAT_YCrCb_420_SP == camHal->prevFormat)
+        camHal->qCamParams.setPreviewFormat(QCameraParameters::PIXEL_FORMAT_YUV420SP);
+
+    //Set Preview size
+    camHal->prevSizeValues = create_sizes_str(
+        previewSizes,  sizeof(previewSizes) / sizeof(camera_size_type));
+    camHal->qCamParams.set(QCameraParameters::KEY_SUPPORTED_PREVIEW_SIZES,
+                    camHal->prevSizeValues.string());
+    camHal->qCamParams.setPreviewSize(camHal->prevWidth, camHal->prevHeight);
+
+    //Set Preview fps range
+    camHal->prevFpsRangesValues = create_fps_str(
+        prevFpsRanges, sizeof(prevFpsRanges) / sizeof(android::FPSRange));
+
+    camHal->qCamParams.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,
+                        camHal->prevFpsRangesValues);
+    camHal->qCamParams.setPreviewFpsRange(MIN_PREV_FPS, MAX_PREV_FPS);
+
+    ALOGD("%s: X", __func__);
+
+    return rc;
+} /* usbCamInitDefaultParameters */
+
+/******************************************************************************
+ * Function: usbCamSetParameters
+ * Description: This function parses the parameter string and stores the
+ *              parameters in the camera HAL handle
+ *
+ * Input parameters:
+ *  camHal              - camera HAL handle
+ *  params              - pointer to parameter string (may be NULL)
+ *
+ * Return values:
+ *      0   Success
+ *      -1  Error
+ * Notes: none
+ *****************************************************************************/
+int usbCamSetParameters(camera_hardware_t *camHal, const char *params)
+{
+    int             rc      = 0;
+    QCameraParameters qParam;
+
+    ALOGD("%s: E", __func__);
+
+    /* The previous code built String8(params) before checking for NULL;
+     * reject a NULL string up front instead of constructing from it. */
+    if (!params) {
+        ALOGE("%s: NULL params string", __func__);
+        return -1;
+    }
+
+    PRINT_PARAM_STR(params);
+
+    qParam.unflatten(String8(params));
+
+    if(usbCamSetPrvwSize(camHal, qParam))
+        rc = -1;
+    if(usbCamSetPictSize(camHal, qParam))
+        rc = -1;
+    if(usbCamSetThumbnailSize(camHal, qParam))
+        rc = -1;
+    if(usbCamSetJpegQlty(camHal, qParam))
+        rc = -1;
+
+    ALOGD("%s: X", __func__);
+    return rc;
+} /* usbCamSetParameters */
+
+/******************************************************************************
+ * Function: usbCamGetParameters
+ * Description: Flattens the current parameter set into a freshly allocated,
+ *              NUL-terminated string owned by the caller (release with
+ *              usbCamPutParameters).
+ *
+ * Input parameters:
+ *   camHal             - camera HAL handle
+ *
+ * Return values:
+ *      Address of the parameter string, or NULL on allocation failure
+ *
+ * Notes: none
+ *****************************************************************************/
+char* usbCamGetParameters(camera_hardware_t *camHal)
+{
+    ALOGD("%s: E", __func__);
+    char *parms = NULL;
+
+    QCameraParameters qParam = camHal->qCamParams;
+    //qParam.dump();
+    String8 str = qParam.flatten( );
+
+    size_t len = str.length();
+    char *buf = (char *)malloc(len + 1);
+    if (buf != NULL) {
+        memcpy(buf, str.string(), len);
+        buf[len] = '\0';
+        parms = buf;
+    }
+
+    PRINT_PARAM_STR(parms);
+
+    ALOGD("%s: X", __func__);
+    return (parms);
+} /* usbCamGetParameters */
+
+/******************************************************************************
+ * Function: usbCamPutParameters
+ * Description: This function frees the memory allocated for parameter string
+ *
+ * Input parameters:
+ *  camHal              - camera HAL handle
+ *  parms               - Parameter string
+ *
+ * Return values:
+ *      None
+ *
+ * Notes: none
+ *****************************************************************************/
+void usbCamPutParameters(camera_hardware_t *camHal, char *parms)
+{
+    ALOGD("%s: E", __func__);
+    /* free(NULL) is a no-op, so no guard is needed */
+    free(parms);
+    ALOGD("%s: X", __func__);
+} /* usbCamPutParameters */
+
+/******************************************************************************
+ * Function: create_sizes_str
+ * Description: This function composes a comma-separated "WxH" string from
+ *              an array of camera_size_type entries
+ * Input parameters:
+ *   sizes               - Array of supported sizes
+ *   len                 - Number of entries in the array
+ * Return values:
+ *      String8 holding the composed string (empty when len <= 0)
+ * Notes: none
+ *****************************************************************************/
+static String8 create_sizes_str(const camera_size_type *sizes, int len) {
+    String8 str;
+    char entry[32];
+
+    /* Emit "WxH" entries, comma-separated from the second entry onward. */
+    for (int i = 0; i < len; i++) {
+        snprintf(entry, sizeof(entry), (i == 0) ? "%dx%d" : ",%dx%d",
+                 sizes[i].width, sizes[i].height);
+        str.append(entry);
+    }
+    return str;
+}
+
+/******************************************************************************
+ * Function: create_values_str
+ * Description: This function composes a comma-separated string from the
+ *              'desc' fields of a str_map array
+ * Input parameters:
+ *   values              - Array of str_map entries
+ *   len                 - Number of entries in the array
+ * Return values:
+ *      String8 holding the composed string (empty when len <= 0)
+ * Notes: none
+ *****************************************************************************/
+static String8 create_values_str(const str_map *values, int len) {
+    String8 str;
+
+    /* Join the 'desc' field of every map entry with commas. */
+    for (int i = 0; i < len; i++) {
+        if (i != 0) {
+            str.append(",");
+        }
+        str.append(values[i].desc);
+    }
+    return str;
+}
+
+/******************************************************************************
+ * Function: create_fps_str
+ * Description: This function composes a comma-separated "(min,max)" string
+ *              from an array of FPS ranges
+ * Input parameters:
+ *   fps                 - Array of FPSRange entries
+ *   len                 - Number of entries in the array
+ * Return values:
+ *      String8 holding the composed string (empty when len <= 0)
+ * Notes: none
+ *****************************************************************************/
+static String8 create_fps_str(const android::FPSRange *fps, int len) {
+    String8 str;
+    char entry[32];
+
+    /* Emit "(min,max)" ranges, comma-separated from the second one onward. */
+    for (int i = 0; i < len; i++) {
+        snprintf(entry, sizeof(entry), (i == 0) ? "(%d,%d)" : ",(%d,%d)",
+                 fps[i].minFPS, fps[i].maxFPS);
+        str.append(entry);
+    }
+    return str;
+}
+
+/******************************************************************************
+ * Function: create_values_range_str
+ * Description: This function composes a comma-separated list of every
+ *              integer from min to max, inclusive
+ * Input parameters:
+ *   min                 - Lower bound of the range
+ *   max                 - Upper bound of the range
+ * Return values:
+ *      String8 holding the composed string (empty when min > max)
+ * Notes: none
+ *****************************************************************************/
+static String8 create_values_range_str(int min, int max){
+    String8 str;
+    char entry[16];
+
+    /* "min,min+1,...,max"; empty when min > max, same as the original. */
+    for (int v = min; v <= max; v++) {
+        snprintf(entry, sizeof(entry), (v == min) ? "%d" : ",%d", v);
+        str.append(entry);
+    }
+    return str;
+}
+
+/******************************************************************************
+ * Function: usbCamSetPrvwSize
+ * Description: This function parses preview width and height from the input
+ *              parameters and stores into the context
+ *
+ * Input parameters:
+ *  camHal              - camera HAL handle
+ *  params              - QCameraParameters reference
+ *
+ * Return values:
+ *      0   If parameters are valid
+ *      -1  If parameters are invalid
+ *
+ * Notes: none
+ *****************************************************************************/
+static int usbCamSetPrvwSize(   camera_hardware_t           *camHal,
+                                const QCameraParameters&    params)
+{
+    int rc = 0, width, height, i, numPrvwSizes, validSize;
+    ALOGD("%s: E", __func__);
+
+    params.getPreviewSize(&width, &height);
+    ALOGI("%s: Requested preview size %d x %d", __func__, width, height);
+
+    /* Accept the request only if it matches one of the advertised preview
+     * sizes; stop scanning at the first match (the original kept looping
+     * over the remaining entries for no benefit). */
+    numPrvwSizes = sizeof(previewSizes) / sizeof(camera_size_type);
+    for (i = 0, validSize = 0; i <  numPrvwSizes; i++) {
+        if (width ==  previewSizes[i].width
+           && height ==  previewSizes[i].height) {
+            validSize = 1;
+
+            camHal->qCamParams.setPreviewSize(width, height);
+            ALOGD("%s: setPreviewSize:  width: %d   height: %d",
+                __func__, width, height);
+
+            /* Display dimensions track the preview dimensions. */
+            camHal->prevWidth   = width;
+            camHal->prevHeight  = height;
+            camHal->dispWidth   = width;
+            camHal->dispHeight  = height;
+
+            /* TBD: restrict pictures size and video to preview size */
+            break;
+        }
+    }
+    if(!validSize)
+        ALOGE("%s: Invalid preview size %dx%d requested", __func__,
+            width, height);
+
+    rc = (validSize == 0)? -1:0;
+    ALOGD("%s: X", __func__);
+
+    return rc;
+} /* usbCamSetPrvwSize */
+
+/******************************************************************************
+ * Function: usbCamSetPictSize
+ * Description: This function parses picture width and height from the input
+ *              parameters and stores into the context
+ *
+ * Input parameters:
+ *  camHal              - camera HAL handle
+ *  params              - QCameraParameters reference
+ *
+ * Return values:
+ *      0   If parameters are valid
+ *      -1  If parameters are invalid
+ *
+ * Notes: none
+ *****************************************************************************/
+static int usbCamSetPictSize(   camera_hardware_t           *camHal,
+                                const QCameraParameters&    params)
+{
+    int rc = 0, width, height, i, numPictSizes, validSize;
+    ALOGD("%s: E", __func__);
+
+    /* parse for picture width and height */
+    params.getPictureSize(&width, &height);
+    ALOGI("%s: Requested picture size %d x %d", __func__, width, height);
+
+    /* Accept the request only if it matches one of the supported picture
+     * sizes; stop scanning at the first match (the original kept looping
+     * over the remaining entries for no benefit). */
+    numPictSizes = sizeof(picture_sizes) / sizeof(camera_size_type);
+    for (i = 0, validSize = 0; i <  numPictSizes; i++) {
+        if (width ==  picture_sizes[i].width
+           && height ==  picture_sizes[i].height) {
+            validSize = 1;
+
+            camHal->qCamParams.setPictureSize(width, height);
+            ALOGD("%s: setPictureSize:  width: %d   height: %d",
+                __func__, width, height);
+
+            /* TBD: If new pictSize is different from old size, restart prvw */
+            camHal->pictWidth   = width;
+            camHal->pictHeight  = height;
+            break;
+        }
+    }
+    if(!validSize)
+        ALOGE("%s: Invalid picture size %dx%d requested", __func__,
+            width, height);
+    rc = (validSize == 0)? -1:0;
+    ALOGD("%s: X", __func__);
+
+    return rc;
+} /* usbCamSetPictSize */
+
+/******************************************************************************
+ * Function: usbCamSetThumbnailSize
+ * Description: This function parses thumbnail width and height from the input
+ *              parameters and stores into the context
+ *
+ * Input parameters:
+ *  camHal              - camera HAL handle
+ *  params              - QCameraParameters reference
+ *
+ * Return values:
+ *      0   If parameters are valid
+ *      -1  If parameters are invalid
+ *
+ * Notes: none
+ *****************************************************************************/
+static int usbCamSetThumbnailSize(  camera_hardware_t           *camHal,
+                                    const QCameraParameters&    params)
+{
+    int rc = 0, width, height, i, numThumbnailSizes, validSize;
+    ALOGD("%s: E", __func__);
+
+    /* parse for thumbnail width and height */
+    width = params.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+    height = params.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+    ALOGI("%s: Requested thumbnail size %d x %d", __func__, width, height);
+
+    /* Accept the request only if it matches one of the supported thumbnail
+     * sizes; stop scanning at the first match. The original staged the
+     * values through an unused sprintf() buffer and then passed the ints
+     * anyway — the dead buffer is removed. */
+    numThumbnailSizes = sizeof(thumbnail_sizes) / sizeof(camera_size_type);
+    for (i = 0, validSize = 0; i <  numThumbnailSizes; i++) {
+        if (width ==  thumbnail_sizes[i].width
+           && height ==  thumbnail_sizes[i].height) {
+            validSize = 1;
+
+            camHal->thumbnailWidth   = width;
+            camHal->thumbnailHeight  = height;
+            camHal->qCamParams.set(QCameraParameters::KEY_JPEG_THUMBNAIL_WIDTH,
+                                                        width);
+            camHal->qCamParams.set(QCameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT,
+                                                        height);
+            break;
+        }
+    }
+    if(!validSize)
+        ALOGE("%s: Invalid thumbnail size %dx%d requested", __func__,
+            width, height);
+    rc = (validSize == 0)? -1:0;
+    ALOGD("%s: X", __func__);
+
+    return rc;
+} /* usbCamSetThumbnailSize */
+
+/******************************************************************************
+ * Function: usbCamSetJpegQlty
+ * Description: This function parses picture and thumbnail JPEG quality and
+ *              validates before storing in the context
+ *
+ * Input parameters:
+ *  camHal              - camera HAL handle
+ *  params              - QCameraParameters reference
+ *
+ * Return values:
+ *      0   If parameters are valid
+ *      -1  If parameters are invalid
+ *
+ * Notes: none
+ *****************************************************************************/
+static int usbCamSetJpegQlty(   camera_hardware_t           *camHal,
+                                const QCameraParameters&    params)
+{
+    int rc = 0, quality = 0;
+    ALOGD("%s: E", __func__);
+
+    /* Main-image JPEG quality; valid range is 0..100 inclusive. */
+    quality = params.getInt(QCameraParameters::KEY_JPEG_QUALITY);
+    ALOGI("%s: Requested picture qlty %d", __func__, quality);
+
+    if (quality >= 0 && quality <= 100) {
+        camHal->pictJpegQlty = quality;
+        camHal->qCamParams.set(QCameraParameters::KEY_JPEG_QUALITY, quality);
+    } else {
+        ALOGE("Invalid jpeg quality=%d", quality);
+        rc = -1;
+    }
+
+    /* Thumbnail JPEG quality; same range. Both keys now use the
+     * set(key, int) overload — the original formatted the thumbnail
+     * value through an unbounded sprintf() into a temp buffer while
+     * passing the picture value as an int, for no reason. */
+    quality = params.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
+    ALOGI("%s: Requested thumbnail qlty %d", __func__, quality);
+
+    if (quality >= 0 && quality <= 100) {
+        camHal->thumbnailJpegQlty = quality;
+        camHal->qCamParams.set(QCameraParameters::KEY_JPEG_THUMBNAIL_QUALITY,
+                                    quality);
+    } else {
+        ALOGE("Invalid jpeg thumbnail quality=%d", quality);
+        rc = -1;
+    }
+
+    ALOGD("%s: X rc:%d", __func__, rc);
+
+    return rc;
+}
+
+}; /*namespace android */
diff --git a/msmcobalt/usbcamcore/src/QualcommUsbCamera.cpp b/msmcobalt/usbcamcore/src/QualcommUsbCamera.cpp
new file mode 100755
index 0000000..b274626
--- /dev/null
+++ b/msmcobalt/usbcamcore/src/QualcommUsbCamera.cpp
@@ -0,0 +1,2963 @@
+/* Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+ /*#error uncomment this for compiler test!*/
+//#define ALOG_NDEBUG 0
+#define ALOG_NIDEBUG 0
+#define LOG_TAG "QualcommUsbCamera"
+
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+#include <sys/stat.h>
+#include <unistd.h>
+#include <sys/prctl.h>
+#include <sys/resource.h>
+#include <pthread.h>
+#include <linux/uvcvideo.h>
+
+#include "QCameraHAL.h"
+#include "QualcommUsbCamera.h"
+#include "QCameraUsbPriv.h"
+#include "QCameraMjpegDecode.h"
+#include "QCameraUsbParm.h"
+#include <gralloc_priv.h>
+#include <genlock.h>
+
+extern "C" {
+#include <sys/time.h>
+}
+
+/* Dispatch table binding the framework's camera_device_ops_t entry points
+ * to this USB-camera HAL's implementations (old-style GCC designated
+ * initializers: "field: value"). */
+camera_device_ops_t usbcam_camera_ops = {
+  set_preview_window:         android::usbcam_set_preview_window,
+  set_callbacks:              android::usbcam_set_CallBacks,
+  enable_msg_type:            android::usbcam_enable_msg_type,
+  disable_msg_type:           android::usbcam_disable_msg_type,
+  msg_type_enabled:           android::usbcam_msg_type_enabled,
+
+  start_preview:              android::usbcam_start_preview,
+  stop_preview:               android::usbcam_stop_preview,
+  preview_enabled:            android::usbcam_preview_enabled,
+  store_meta_data_in_buffers: android::usbcam_store_meta_data_in_buffers,
+
+  start_recording:            android::usbcam_start_recording,
+  stop_recording:             android::usbcam_stop_recording,
+  recording_enabled:          android::usbcam_recording_enabled,
+  release_recording_frame:    android::usbcam_release_recording_frame,
+
+  auto_focus:                 android::usbcam_auto_focus,
+  cancel_auto_focus:          android::usbcam_cancel_auto_focus,
+
+  take_picture:               android::usbcam_take_picture,
+  cancel_picture:             android::usbcam_cancel_picture,
+
+  set_parameters:             android::usbcam_set_parameters,
+  get_parameters:             android::usbcam_get_parameters,
+  put_parameters:             android::usbcam_put_parameters,
+  send_command:               android::usbcam_send_command,
+
+  release:                    android::usbcam_release,
+  dump:                       android::usbcam_dump,
+};
+
+#define CAPTURE                 1
+#define DISPLAY                 1
+#define CALL_BACK               1
+#define MEMSET                  0
+#define FREAD_JPEG_PICTURE      0
+#define JPEG_ON_USB_CAMERA      1
+#define FILE_DUMP_CAMERA        0
+#define FILE_DUMP_B4_DISP       0
+
+namespace android {
+
+static int initUsbCamera(               camera_hardware_t *camHal,
+                                        int width, int height,
+                                        int pixelFormat);
+static int startUsbCamCapture(          camera_hardware_t *camHal);
+static int stopUsbCamCapture(           camera_hardware_t *camHal);
+static int initV4L2mmap(                camera_hardware_t *camHal);
+static int unInitV4L2mmap(              camera_hardware_t *camHal);
+static int launch_preview_thread(       camera_hardware_t *camHal);
+static int launchTakePictureThread(     camera_hardware_t *camHal);
+static int initDisplayBuffers(          camera_hardware_t *camHal);
+static int deInitDisplayBuffers(        camera_hardware_t *camHal);
+static int stopPreviewInternal(         camera_hardware_t *camHal);
+static int get_buf_from_cam(            camera_hardware_t *camHal);
+static int put_buf_to_cam(              camera_hardware_t *camHal);
+static int prvwThreadTakePictureInternal(camera_hardware_t *camHal);
+static int get_buf_from_display( camera_hardware_t *camHal, int *buffer_id);
+static int put_buf_to_display(   camera_hardware_t *camHal, int buffer_id);
+static int convert_data_frm_cam_to_disp(camera_hardware_t *camHal, int buffer_id);
+static void * previewloop(void *);
+static void * takePictureThread(void *);
+static int convert_YUYV_to_420_NV12(char *in_buf, char *out_buf, int wd, int ht);
+static int get_uvc_device(char *devname);
+static int getPreviewCaptureFmt(camera_hardware_t *camHal);
+static int allocate_ion_memory(QCameraHalMemInfo_t *mem_info, int ion_type);
+static int deallocate_ion_memory(QCameraHalMemInfo_t *mem_info);
+static int ioctlLoop(int fd, int ioctlCmd, void *args);
+static int readFromFile(char* fileName, char* buffer, int bufferSize);
+static int fileDump(const char* fileName, char* data, int length, int* frm_cnt);
+static int encodeJpeg(                  camera_hardware_t *camHal);
+void jpegEncodeCb   (jpeg_job_status_t status,
+                       uint8_t thumbnailDroppedFlag,
+                       uint32_t client_hdl,
+                       uint32_t jobId,
+                       uint8_t* out_data,
+                       uint32_t data_size,
+                       void *userData);
+
+/* HAL function implementation goes here*/
+
+/**
+ * The functions need to be provided by the camera HAL.
+ *
+ * If getNumberOfCameras() returns N, the valid cameraId for getCameraInfo()
+ * and openCameraHardware() is 0 to N-1.
+ */
+
+extern "C" int usbcam_get_number_of_cameras()
+{
+    /* TBR: This is hardcoded currently to 1 USB camera */
+    ALOGI("%s: E", __func__);
+    ALOGI("%s: X", __func__);
+
+    return 1;
+}
+
+extern "C" int usbcam_get_camera_info(int camera_id, struct camera_info *info)
+{
+    int rc = -1;
+    ALOGI("%s: E", __func__);
+
+    /* TBR: This info is hardcoded currently irrespective of camera_id.
+     * The original declared and memset a local 'struct CameraInfo camInfo'
+     * that was never read — dead code, removed. */
+    if(info) {
+        info->facing = CAMERA_FACING_FRONT;//CAMERA_FACING_BACK;
+        info->orientation = 0;
+        rc = 0;
+    }
+    ALOGI("%s: X", __func__);
+    return rc;
+}
+
+/* HAL should return NULL handle if it fails to open camera hardware. */
+extern "C" int  usbcam_camera_device_open(
+  const struct hw_module_t* module, const char* id,
+          struct hw_device_t** hw_device)
+{
+    int rc = -1;
+    camera_device       *device = NULL;
+    camera_hardware_t   *camHal;
+    char                *dev_name;
+
+    ALOGI("%s: E", __func__);
+
+    /* initialize return handle value to NULL */
+    *hw_device = NULL;
+
+    camHal = new camera_hardware_t();
+    if(!camHal) {
+            ALOGE("%s:  end in no mem", __func__);
+            return -1;
+    }
+
+    rc = usbCamInitDefaultParameters(camHal);
+    if(0 != rc)
+    {
+        ALOGE("%s: usbCamInitDefaultParameters error", __func__);
+        /* Fix: original leaked camHal on this path. */
+        delete camHal;
+        return rc;
+    }
+#if CAPTURE
+
+    dev_name = camHal->dev_name;
+
+    rc = get_uvc_device(dev_name);
+    if(rc || *dev_name == '\0'){
+        ALOGE("%s: No UVC node found \n", __func__);
+        /* Fix: original leaked camHal on this path. */
+        delete camHal;
+        return -1;
+    }
+
+    camHal->fd = open(dev_name, O_RDWR /* required */ | O_NONBLOCK, 0);
+
+    if (camHal->fd <  0) {
+        ALOGE("%s: Cannot open '%s'", __func__, dev_name);
+        /* Fix: original called free() on a new-allocated object and then
+         * kept dereferencing the freed handle (device = &camHal->hw_dev)
+         * and published it through *hw_device — a use-after-free handed
+         * to the framework. Release with delete and bail out instead. */
+        delete camHal;
+        return -1;
+    }
+    rc = 0;
+
+#else /* CAPTURE */
+    rc = 0;
+#endif /* CAPTURE */
+
+    /* Publish the device handle only on full success. */
+    device                  = &camHal->hw_dev;
+    device->common.close    = usbcam_close_camera_device;
+    device->ops             = &usbcam_camera_ops;
+    device->priv            = (void *)camHal;
+    *hw_device              = &(device->common);
+
+    ALOGD("%s: camHal: %p", __func__, camHal);
+    ALOGI("%s: X %d", __func__, rc);
+
+    return rc;
+}
+
+extern "C"  int usbcam_close_camera_device( hw_device_t *hw_dev)
+{
+    ALOGI("%s: device =%p E", __func__, hw_dev);
+    int rc =  -1;
+    camera_device_t *device     = (camera_device_t *)hw_dev;
+
+    /* Close the V4L2 node and free the per-camera context allocated in
+     * usbcam_camera_device_open(). Returns close(2)'s status, or -1 when
+     * the handle or its priv pointer is NULL. */
+    if(device) {
+        camera_hardware_t *camHal   = (camera_hardware_t *)device->priv;
+        if(camHal) {
+            rc = close(camHal->fd);
+            if(rc < 0) {
+                ALOGE("%s: close failed ", __func__);
+            }
+            /* NOTE(review): fd is reset to 0 rather than -1; 0 is a valid
+             * descriptor, but this is harmless since camHal is deleted
+             * immediately below. */
+            camHal->fd = 0;
+            delete camHal;
+        }else{
+                ALOGE("%s: camHal is NULL pointer ", __func__);
+        }
+    }
+    ALOGI("%s: X device =%p, rc = %d", __func__, hw_dev, rc);
+    return rc;
+}
+
+int usbcam_set_preview_window(struct camera_device * device,
+        struct preview_stream_ops *window)
+{
+    ALOGI("%s: E", __func__);
+    int rc = 0;
+    camera_hardware_t *camHal;
+
+    /* Swap the ANativeWindow used for preview: tear down buffers tied to
+     * the old window (if any), store the new one, and allocate display
+     * buffers for it. Passing a NULL window only tears down. */
+    VALIDATE_DEVICE_HDL(camHal, device, -1);
+    Mutex::Autolock autoLock(camHal->lock);
+
+    /* if window is already set, then de-init previous buffers */
+    if(camHal->window){
+        rc = deInitDisplayBuffers(camHal);
+        if(rc < 0) {
+            ALOGE("%s: deInitDisplayBuffers returned error", __func__);
+        }
+    }
+    camHal->window = window;
+
+    if(camHal->window){
+        rc = initDisplayBuffers(camHal);
+        if(rc < 0) {
+            ALOGE("%s: initDisplayBuffers returned error", __func__);
+        }
+    }
+    ALOGI("%s: X. rc = %d", __func__, rc);
+    return rc;
+}
+
+void usbcam_set_CallBacks(struct camera_device * device,
+        camera_notify_callback notify_cb,
+        camera_data_callback data_cb,
+        camera_data_timestamp_callback data_cb_timestamp,
+        camera_request_memory get_memory,
+        void *user)
+{
+    ALOGI("%s: E", __func__);
+
+    /* Cache the framework callback pointers in the per-camera context. */
+    if(!device || !device->priv){
+        ALOGE("%s: Null device or device->priv", __func__);
+        return;
+    }
+    camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+
+    Mutex::Autolock autoLock(camHal->lock);
+
+    camHal->notify_cb           = notify_cb;
+    camHal->data_cb             = data_cb;
+    camHal->data_cb_timestamp   = data_cb_timestamp;
+    camHal->get_memory          = get_memory;
+    camHal->cb_ctxt             = user;
+
+    ALOGI("%s: X", __func__);
+}
+
+void usbcam_enable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+    ALOGI("%s: E", __func__);
+    ALOGI("%s: msg_type: %d", __func__, msg_type);
+
+    camera_hardware_t *camHal;
+
+    if(device && device->priv){
+        camHal = (camera_hardware_t *)device->priv;
+    }else{
+        ALOGE("%s: Null device or device->priv", __func__);
+        return;
+    }
+
+    Mutex::Autolock autoLock(camHal->lock);
+
+    /* OR the requested message bits into the enabled-message mask. */
+    camHal->msgEnabledFlag |= msg_type;
+
+    ALOGI("%s: X", __func__);
+}
+
+void usbcam_disable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+    ALOGI("%s: E", __func__);
+    ALOGI("%s: msg_type: %d", __func__, msg_type);
+
+    camera_hardware_t *camHal;
+    if(device && device->priv){
+        camHal = (camera_hardware_t *)device->priv;
+    }else{
+        ALOGE("%s: Null device or device->priv", __func__);
+        return;
+    }
+
+    Mutex::Autolock autoLock(camHal->lock);
+
+    /* Clear the requested message bits from the enabled-message mask. */
+    camHal->msgEnabledFlag &= ~msg_type;
+
+    ALOGI("%s: X", __func__);
+}
+
+int usbcam_msg_type_enabled(struct camera_device * device, int32_t msg_type)
+{
+    ALOGI("%s: E", __func__);
+
+    camera_hardware_t *camHal;
+    if(device && device->priv){
+        camHal = (camera_hardware_t *)device->priv;
+    }else{
+        ALOGE("%s: Null device or device->priv", __func__);
+        return -1;
+    }
+
+    Mutex::Autolock autoLock(camHal->lock);
+
+    ALOGI("%s: X", __func__);
+    /* Non-zero iff any of the queried message bits are enabled. */
+    return (camHal->msgEnabledFlag & msg_type);
+}
+
+int usbcam_start_preview(struct camera_device * device)
+{
+    ALOGI("%s: E", __func__);
+
+    int rc = -1;
+    camera_hardware_t *camHal = NULL;
+
+    VALIDATE_DEVICE_HDL(camHal, device, -1);
+    Mutex::Autolock autoLock(camHal->lock);
+
+    /* If preview is already running, nothing to be done */
+    if(camHal->previewEnabledFlag){
+        ALOGI("%s: Preview is already running", __func__);
+        return 0;
+    }
+
+#if CAPTURE
+    /* Order matters: configure the device format, start V4L2 streaming,
+     * then spawn the preview loop that pulls frames from it. */
+    rc = initUsbCamera(camHal, camHal->prevWidth,
+                        camHal->prevHeight, getPreviewCaptureFmt(camHal));
+    if(rc < 0) {
+        ALOGE("%s: Failed to intialize the device", __func__);
+    }else{
+        rc = startUsbCamCapture(camHal);
+        if(rc < 0) {
+            ALOGE("%s: Failed to startUsbCamCapture", __func__);
+        }else{
+            rc = launch_preview_thread(camHal);
+            if(rc < 0) {
+                ALOGE("%s: Failed to launch_preview_thread", __func__);
+            }
+        }
+    }
+#else /* CAPTURE */
+    rc = launch_preview_thread(camHal);
+    if(rc < 0) {
+        ALOGE("%s: Failed to launch_preview_thread", __func__);
+    }
+#endif /* CAPTURE */
+    /* if no errors, then set the flag */
+    if(!rc)
+        camHal->previewEnabledFlag = 1;
+
+    ALOGD("%s: X", __func__);
+    return rc;
+}
+
+void usbcam_stop_preview(struct camera_device * device)
+{
+    ALOGD("%s: E", __func__);
+
+    int rc = 0;
+    camera_hardware_t *camHal;
+
+    if(device && device->priv){
+        camHal = (camera_hardware_t *)device->priv;
+    }else{
+        ALOGE("%s: Null device or device->priv", __func__);
+        return;
+    }
+
+    Mutex::Autolock autoLock(camHal->lock);
+
+    /* stopPreviewInternal() joins the preview thread and stops capture;
+     * errors are logged but not propagated (the HAL API returns void). */
+    rc = stopPreviewInternal(camHal);
+    if(rc)
+        ALOGE("%s: stopPreviewInternal returned error", __func__);
+
+    ALOGI("%s: X", __func__);
+    return;
+}
+
+/* This function is equivalent to is_preview_enabled */
+/* This function is equivalent to is_preview_enabled */
+int usbcam_preview_enabled(struct camera_device * device)
+{
+    ALOGI("%s: E", __func__);
+    camera_hardware_t *camHal;
+
+    if(device && device->priv){
+        camHal = (camera_hardware_t *)device->priv;
+    }else{
+        ALOGE("%s: Null device or device->priv", __func__);
+        return -1;
+    }
+    Mutex::Autolock autoLock(camHal->lock);
+
+    ALOGI("%s: X", __func__);
+    /* 1 while preview is running, 0 otherwise. */
+    return camHal->previewEnabledFlag;
+}
+
+/* TBD */
+/* TBD */
+int usbcam_store_meta_data_in_buffers(struct camera_device * device, int enable)
+{
+    ALOGI("%s: E", __func__);
+    /* Stub: metadata-in-buffers mode is not implemented; always reports success. */
+    int rc = 0;
+
+    ALOGI("%s: X", __func__);
+    return rc;
+}
+
+/* TBD */
+int usbcam_start_recording(struct camera_device * device)
+{
+    /* Stub: video recording is not implemented; always reports success. */
+    int rc = 0;
+    ALOGD("%s: E", __func__);
+
+    ALOGD("%s: X", __func__);
+
+    return rc;
+}
+
+/* TBD */
+void usbcam_stop_recording(struct camera_device * device)
+{
+    /* Stub: video recording is not implemented; nothing to stop. */
+    ALOGD("%s: E", __func__);
+
+    ALOGD("%s: X", __func__);
+}
+
+/* TBD */
+int usbcam_recording_enabled(struct camera_device * device)
+{
+    /* Stub: recording never runs, so this always reports "not recording". */
+    int rc = 0;
+    ALOGD("%s: E", __func__);
+
+    ALOGD("%s: X", __func__);
+    return rc;
+}
+
+/* TBD */
+void usbcam_release_recording_frame(struct camera_device * device,
+                const void *opaque)
+{
+    /* Stub: no recording frames are ever handed out, so none to release. */
+    ALOGV("%s: E", __func__);
+
+    ALOGD("%s: X", __func__);
+}
+
+/* TBD */
+int usbcam_auto_focus(struct camera_device * device)
+{
+    ALOGD("%s: E", __func__);
+    /* Stub: autofocus is not implemented; always reports success. */
+    int rc = 0;
+
+    ALOGD("%s: X", __func__);
+    return rc;
+}
+
+/* TBD */
+int usbcam_cancel_auto_focus(struct camera_device * device)
+{
+    /* Stub: autofocus is not implemented, so there is nothing to cancel. */
+    int rc = 0;
+    ALOGD("%s: E", __func__);
+
+    ALOGD("%s: X", __func__);
+    return rc;
+}
+
+int usbcam_take_picture(struct camera_device * device)
+{
+    ALOGI("%s: E", __func__);
+    int rc = 0;
+    camera_hardware_t *camHal;
+
+    VALIDATE_DEVICE_HDL(camHal, device, -1);
+
+    Mutex::Autolock autoLock(camHal->lock);
+
+    /* If take picture is already in progress, nothing to be done */
+    if(camHal->takePictInProgress){
+        ALOGI("%s: Take picture already in progress", __func__);
+        return 0;
+    }
+
+    /* The camera must be reconfigured for the (usually larger) picture
+     * resolution, so a running preview is stopped and the device node is
+     * closed; prvwStoppedForPicture tells the picture thread to restart
+     * preview afterwards. */
+    if(camHal->previewEnabledFlag)
+    {
+        rc = stopPreviewInternal(camHal);
+        if(rc){
+            ALOGE("%s: stopPreviewInternal returned error", __func__);
+        }
+        USB_CAM_CLOSE(camHal);
+        camHal->prvwStoppedForPicture = 1;
+    }
+    /* TBD: Need to handle any dependencies on video recording state */
+    rc = launchTakePictureThread(camHal);
+    if(rc)
+        ALOGE("%s: launchTakePictureThread error", __func__);
+
+#if 0
+    /* Alternative (disabled) implementation: request the preview thread
+     * to take the picture instead of spawning a dedicated thread. */
+    if(camHal->previewEnabledFlag)
+    {
+        camHal->prvwCmdPending++;
+        camHal->prvwCmd         = USB_CAM_PREVIEW_TAKEPIC;
+        ALOGD("%s: Take picture command set ", __func__);
+    }else{
+        ALOGE("%s: Take picture without preview started!", __func__);
+        rc = -1;
+    }
+#endif
+
+    /* Flag is cleared by the picture thread when the capture completes. */
+    if(!rc)
+        camHal->takePictInProgress = 1;
+
+    ALOGI("%s: X", __func__);
+    return rc;
+}
+
+/* TBD */
+int usbcam_cancel_picture(struct camera_device * device)
+
+{
+    ALOGI("%s: E", __func__);
+    /* Stub: cancelling an in-flight capture is not implemented. */
+    int rc = 0;
+
+    ALOGI("%s: X", __func__);
+    return rc;
+}
+
+int usbcam_set_parameters(struct camera_device * device, const char *params)
+
+{
+    ALOGI("%s: E", __func__);
+    int rc = 0;
+    camera_hardware_t *camHal;
+
+    VALIDATE_DEVICE_HDL(camHal, device, -1);
+
+    Mutex::Autolock autoLock(camHal->lock);
+
+    /* Parsing and validation of the flattened parameter string is
+     * delegated to usbCamSetParameters() (QCameraUsbParm). */
+    rc = usbCamSetParameters(camHal, params);
+
+    ALOGI("%s: X", __func__);
+    return rc;
+}
+
+char* usbcam_get_parameters(struct camera_device * device)
+{
+    char *parms;
+    ALOGI("%s: E", __func__);
+
+    camera_hardware_t *camHal;
+    VALIDATE_DEVICE_HDL(camHal, device, NULL);
+
+    Mutex::Autolock autoLock(camHal->lock);
+
+    /* Heap-allocated string; the framework returns it through
+     * usbcam_put_parameters() for release. */
+    parms = usbCamGetParameters(camHal);
+
+    ALOGI("%s: X", __func__);
+    return parms;
+}
+
+void usbcam_put_parameters(struct camera_device * device, char *parm)
+
+{
+    ALOGI("%s: E", __func__);
+
+    camera_hardware_t *camHal;
+
+    if(device && device->priv){
+        camHal = (camera_hardware_t *)device->priv;
+    }else{
+        ALOGE("%s: Null device or device->priv", __func__);
+        return;
+    }
+
+    /* Frees the string handed out by usbcam_get_parameters(). */
+    usbCamPutParameters(camHal, parm);
+
+    ALOGI("%s: X", __func__);
+    return;
+}
+
+/* TBD */
+int usbcam_send_command(struct camera_device * device,
+            int32_t cmd, int32_t arg1, int32_t arg2)
+{
+    /* Stub: commands are logged and ignored; always reports success. */
+    int rc = 0;
+    ALOGI("%s: E", __func__);
+    ALOGI("%d", cmd);
+
+    ALOGI("%s: X", __func__);
+    return rc;
+}
+
+/* TBD */
+void usbcam_release(struct camera_device * device)
+{
+    ALOGI("%s: E", __func__);
+    /* Stub: state-machine-based teardown below is reference code kept
+     * disabled (#if 0); actual cleanup happens in close_camera_device. */
+#if 0
+    Mutex::Autolock l(&mLock);
+
+    switch(mPreviewState) {
+    case QCAMERA_HAL_PREVIEW_STOPPED:
+        break;
+    case QCAMERA_HAL_PREVIEW_START:
+        break;
+    case QCAMERA_HAL_PREVIEW_STARTED:
+        stopPreviewInternal();
+    break;
+    case QCAMERA_HAL_RECORDING_STARTED:
+        stopRecordingInternal();
+        stopPreviewInternal();
+        break;
+    case QCAMERA_HAL_TAKE_PICTURE:
+        cancelPictureInternal();
+        break;
+    default:
+        break;
+    }
+    mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+#endif
+    ALOGI("%s: X", __func__);
+}
+
+/* TBD */
+int usbcam_dump(struct camera_device * device, int fd)
+{
+    ALOGI("%s: E", __func__);
+    /* Stub: no state is dumped to fd; always reports success. */
+    int rc = 0;
+
+    ALOGI("%s: X", __func__);
+    return rc;
+}
+/*****************************************************************************
+*  Static function definitions below
+*****************************************************************************/
+
+/******************************************************************************/
+/* No in place conversion supported. Output buffer and input MUST should be   */
+/* different input buffer for a 4x4 pixel video                             ***/
+/******                  YUYVYUYV          00 01 02 03 04 05 06 07 ************/
+/******                  YUYVYUYV          08 09 10 11 12 13 14 15 ************/
+/******                  YUYVYUYV          16 17 18 19 20 21 22 23 ************/
+/******                  YUYVYUYV          24 25 26 27 28 29 30 31 ************/
+/******************************************************************************/
+/* output generated by this function ******************************************/
+/************************** YYYY            00 02 04 06            ************/
+/************************** YYYY            08 10 12 14            ************/
+/************************** YYYY            16 18 20 22            ************/
+/************************** YYYY            24 26 28 30            ************/
+/************************** VUVU            03 01 07 05            ************/
+/************************** VUVU            19 17 23 21            ************/
+/******************************************************************************/
+
+static int convert_YUYV_to_420_NV12(char *in_buf, char *out_buf, int wd, int ht)
+{
+    int rc =0;
+    int row, col, uv_row;
+
+    ALOGD("%s: E", __func__);
+    /* Arrange Y */
+    for(row = 0; row < ht; row++)
+        for(col = 0; col < wd * 2; col += 2)
+        {
+            out_buf[row * wd + col / 2] = in_buf[row * wd * 2 + col];
+        }
+
+    /* Arrange UV */
+    for(row = 0, uv_row = ht; row < ht; row += 2, uv_row++)
+        for(col = 1; col < wd * 2; col += 4)
+        {
+            out_buf[uv_row * wd + col / 2]= in_buf[row * wd * 2 + col + 2];
+            out_buf[uv_row * wd + col / 2 + 1]  = in_buf[row * wd * 2 + col];
+        }
+
+    ALOGD("%s: X", __func__);
+    return rc;
+}
+
+/******************************************************************************
+ * Function: initDisplayBuffers
+ * Description: This function initializes the preview buffers
+ *
+ * Input parameters:
+ *   camHal              - camera HAL handle
+ *
+ * Return values:
+ *      0   Success
+ *      -1  Error
+ * Notes: none
+ *****************************************************************************/
+static int initDisplayBuffers(camera_hardware_t *camHal)
+{
+    preview_stream_ops    *mPreviewWindow;
+    struct ion_fd_data    ion_info_fd;
+    int                   numMinUndequeuedBufs = 0;
+    int                   rc = 0;
+    int                   gralloc_usage = 0;
+    int                   err;
+    int                   color=30;
+
+    ALOGD("%s: E", __func__);
+
+#if DISPLAY
+    if(camHal == NULL) {
+        ALOGE("%s: camHal = NULL", __func__);
+        return -1;
+    }
+
+    mPreviewWindow = camHal->window;
+    if(!mPreviewWindow) {
+        ALOGE("%s: mPreviewWindow = NULL", __func__);
+        return -1;
+    }
+
+    /************************************************************************/
+    /* - get_min_undequeued_buffer_count                                    */
+    /* - set_buffer_count                                                   */
+    /* - set_buffers_geometry                                               */
+    /* - set_usage                                                          */
+    /* - dequeue all the display buffers                                    */
+    /* - cancel buffers: release w/o displaying                             */
+    /************************************************************************/
+
+    /************************************************************************/
+    /* - get_min_undequeued_buffer_count                                    */
+    /************************************************************************/
+    if(mPreviewWindow->get_min_undequeued_buffer_count) {
+        rc = mPreviewWindow->get_min_undequeued_buffer_count(
+            mPreviewWindow, &numMinUndequeuedBufs);
+        if (0 != rc) {
+            ALOGE("%s: get_min_undequeued_buffer_count returned error", __func__);
+        }
+        else
+            ALOGD("%s: get_min_undequeued_buffer_count returned: %d ",
+               __func__, numMinUndequeuedBufs);
+    }
+    else
+        ALOGE("%s: get_min_undequeued_buffer_count is NULL pointer", __func__);
+
+    /************************************************************************/
+    /* - set_buffer_count                                                   */
+    /************************************************************************/
+    if(mPreviewWindow->set_buffer_count) {
+        camHal->previewMem.buffer_count = numMinUndequeuedBufs
+                                            + PRVW_DISP_BUF_CNT;
+        rc = mPreviewWindow->set_buffer_count(
+            mPreviewWindow,
+            camHal->previewMem.buffer_count);
+        if (rc != 0) {
+            ALOGE("%s: set_buffer_count returned error", __func__);
+        }else
+            ALOGD("%s: set_buffer_count returned success", __func__);
+    }else
+        ALOGE("%s: set_buffer_count is NULL pointer", __func__);
+
+    /************************************************************************/
+    /* - set_buffers_geometry                                               */
+    /************************************************************************/
+    if(mPreviewWindow->set_buffers_geometry) {
+        rc = mPreviewWindow->set_buffers_geometry(mPreviewWindow,
+                                                camHal->dispWidth,
+                                                camHal->dispHeight,
+                                                camHal->dispFormat);
+        if (rc != 0) {
+            ALOGE("%s: set_buffers_geometry returned error. %s (%d)",
+               __func__, strerror(-rc), -rc);
+        }else
+            ALOGD("%s: set_buffers_geometry returned success", __func__);
+    }else
+        ALOGE("%s: set_buffers_geometry is NULL pointer", __func__);
+
+    /************************************************************************/
+    /* - set_usage                                                          */
+    /************************************************************************/
+    gralloc_usage = CAMERA_GRALLOC_HEAP_ID | CAMERA_GRALLOC_FALLBACK_HEAP_ID |
+                    GRALLOC_USAGE_PRIVATE_UNCACHED;
+
+    if(mPreviewWindow->set_usage) {
+        rc = mPreviewWindow->set_usage(mPreviewWindow, gralloc_usage);
+        if (rc != 0) {
+            ALOGE("%s: set_usage returned error", __func__);
+        }else
+            ALOGD("%s: set_usage returned success", __func__);
+    }
+    else
+        ALOGE("%s: set_usage is NULL pointer", __func__);
+
+    /************************************************************************/
+    /* - dequeue all the display buffers                                    */
+    /************************************************************************/
+    for (int cnt = 0; cnt < camHal->previewMem.buffer_count; cnt++) {
+        int stride;
+        err = mPreviewWindow->dequeue_buffer(
+                mPreviewWindow,
+                &camHal->previewMem.buffer_handle[cnt],
+                &camHal->previewMem.stride[cnt]);
+        if(!err) {
+            ALOGD("%s: dequeue buf: %p\n",
+                 __func__, camHal->previewMem.buffer_handle[cnt]);
+
+            if(mPreviewWindow->lock_buffer) {
+                err = mPreviewWindow->lock_buffer(
+                    mPreviewWindow,
+                    camHal->previewMem.buffer_handle[cnt]);
+                ALOGD("%s: mPreviewWindow->lock_buffer success",
+                     __func__);
+            }
+
+            // lock the buffer using genlock
+            ALOGD("%s: camera call genlock_lock, hdl=%p",
+                __func__, (*camHal->previewMem.buffer_handle[cnt]));
+
+            if (GENLOCK_NO_ERROR !=
+                genlock_lock_buffer(
+                    (native_handle_t *) (*camHal->previewMem.buffer_handle[cnt]),
+                    GENLOCK_WRITE_LOCK, GENLOCK_MAX_TIMEOUT))
+            {
+                ALOGE("%s: genlock_lock_buffer(WRITE) failed",
+                    __func__);
+                camHal->previewMem.local_flag[cnt] = BUFFER_UNLOCKED;
+            }else {
+                ALOGD("%s: genlock_lock_buffer hdl =%p",
+                  __func__, *camHal->previewMem.buffer_handle[cnt]);
+                camHal->previewMem.local_flag[cnt] = BUFFER_LOCKED;
+            }
+
+            /* Store this buffer details in the context */
+            camHal->previewMem.private_buffer_handle[cnt] =
+                (struct private_handle_t *) (*camHal->previewMem.buffer_handle[cnt]);
+
+            ALOGD("%s: idx = %d, fd = %d, size = %d, offset = %d", __func__,
+                cnt, camHal->previewMem.private_buffer_handle[cnt]->fd,
+                camHal->previewMem.private_buffer_handle[cnt]->size,
+                camHal->previewMem.private_buffer_handle[cnt]->offset);
+
+            camHal->previewMem.camera_memory[cnt] =
+                camHal->get_memory(
+                    camHal->previewMem.private_buffer_handle[cnt]->fd,
+                    camHal->previewMem.private_buffer_handle[cnt]->size,
+                    1, camHal->cb_ctxt);
+
+            ALOGD("%s: data = %p, size = %d, handle = %p", __func__,
+                camHal->previewMem.camera_memory[cnt]->data,
+                camHal->previewMem.camera_memory[cnt]->size,
+                camHal->previewMem.camera_memory[cnt]->handle);
+
+#ifdef USE_ION
+            /* In case of ION usage, open ION fd */
+            camHal->previewMem.mem_info[cnt].main_ion_fd =
+                                                open("/dev/ion", O_RDONLY);
+            if (camHal->previewMem.mem_info[cnt].main_ion_fd < 0) {
+                ALOGE("%s: failed: could not open ion device\n", __func__);
+            }else{
+                memset(&ion_info_fd, 0, sizeof(ion_info_fd));
+                ion_info_fd.fd =
+                    camHal->previewMem.private_buffer_handle[cnt]->fd;
+                if (ioctl(camHal->previewMem.mem_info[cnt].main_ion_fd,
+                          ION_IOC_IMPORT, &ion_info_fd) < 0) {
+                    ALOGE("ION import failed\n");
+                }
+            }
+            camHal->previewMem.mem_info[cnt].fd =
+                camHal->previewMem.private_buffer_handle[cnt]->fd;
+            camHal->previewMem.mem_info[cnt].size =
+                camHal->previewMem.private_buffer_handle[cnt]->size;
+            camHal->previewMem.mem_info[cnt].handle = ion_info_fd.handle;
+
+#endif
+        }
+        else
+            ALOGE("%s: dequeue buf %d failed \n", __func__, cnt);
+    }
+    /************************************************************************/
+    /* - cancel buffers: queue w/o displaying                               */
+    /************************************************************************/
+    for (int cnt = 0; cnt < camHal->previewMem.buffer_count; cnt++) {
+        if (GENLOCK_FAILURE == genlock_unlock_buffer(
+                (native_handle_t *)(*(camHal->previewMem.buffer_handle[cnt])))){
+            ALOGE("%s: genlock_unlock_buffer failed: hdl =%p", __func__,
+                (*(camHal->previewMem.buffer_handle[cnt])) );
+        } else {
+            camHal->previewMem.local_flag[cnt] = BUFFER_UNLOCKED;
+            ALOGD("%s: genlock_unlock_buffer success: hdl = %p",
+               __func__, (*(camHal->previewMem.buffer_handle[cnt])));
+        }
+
+        err = mPreviewWindow->cancel_buffer(mPreviewWindow,
+            (buffer_handle_t *)camHal->previewMem.buffer_handle[cnt]);
+        if(!err) {
+            ALOGD("%s: cancel_buffer successful: %p\n",
+                 __func__, camHal->previewMem.buffer_handle[cnt]);
+        }else
+            ALOGE("%s: cancel_buffer failed: %p\n", __func__,
+                 camHal->previewMem.buffer_handle[cnt]);
+    }
+#else
+    rc = 0;
+#endif /* #if DISPLAY */
+    ALOGD("%s: X", __func__);
+    return rc;
+}
+
+/******************************************************************************
+ * Function: deInitDisplayBuffers
+ * Description: This function de-initializes all the display buffers allocated
+ *              in initDisplayBuffers
+ *
+ * Input parameters:
+ *   camHal              - camera HAL handle
+ *
+ * Return values:
+ *      0   Success
+ *      -1  Error
+ * Notes: none
+ *****************************************************************************/
+static int deInitDisplayBuffers(camera_hardware_t *camHal)
+{
+    int rc = 0;
+    preview_stream_ops    *previewWindow;
+
+    ALOGD("%s: E", __func__);
+
+    if(!camHal || !camHal->window) {
+      ALOGE("%s: camHal = NULL or window = NULL ", __func__);
+      return -1;
+    }
+
+    previewWindow = camHal->window;
+
+    /************************************************************************/
+    /* - Release all buffers that were acquired using get_memory            */
+    /* - If using ION memory, free ION related resources                    */
+    /* - genUnlock if buffer is genLocked                                   */
+    /* - Cancel buffers: queue w/o displaying                               */
+    /************************************************************************/
+
+#if DISPLAY
+    for (int cnt = 0; cnt < camHal->previewMem.buffer_count; cnt++) {
+
+        /* Release all buffers that were acquired using get_memory */
+        camHal->previewMem.camera_memory[cnt]->release(
+                                camHal->previewMem.camera_memory[cnt]);
+
+#ifdef USE_ION
+        /* If using ION memory, free ION related resources */
+        struct ion_handle_data ion_handle;
+        memset(&ion_handle, 0, sizeof(ion_handle));
+        ion_handle.handle = camHal->previewMem.mem_info[cnt].handle;
+        if (ioctl(camHal->previewMem.mem_info[cnt].main_ion_fd,
+            ION_IOC_FREE, &ion_handle) < 0) {
+            ALOGE("%s: ion free failed\n", __func__);
+        }
+        close(camHal->previewMem.mem_info[cnt].main_ion_fd);
+#endif
+
+        /* genUnlock if buffer is genLocked */
+        if(camHal->previewMem.local_flag[cnt] == BUFFER_LOCKED){
+            if (GENLOCK_FAILURE == genlock_unlock_buffer(
+                    (native_handle_t *)(*(camHal->previewMem.buffer_handle[cnt])))){
+                ALOGE("%s: genlock_unlock_buffer failed: hdl =%p", __func__,
+                    (*(camHal->previewMem.buffer_handle[cnt])) );
+            } else {
+                camHal->previewMem.local_flag[cnt] = BUFFER_UNLOCKED;
+                ALOGD("%s: genlock_unlock_buffer success: hdl = %p",
+                   __func__, (*(camHal->previewMem.buffer_handle[cnt])));
+            }
+        }
+        /* cancel buffers: enqueue w/o displaying */
+        rc = previewWindow->cancel_buffer(previewWindow,
+            (buffer_handle_t *)camHal->previewMem.buffer_handle[cnt]);
+        if(!rc) {
+            ALOGD("%s: cancel_buffer successful: %p\n",
+                 __func__, camHal->previewMem.buffer_handle[cnt]);
+        }else
+            ALOGE("%s: cancel_buffer failed: %p\n", __func__,
+                 camHal->previewMem.buffer_handle[cnt]);
+    }
+#endif /* #if DISPLAY */
+    memset(&camHal->previewMem, 0, sizeof(camHal->previewMem));
+
+    ALOGD("%s: X", __func__);
+    return rc;
+}
+
+/******************************************************************************
+ * Function: getPreviewCaptureFmt
+ * Description: This function implements the logic to decide appropriate
+ *              capture format from the USB camera
+ *
+ * Input parameters:
+ *   camHal              - camera HAL handle
+ *
+ * Return values:
+ *      Capture format. Default (V4L2_PIX_FMT_MJPEG)
+ *
+ * Notes: none
+ *****************************************************************************/
+static int getPreviewCaptureFmt(camera_hardware_t *camHal)
+{
+    int     i = 0, mjpegSupported = 0, h264Supported = 0;
+    struct v4l2_fmtdesc fmtdesc;
+
+    memset(&fmtdesc, 0, sizeof(v4l2_fmtdesc));
+
+    /************************************************************************/
+    /* - Query the camera for all supported formats                         */
+    /* - Based on the resolution, pick an apporpriate format                */
+    /************************************************************************/
+
+    /************************************************************************/
+    /* - Query the camera for all supported formats                         */
+    /************************************************************************/
+    for(i = 0; ; i++) {
+        fmtdesc.index = i;
+        fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        if (-1 == ioctlLoop(camHal->fd, VIDIOC_ENUM_FMT, &fmtdesc)) {
+            if (EINVAL == errno) {
+                ALOGI("%s: Queried all formats till index %d\n", __func__, i);
+                break;
+            } else {
+                ALOGE("%s: VIDIOC_ENUM_FMT failed", __func__);
+            }
+        }
+        if(V4L2_PIX_FMT_MJPEG == fmtdesc.pixelformat){
+            mjpegSupported = 1;
+            ALOGI("%s: V4L2_PIX_FMT_MJPEG is supported", __func__ );
+        }
+        if(V4L2_PIX_FMT_H264 == fmtdesc.pixelformat){
+            h264Supported = 1;
+            ALOGI("%s: V4L2_PIX_FMT_H264 is supported", __func__ );
+        }
+
+    }
+
+    /************************************************************************/
+    /* - Based on the resolution, pick an apporpriate format                */
+    /************************************************************************/
+    //V4L2_PIX_FMT_MJPEG; V4L2_PIX_FMT_YUYV; V4L2_PIX_FMT_H264 = 0x34363248;
+    camHal->captureFormat = V4L2_PIX_FMT_YUYV;
+    if(camHal->prevWidth > 640){
+        if(1 == mjpegSupported)
+            camHal->captureFormat = V4L2_PIX_FMT_MJPEG;
+        else if(1 == h264Supported)
+            camHal->captureFormat = V4L2_PIX_FMT_H264;
+    }
+    ALOGI("%s: Capture format chosen: 0x%x. 0x%x:YUYV. 0x%x:MJPEG. 0x%x: H264",
+        __func__, camHal->captureFormat, V4L2_PIX_FMT_YUYV,
+        V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_H264);
+
+    return camHal->captureFormat;
+}
+
+/******************************************************************************
+ * Function: getMjpegdOutputFormat
+ * Description: This function maps display pixel format enum to JPEG output
+ *              format enum
+ *
+ * Input parameters:
+ *   dispFormat              - Display pixel format
+ *
+ * Return values:
+ *      (int)mjpegOutputFormat
+ *
+ * Notes: none
+ *****************************************************************************/
+static int getMjpegdOutputFormat(int dispFormat)
+{
+    int mjpegOutputFormat = YCRCBLP_H2V2;
+
+    if(HAL_PIXEL_FORMAT_YCrCb_420_SP == dispFormat)
+        mjpegOutputFormat = YCRCBLP_H2V2;
+
+    return mjpegOutputFormat;
+}
+
+/******************************************************************************
+ * Function: ioctlLoop
+ * Description: This function is a blocking call around ioctl
+ *
+ * Input parameters:
+ *   fd             - IOCTL fd
+ *   ioctlCmd       - IOCTL command
+ *   args           - IOCTL arguments
+ *
+ * Return values:
+ *      (int)mjpegOutputFormat
+ *
+ * Notes: none
+ *****************************************************************************/
/* Wrapper around ioctl() that retries while the call is interrupted by
 * a signal (EINTR); returns the final ioctl() result. */
static int ioctlLoop(int fd, int ioctlCmd, void *args)
{
    int rc;

    do {
        rc = ioctl(fd, ioctlCmd, args);
    } while ((-1 == rc) && (EINTR == errno));

    return rc;
}
+
+/******************************************************************************
+ * Function: initV4L2mmap
+ * Description: This function requests for V4L2 driver allocated buffers
+ *
+ * Input parameters:
+ *   camHal              - camera HAL handle
+ *
+ * Return values:
+ *   0      No error
+ *   -1     Error
+ *
+ * Notes: none
+ *****************************************************************************/
+static int initV4L2mmap(camera_hardware_t *camHal)
+{
+    int rc = -1;
+    struct v4l2_requestbuffers  reqBufs;
+    struct v4l2_buffer          tempBuf;
+
+    ALOGD("%s: E", __func__);
+    memset(&reqBufs, 0, sizeof(v4l2_requestbuffers));
+    reqBufs.type    = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    reqBufs.memory  = V4L2_MEMORY_MMAP;
+    reqBufs.count   = PRVW_CAP_BUF_CNT;
+
+    if (-1 == ioctlLoop(camHal->fd, VIDIOC_REQBUFS, &reqBufs)) {
+        if (EINVAL == errno) {
+            ALOGE("%s: does not support memory mapping\n", __func__);
+        } else {
+            ALOGE("%s: VIDIOC_REQBUFS failed", __func__);
+        }
+    }
+    ALOGD("%s: VIDIOC_REQBUFS success", __func__);
+
+    if (reqBufs.count < PRVW_CAP_BUF_CNT) {
+        ALOGE("%s: Insufficient buffer memory on\n", __func__);
+    }
+
+    camHal->buffers =
+        ( bufObj* ) calloc(reqBufs.count, sizeof(bufObj));
+
+    if (!camHal->buffers) {
+        ALOGE("%s: Out of memory\n", __func__);
+    }
+
+    /* Store the indexes in the context. Useful during releasing */
+    for (camHal->n_buffers = 0;
+         camHal->n_buffers < reqBufs.count;
+         camHal->n_buffers++) {
+
+        memset(&tempBuf, 0, sizeof(tempBuf));
+
+        tempBuf.index       = camHal->n_buffers;
+        tempBuf.type        = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        tempBuf.memory      = V4L2_MEMORY_MMAP;
+
+        if (-1 == ioctlLoop(camHal->fd, VIDIOC_QUERYBUF, &tempBuf))
+            ALOGE("%s: VIDIOC_QUERYBUF failed", __func__);
+
+        ALOGD("%s: VIDIOC_QUERYBUF success", __func__);
+
+        camHal->buffers[camHal->n_buffers].len = tempBuf.length;
+        camHal->buffers[camHal->n_buffers].data =
+        mmap(NULL /* start anywhere */,
+                  tempBuf.length,
+                  PROT_READ | PROT_WRITE,
+                  MAP_SHARED,
+                  camHal->fd, tempBuf.m.offset);
+
+        if (MAP_FAILED == camHal->buffers[camHal->n_buffers].data)
+            ALOGE("%s: mmap failed", __func__);
+    }
+    ALOGD("%s: X", __func__);
+    return 0;
+}
+
+/******************************************************************************
+ * Function: unInitV4L2mmap
+ * Description: This function unmaps the V4L2 driver buffers
+ *
+ * Input parameters:
+ *   camHal              - camera HAL handle
+ *
+ * Return values:
+ *   0      No error
+ *   -1     Error
+ *
+ * Notes: none
+ *****************************************************************************/
+static int unInitV4L2mmap(camera_hardware_t *camHal)
+{
+    int i, rc = 0;
+    ALOGD("%s: E", __func__);
+
+    for (i = 0; i < camHal->n_buffers; i++)
+        if (-1 == munmap(camHal->buffers[i].data, camHal->buffers[i].len)){
+            ALOGE("%s: munmap failed for buffer: %d", __func__, i);
+            rc = -1;
+        }
+
+    ALOGD("%s: X", __func__);
+    return rc;
+}
+
+/******************************************************************************
+ * Function: initUsbCamera
+ * Description: This function sets the resolution and pixel format of the
+ *              USB camera
+ *
+ * Input parameters:
+ *  camHal              - camera HAL handle
+ *  width               - picture width in pixels
+ *  height              - picture height in pixels
+ *  pixelFormat         - capture format for the camera
+ *
+ * Return values:
+ *   0      No error
+ *   -1     Error
+ *
+ * Notes: none
+ *****************************************************************************/
+static int initUsbCamera(camera_hardware_t *camHal, int width, int height,
+                        int pixelFormat)
+{
+    int     rc = -1;
+    struct  v4l2_capability     cap;
+    struct  v4l2_cropcap        cropcap;
+    struct  v4l2_crop           crop;
+    struct  v4l2_format         v4l2format;
+    unsigned int                min;
+
+    ALOGI("%s: E", __func__);
+
+    if (-1 == ioctlLoop(camHal->fd, VIDIOC_QUERYCAP, &cap)) {
+        if (EINVAL == errno) {
+            ALOGE( "%s: This is not V4L2 device\n", __func__);
+            return -1;
+        } else {
+            ALOGE("%s: VIDIOC_QUERYCAP errno: %d", __func__, errno);
+        }
+    }
+    ALOGD("%s: VIDIOC_QUERYCAP success", __func__);
+
+    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
+        ALOGE("%s: This is not video capture device\n", __func__);
+        return -1;
+    }
+
+    if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
+        ALOGE("%s: This does not support streaming i/o\n", __func__);
+        return -1;
+    }
+
+    /* Select video input, video standard and tune here. */
+    memset(&cropcap, 0, sizeof(cropcap));
+
+    cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+    if (0 == ioctlLoop(camHal->fd, VIDIOC_CROPCAP, &cropcap)) {
+
+        /* reset to default */
+        crop.c = cropcap.defrect;
+        crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+        ALOGD("%s: VIDIOC_CROPCAP success", __func__);
+        if (-1 == ioctlLoop(camHal->fd, VIDIOC_S_CROP, &crop)) {
+        switch (errno) {
+            case EINVAL:
+            /* Cropping not supported. */
+                break;
+            default:
+            /* Errors ignored. */
+                break;
+            }
+        }
+                ALOGD("%s: VIDIOC_S_CROP success", __func__);
+
+    } else {
+        /* Errors ignored. */
+               ALOGE("%s: VIDIOC_S_CROP failed", __func__);
+    }
+
+
+    memset(&v4l2format, 0, sizeof(v4l2format));
+
+    v4l2format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    {
+        v4l2format.fmt.pix.field       = V4L2_FIELD_NONE;
+        v4l2format.fmt.pix.pixelformat = pixelFormat;
+        v4l2format.fmt.pix.width       = width;
+        v4l2format.fmt.pix.height      = height;
+
+        if (-1 == ioctlLoop(camHal->fd, VIDIOC_S_FMT, &v4l2format))
+        {
+            ALOGE("%s: VIDIOC_S_FMT failed", __func__);
+            return -1;
+        }
+        ALOGD("%s: VIDIOC_S_FMT success", __func__);
+
+        /* Note VIDIOC_S_FMT may change width and height. */
+    }
+
+    /* TBR: In case of user pointer buffers, v4l2format.fmt.pix.sizeimage */
+    /* might have to be calculated as per V4L2 sample application due to */
+    /* open source driver bug */
+
+    rc = initV4L2mmap(camHal);
+    ALOGI("%s: X", __func__);
+    return rc;
+}
+
+/******************************************************************************
+ * Function: startUsbCamCapture
+ * Description: This function queues buffer objects to the driver and sends
+ *              STREAM ON command to the USB camera driver
+ *
+ * Input parameters:
+ *   camHal              - camera HAL handle
+ *
+ * Return values:
+ *   0      No error
+ *   -1     Error
+ *
+ * Notes: none
+ *****************************************************************************/
+static int startUsbCamCapture(camera_hardware_t *camHal)
+{
+    int         rc = -1;
+    unsigned    int i;
+    enum        v4l2_buf_type   v4l2BufType;
+    ALOGD("%s: E", __func__);
+
+    for (i = 0; i < camHal->n_buffers; ++i) {
+        struct v4l2_buffer tempBuf;
+
+        memset(&tempBuf, 0, sizeof(tempBuf));
+        tempBuf.type    = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        tempBuf.memory  = V4L2_MEMORY_MMAP;
+        tempBuf.index   = i;
+
+        if (-1 == ioctlLoop(camHal->fd, VIDIOC_QBUF, &tempBuf))
+            ALOGE("%s: VIDIOC_QBUF for %d buffer failed", __func__, i);
+        else
+            ALOGD("%s: VIDIOC_QBUF for %d buffer success", __func__, i);
+    }
+
+    v4l2BufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    if (-1 == ioctlLoop(camHal->fd, VIDIOC_STREAMON, &v4l2BufType))
+        ALOGE("%s: VIDIOC_STREAMON failed", __func__);
+    else
+    {
+        ALOGD("%s: VIDIOC_STREAMON success", __func__);
+        rc = 0;
+    }
+
+    ALOGD("%s: X", __func__);
+    return rc;
+}
+
+/******************************************************************************
+ * Function: stopUsbCamCapture
+ * Description: This function sends STREAM OFF command to the USB camera driver
+ *
+ * Input parameters:
+ *   camHal              - camera HAL handle
+ *
+ * Return values:
+ *   0      No error
+ *   -1     Error
+ *
+ * Notes: none
+ *****************************************************************************/
+static int stopUsbCamCapture(camera_hardware_t *camHal)
+{
+    int         rc = -1;
+    unsigned    int i;
+    enum        v4l2_buf_type   v4l2BufType;
+    ALOGD("%s: E", __func__);
+
+    if(!camHal->fd){
+        ALOGE("%s: camHal->fd = NULL ", __func__);
+        return -1;
+    }
+    v4l2BufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    if (-1 == ioctlLoop(camHal->fd, VIDIOC_STREAMOFF, &v4l2BufType)){
+        ALOGE("%s: VIDIOC_STREAMOFF failed", __func__);
+        rc = -1;
+    }else{
+        ALOGD("%s: VIDIOC_STREAMOFF success", __func__);
+        rc = 0;
+    }
+
+    ALOGD("%s: X", __func__);
+    return rc;
+}
+
+/******************************************************************************
+ * Function: stopPreviewInternal
+ * Description: This function sends EXIT command to prview loop thread,
+ *              stops usb camera capture and uninitializes MMAP. This function
+ *              assumes that calling function has locked camHal->lock
+ *
+ * Input parameters:
+ *   camHal              - camera HAL handle
+ *
+ * Return values:
+ *   0      No error
+ *   -1     Error
+ *
+ * Notes: none
+ *****************************************************************************/
+static int stopPreviewInternal(camera_hardware_t *camHal)
+{
+    int rc = 0;
+    ALOGD("%s: E", __func__);
+
+    if(camHal->previewEnabledFlag)
+    {
+        camHal->prvwCmdPending++;
+        camHal->prvwCmd         = USB_CAM_PREVIEW_EXIT;
+
+        /* yield lock while waiting for the preview thread to exit */
+        camHal->lock.unlock();
+        if(pthread_join(camHal->previewThread, NULL)){
+            ALOGE("%s: Error in pthread_join preview thread", __func__);
+        }
+        camHal->lock.lock();
+
+        if(stopUsbCamCapture(camHal)){
+            ALOGE("%s: Error in stopUsbCamCapture", __func__);
+            rc = -1;
+        }
+        if(unInitV4L2mmap(camHal)){
+            ALOGE("%s: Error in stopUsbCamCapture", __func__);
+            rc = -1;
+        }
+        camHal->previewEnabledFlag = 0;
+    }
+
+    ALOGD("%s: X, rc: %d", __func__, rc);
+    return rc;
+}
+#if 1
+/******************************************************************************
+ * Function: prvwThreadTakePictureInternal
+ * Description: This function processes one camera frame to get JPEG encoded
+ *              picture.
+ *
+ * Input parameters:
+ *   camHal              - camera HAL handle
+ *
+ * Return values:
+ *   0      No error
+ *   -1     Error
+ *
+ * Notes: none
+ *****************************************************************************/
+static int prvwThreadTakePictureInternal(camera_hardware_t *camHal)
+{
+    int     rc = 0;
+    QCameraHalMemInfo_t     *mem_info;
+    ALOGD("%s: E", __func__);
+
+    /************************************************************************/
+    /* - If requested for shutter notfication, callback                     */
+    /* - Dequeue capture buffer from USB camera                             */
+    /* - Send capture buffer to JPEG encoder for JPEG compression           */
+    /* - If jpeg frames callback is requested, callback with jpeg buffers   */
+    /* - Enqueue capture buffer back to USB camera                          */
+    /************************************************************************/
+
+    /************************************************************************/
+    /* - If requested for shutter notfication, callback                     */
+    /************************************************************************/
+    if (camHal->msgEnabledFlag & CAMERA_MSG_SHUTTER){
+        camHal->lock.unlock();
+        camHal->notify_cb(CAMERA_MSG_SHUTTER, 0, 0, camHal->cb_ctxt);
+        camHal->lock.lock();
+    }
+
+#if CAPTURE
+    /************************************************************************/
+    /* - Dequeue capture buffer from USB camera                             */
+    /************************************************************************/
+    if (0 == get_buf_from_cam(camHal))
+        ALOGD("%s: get_buf_from_cam success", __func__);
+    else
+        ALOGE("%s: get_buf_from_cam error", __func__);
+#endif
+
+    /************************************************************************/
+    /* - Send capture buffer to JPEG encoder for JPEG compression           */
+    /************************************************************************/
+    /* Optimization: If camera capture is JPEG format, need not compress! */
+    /* instead, just data copy from capture buffer to picture buffer */
+    if(V4L2_PIX_FMT_MJPEG == camHal->captureFormat){
+        /* allocate heap memory for JPEG output */
+        mem_info = &camHal->pictMem.mem_info[0];
+        mem_info->size = camHal->curCaptureBuf.bytesused;
+        /* TBD: allocate_ion_memory
+        rc = QCameraHardwareInterface::allocate_ion_memory(mem_info,
+                            ((0x1 << CAMERA_ZSL_ION_HEAP_ID) |
+                            (0x1 << CAMERA_ZSL_ION_FALLBACK_HEAP_ID)));
+        */
+        if(rc)
+            ALOGE("%s: ION memory allocation failed", __func__);
+
+        camHal->pictMem.camera_memory[0] = camHal->get_memory(
+                            mem_info->fd, mem_info->size, 1, camHal->cb_ctxt);
+        if(!camHal->pictMem.camera_memory[0])
+            ALOGE("%s: get_mem failed", __func__);
+
+        memcpy( camHal->pictMem.camera_memory[0]->data,
+                (char *)camHal->buffers[camHal->curCaptureBuf.index].data,
+                camHal->curCaptureBuf.bytesused);
+    }
+
+    /************************************************************************/
+    /* - If jpeg frames callback is requested, callback with jpeg buffers   */
+    /************************************************************************/
+    if ((camHal->msgEnabledFlag & CAMERA_MSG_COMPRESSED_IMAGE) &&
+            (camHal->data_cb)){
+        camHal->lock.unlock();
+        camHal->data_cb(CAMERA_MSG_COMPRESSED_IMAGE,
+                        camHal->pictMem.camera_memory[0],
+                        0, NULL, camHal->cb_ctxt);
+        camHal->lock.lock();
+    }
+    /* release heap memory after the call back */
+    if(camHal->pictMem.camera_memory[0])
+        camHal->pictMem.camera_memory[0]->release(
+            camHal->pictMem.camera_memory[0]);
+
+    /* TBD: deallocate_ion_memory */
+    //rc = QCameraHardwareInterface::deallocate_ion_memory(mem_info);
+    if(rc)
+        ALOGE("%s: ION memory de-allocation failed", __func__);
+
+#if CAPTURE
+    /************************************************************************/
+    /* - Enqueue capture buffer back to USB camera                          */
+    /************************************************************************/
+       if(0 == put_buf_to_cam(camHal)) {
+            ALOGD("%s: put_buf_to_cam success", __func__);
+        }
+        else
+            ALOGE("%s: put_buf_to_cam error", __func__);
+#endif
+
+    ALOGD("%s: X, rc: %d", __func__, rc);
+    return rc;
+}
+#endif //#if 0
+/******************************************************************************
+ * Function: cache_ops
+ * Description: This function calls ION ioctl for cache related operations
+ *
+ * Input parameters:
+ *  mem_info                - QCameraHalMemInfo_t structure with ION info
+ *  buf_ptr                 - Buffer pointer that needs to be cache operated
+ *  cmd                     - Cache command - clean/invalidate
+ *
+ * Return values:
+ *   MM_CAMERA_OK       No error
+ *   -1                 Error
+ *
+ * Notes: none
+ *****************************************************************************/
+int cache_ops(QCameraHalMemInfo_t *mem_info,
+                                    void *buf_ptr,
+                                    unsigned int cmd)
+{
+    struct ion_flush_data cache_inv_data;
+    struct ion_custom_data custom_data;
+    int ret = MM_CAMERA_OK;
+
+#ifdef USE_ION
+    if (NULL == mem_info) {
+        ALOGE("%s: mem_info is NULL, return here", __func__);
+        return -1;
+    }
+
+    memset(&cache_inv_data, 0, sizeof(cache_inv_data));
+    memset(&custom_data, 0, sizeof(custom_data));
+    cache_inv_data.vaddr = buf_ptr;
+    cache_inv_data.fd = mem_info->fd;
+    cache_inv_data.handle = mem_info->handle;
+    cache_inv_data.length = mem_info->size;
+    custom_data.cmd = cmd;
+    custom_data.arg = (unsigned long)&cache_inv_data;
+
+    ALOGD("%s: addr = %p, fd = %d, handle = %p length = %d, ION Fd = %d",
+         __func__, cache_inv_data.vaddr, cache_inv_data.fd,
+         cache_inv_data.handle, cache_inv_data.length,
+         mem_info->main_ion_fd);
+    if(mem_info->main_ion_fd > 0) {
+        if(ioctl(mem_info->main_ion_fd, ION_IOC_CUSTOM, &custom_data) < 0) {
+            ALOGE("%s: Cache Invalidate failed\n", __func__);
+            ret = -1;
+        }
+    }
+#endif
+
+    return ret;
+}
+
+/******************************************************************************
+ * Function: get_buf_from_cam
+ * Description: This funtions gets/acquires 1 capture buffer from the camera
+ *              driver. The fetched buffer is stored in curCaptureBuf
+ *
+ * Input parameters:
+ *   camHal              - camera HAL handle
+ *
+ * Return values:
+ *   0      No error
+ *   -1     Error
+ *
+ * Notes: none
+ *****************************************************************************/
+static int get_buf_from_cam(camera_hardware_t *camHal)
+{
+    int rc = -1;
+
+    ALOGD("%s: E", __func__);
+    {
+        memset(&camHal->curCaptureBuf, 0, sizeof(camHal->curCaptureBuf));
+
+        camHal->curCaptureBuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        camHal->curCaptureBuf.memory = V4L2_MEMORY_MMAP;
+
+        if (-1 == ioctlLoop(camHal->fd, VIDIOC_DQBUF, &camHal->curCaptureBuf)){
+            switch (errno) {
+            case EAGAIN:
+                ALOGE("%s: EAGAIN error", __func__);
+                return 1;
+
+            case EIO:
+            /* Could ignore EIO, see spec. */
+
+            /* fall through */
+
+            default:
+            ALOGE("%s: VIDIOC_DQBUF error", __func__);
+            }
+        }
+        else
+        {
+            rc = 0;
+            ALOGD("%s: VIDIOC_DQBUF: %d successful, %d bytes",
+                 __func__, camHal->curCaptureBuf.index,
+                 camHal->curCaptureBuf.bytesused);
+        }
+    }
+    ALOGD("%s: X", __func__);
+    return rc;
+}
+
+/******************************************************************************
+ * Function: put_buf_to_cam
+ * Description: This funtion puts/releases 1 capture buffer back to the camera
+ *              driver
+ *
+ * Input parameters:
+ *   camHal              - camera HAL handle
+ *
+ * Return values:
+ *   0      No error
+ *   -1     Error
+ *
+ * Notes: none
+ *****************************************************************************/
+static int put_buf_to_cam(camera_hardware_t *camHal)
+{
+    ALOGD("%s: E", __func__);
+
+    camHal->curCaptureBuf.type        = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    camHal->curCaptureBuf.memory      = V4L2_MEMORY_MMAP;
+
+
+    if (-1 == ioctlLoop(camHal->fd, VIDIOC_QBUF, &camHal->curCaptureBuf))
+    {
+        ALOGE("%s: VIDIOC_QBUF failed ", __func__);
+        return 1;
+    }
+    ALOGD("%s: X", __func__);
+    return 0;
+}
+
+/******************************************************************************
+ * Function: put_buf_to_cam
+ * Description: This funtion gets/acquires 1 display buffer from the display
+ *              window
+ *
+ * Input parameters:
+ *  camHal                  - camera HAL handle
+ *  buffer_id               - Buffer id pointer. The id of buffer obtained
+ *                              by this function is returned in this arg
+ *
+ * Return values:
+ *   0      No error
+ *   -1     Error
+ *
+ * Notes: none
+ *****************************************************************************/
+static int get_buf_from_display(camera_hardware_t *camHal, int *buffer_id)
+{
+    int                     err = 0;
+    preview_stream_ops      *mPreviewWindow = NULL;
+    int                     stride = 0, cnt = 0;
+    buffer_handle_t         *buffer_handle = NULL;
+    struct private_handle_t *private_buffer_handle = NULL;
+
+    ALOGD("%s: E", __func__);
+
+    if (camHal == NULL) {
+        ALOGE("%s: camHal = NULL", __func__);
+        return -1;
+    }
+
+    mPreviewWindow = camHal->window;
+    if( mPreviewWindow == NULL) {
+        ALOGE("%s: mPreviewWindow = NULL", __func__);
+        return -1;
+    }
+    err = mPreviewWindow->dequeue_buffer(mPreviewWindow,
+                                    &buffer_handle,
+                                    &stride);
+    if(!err) {
+        ALOGD("%s: dequeue buf buffer_handle: %p\n", __func__, buffer_handle);
+
+        ALOGD("%s: mPreviewWindow->lock_buffer: %p",
+             __func__, mPreviewWindow->lock_buffer);
+        if(mPreviewWindow->lock_buffer) {
+            err = mPreviewWindow->lock_buffer(mPreviewWindow, buffer_handle);
+            ALOGD("%s: mPreviewWindow->lock_buffer success", __func__);
+        }
+        ALOGD("%s: camera call genlock_lock, hdl=%p",
+             __func__, (*buffer_handle));
+
+        if (GENLOCK_NO_ERROR !=
+            genlock_lock_buffer((native_handle_t *)(*buffer_handle),
+                                GENLOCK_WRITE_LOCK, GENLOCK_MAX_TIMEOUT)) {
+           ALOGE("%s: genlock_lock_buffer(WRITE) failed", __func__);
+       } else {
+         ALOGD("%s: genlock_lock_buffer hdl =%p", __func__, *buffer_handle);
+       }
+
+        private_buffer_handle = (struct private_handle_t *)(*buffer_handle);
+
+        ALOGD("%s: fd = %d, size = %d, offset = %d, stride = %d",
+             __func__, private_buffer_handle->fd,
+        private_buffer_handle->size, private_buffer_handle->offset, stride);
+
+        for(cnt = 0; cnt < camHal->previewMem.buffer_count + 2; cnt++) {
+            if(private_buffer_handle->fd ==
+               camHal->previewMem.private_buffer_handle[cnt]->fd) {
+                *buffer_id = cnt;
+                ALOGD("%s: deQueued fd = %d, index: %d",
+                     __func__, private_buffer_handle->fd, cnt);
+                break;
+            }
+        }
+    }
+    else
+        ALOGE("%s: dequeue buf failed \n", __func__);
+
+    ALOGD("%s: X", __func__);
+
+    return err;
+}
+
+/******************************************************************************
+ * Function: put_buf_to_display
+ * Description: This funtion puts/enqueues 1 buffer back to the display window
+ *
+ * Input parameters:
+ *  camHal                  - camera HAL handle
+ *  buffer_id               - id of the buffer that needs to be enqueued
+ *
+ * Return values:
+ *   0      No error
+ *   -1     Error
+ *
+ * Notes: none
+ *****************************************************************************/
+static int put_buf_to_display(camera_hardware_t *camHal, int buffer_id)
+{
+    int err = 0;
+    preview_stream_ops    *mPreviewWindow;
+
+    ALOGD("%s: E", __func__);
+
+    if (camHal == NULL) {
+        ALOGE("%s: camHal = NULL", __func__);
+        return -1;
+    }
+
+    mPreviewWindow = camHal->window;
+    if( mPreviewWindow == NULL) {
+        ALOGE("%s: mPreviewWindow = NULL", __func__);
+        return -1;
+    }
+
+    if (GENLOCK_FAILURE ==
+        genlock_unlock_buffer(
+            (native_handle_t *)
+            (*(camHal->previewMem.buffer_handle[buffer_id])))) {
+       ALOGE("%s: genlock_unlock_buffer failed: hdl =%p",
+            __func__, (*(camHal->previewMem.buffer_handle[buffer_id])) );
+    } else {
+      ALOGD("%s: genlock_unlock_buffer success: hdl =%p",
+           __func__, (*(camHal->previewMem.buffer_handle[buffer_id])) );
+    }
+
+    /* Cache clean the output buffer so that cache is written back */
+    cache_ops(&camHal->previewMem.mem_info[buffer_id],
+                         (void *)camHal->previewMem.camera_memory[buffer_id]->data,
+                         ION_IOC_CLEAN_CACHES);
+                         /*
+    cache_ops(&camHal->previewMem.mem_info[buffer_id],
+                         (void *)camHal->previewMem.camera_memory[buffer_id]->data,
+                         ION_IOC_CLEAN_INV_CACHES);
+*/
+    err = mPreviewWindow->enqueue_buffer(mPreviewWindow,
+      (buffer_handle_t *)camHal->previewMem.buffer_handle[buffer_id]);
+    if(!err) {
+        ALOGD("%s: enqueue buf successful: %p\n",
+             __func__, camHal->previewMem.buffer_handle[buffer_id]);
+    }else
+        ALOGE("%s: enqueue buf failed: %p\n",
+             __func__, camHal->previewMem.buffer_handle[buffer_id]);
+
+    ALOGD("%s: X", __func__);
+
+    return err;
+}
+
+/******************************************************************************
+ * Function: put_buf_to_display
+ * Description: This funtion transfers the content from capture buffer to
+ *              preiew display buffer after appropriate conversion
+ *
+ * Input parameters:
+ *  camHal                  - camera HAL handle
+ *  buffer_id               - id of the buffer that needs to be enqueued
+ *
+ * Return values:
+ *   0      No error
+ *   -1     Error
+ *
+ * Notes: none
+ *****************************************************************************/
+static int convert_data_frm_cam_to_disp(camera_hardware_t *camHal, int buffer_id)
+{
+    int rc = -1;
+
+    if(!camHal) {
+        ALOGE("%s: camHal is NULL", __func__);
+        return -1;
+    }
+    /* If input and output are raw formats, but different color format, */
+    /* call color conversion routine                                    */
+    if( (V4L2_PIX_FMT_YUYV == camHal->captureFormat) &&
+        (HAL_PIXEL_FORMAT_YCrCb_420_SP == camHal->dispFormat))
+    {
+        convert_YUYV_to_420_NV12(
+            (char *)camHal->buffers[camHal->curCaptureBuf.index].data,
+            (char *)camHal->previewMem.camera_memory[buffer_id]->data,
+            camHal->prevWidth,
+            camHal->prevHeight);
+        ALOGD("%s: Copied %d bytes from camera buffer %d to display buffer: %d",
+             __func__, camHal->curCaptureBuf.bytesused,
+             camHal->curCaptureBuf.index, buffer_id);
+        rc = 0;
+    }
+
+    /* If camera buffer is MJPEG encoded, call mjpeg decode call */
+    if(V4L2_PIX_FMT_MJPEG == camHal->captureFormat)
+    {
+        if(NULL == camHal->mjpegd)
+        {
+            rc = mjpegDecoderInit(&camHal->mjpegd);
+            if(rc < 0)
+                ALOGE("%s: mjpegDecoderInit Error: %d", __func__, rc);
+        }
+        if(camHal->mjpegd)
+        {
+            rc = mjpegDecode(
+                (void*)camHal->mjpegd,
+                (char *)camHal->buffers[camHal->curCaptureBuf.index].data,
+                camHal->curCaptureBuf.bytesused,
+                (char *)camHal->previewMem.camera_memory[buffer_id]->data,
+                (char *)camHal->previewMem.camera_memory[buffer_id]->data +
+                    camHal->prevWidth * camHal->prevHeight,
+                getMjpegdOutputFormat(camHal->dispFormat));
+            if(rc < 0)
+                ALOGE("%s: mjpegDecode Error: %d", __func__, rc);
+        }
+    }
+    return rc;
+}
+
+/******************************************************************************
+ * Function: launch_preview_thread
+ * Description: This is a wrapper function to start preview thread
+ *
+ * Input parameters:
+ *  camHal                  - camera HAL handle
+ *
+ * Return values:
+ *   0      No error
+ *   -1     Error
+ *
+ * Notes: none
+ *****************************************************************************/
+static int launch_preview_thread(camera_hardware_t *camHal)
+{
+    ALOGD("%s: E", __func__);
+    int rc = 0;
+
+    if(!camHal) {
+        ALOGE("%s: camHal is NULL", __func__);
+        return -1;
+    }
+
+    pthread_attr_t attr;
+    pthread_attr_init(&attr);
+    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
+    pthread_create(&camHal->previewThread, &attr, previewloop, camHal);
+
+    ALOGD("%s: X", __func__);
+    return rc;
+}
+
+/******************************************************************************
+ * Function: launch_preview_thread
+ * Description: This is thread funtion for preivew loop
+ *
+ * Input parameters:
+ *  hcamHal                 - camera HAL handle
+ *
+ * Return values:
+ *   0      No error
+ *   -1     Error
+ *
+ * Notes: none
+ *****************************************************************************/
+static void * previewloop(void *hcamHal)
+{
+    int                 rc;
+    int                 buffer_id   = 0;
+    pid_t               tid         = 0;
+    camera_hardware_t   *camHal     = NULL;
+    int                 msgType     = 0;
+    camera_memory_t     *data       = NULL;
+    camera_frame_metadata_t *metadata= NULL;
+    camera_memory_t     *previewMem = NULL;
+
+    camHal = (camera_hardware_t *)hcamHal;
+    ALOGD("%s: E", __func__);
+
+    if(!camHal) {
+        ALOGE("%s: camHal is NULL", __func__);
+        return NULL ;
+    }
+
+    tid  = gettid();
+    /* TBR: Set appropriate thread priority */
+    androidSetThreadPriority(tid, ANDROID_PRIORITY_NORMAL);
+    prctl(PR_SET_NAME, (unsigned long)"Camera HAL preview thread", 0, 0, 0);
+
+    /************************************************************************/
+    /* - Time wait (select) on camera fd for input read buffer              */
+    /* - Check if any preview thread commands are set. If set, process      */
+    /* - Dequeue display buffer from surface                                */
+    /* - Dequeue capture buffer from USB camera                             */
+    /* - Convert capture format to display format                           */
+    /* - If preview frames callback is requested, callback with prvw buffers*/
+    /* - Enqueue display buffer back to surface                             */
+    /* - Enqueue capture buffer back to USB camera                          */
+    /************************************************************************/
+    while(1) {
+        fd_set fds;
+        struct timeval tv;
+        int r = 0;
+
+        FD_ZERO(&fds);
+#if CAPTURE
+        FD_SET(camHal->fd, &fds);
+#endif /* CAPTURE */
+
+    /************************************************************************/
+    /* - Time wait (select) on camera fd for input read buffer              */
+    /************************************************************************/
+        tv.tv_sec = 0;
+        tv.tv_usec = 500000;
+
+        ALOGD("%s: b4 select on camHal->fd + 1,fd: %d", __func__, camHal->fd);
+#if CAPTURE
+        r = select(camHal->fd + 1, &fds, NULL, NULL, &tv);
+#else
+        r = select(1, NULL, NULL, NULL, &tv);
+#endif /* CAPTURE */
+        ALOGD("%s: after select : %d", __func__, camHal->fd);
+
+        if (-1 == r) {
+            if (EINTR == errno)
+                continue;
+            ALOGE("%s: FDSelect error: %d", __func__, errno);
+        }
+
+        if (0 == r) {
+            ALOGD("%s: select timeout\n", __func__);
+        }
+
+        /* Protect the context for one iteration of preview loop */
+        /* this gets unlocked at the end of the while */
+        Mutex::Autolock autoLock(camHal->lock);
+
+    /************************************************************************/
+    /* - Check if any preview thread commands are set. If set, process      */
+    /************************************************************************/
+        if(camHal->prvwCmdPending)
+        {
+            /* command is serviced. Hence command pending = 0  */
+            camHal->prvwCmdPending--;
+            //sempost(ack)
+            if(USB_CAM_PREVIEW_EXIT == camHal->prvwCmd){
+                /* unlock before exiting the thread */
+                camHal->lock.unlock();
+                ALOGI("%s: Exiting coz USB_CAM_PREVIEW_EXIT", __func__);
+                return (void *)0;
+            }else if(USB_CAM_PREVIEW_TAKEPIC == camHal->prvwCmd){
+                rc = prvwThreadTakePictureInternal(camHal);
+                if(rc)
+                    ALOGE("%s: prvwThreadTakePictureInternal returned error",
+                    __func__);
+            }
+        }
+
+        /* Null check on preview window. If null, sleep */
+        if(!camHal->window) {
+            ALOGD("%s: sleeping coz camHal->window = NULL",__func__);
+            camHal->lock.unlock();
+            sleep(2);
+            continue;
+        }
+#if DISPLAY
+    /************************************************************************/
+    /* - Dequeue display buffer from surface                                */
+    /************************************************************************/
+        if(0 == get_buf_from_display(camHal, &buffer_id)) {
+            ALOGD("%s: get_buf_from_display success: %d",
+                 __func__, buffer_id);
+        }else{
+            ALOGE("%s: get_buf_from_display failed. Skipping the loop",
+                 __func__);
+            continue;
+        }
+#endif
+
+#if CAPTURE
+    /************************************************************************/
+    /* - Dequeue capture buffer from USB camera                             */
+    /************************************************************************/
+        if (0 == get_buf_from_cam(camHal))
+            ALOGD("%s: get_buf_from_cam success", __func__);
+        else
+            ALOGE("%s: get_buf_from_cam error", __func__);
+#endif
+
+#if FILE_DUMP_CAMERA
+        /* Debug code to dump frames from camera */
+        {
+            static int frame_cnt = 0;
+            /* currently hardcoded for Bytes-Per-Pixel = 1.5 */
+            fileDump("/data/USBcam.yuv",
+            (char*)camHal->buffers[camHal->curCaptureBuf.index].data,
+            camHal->prevWidth * camHal->prevHeight * 1.5,
+            &frame_cnt);
+        }
+#endif
+
+#if MEMSET
+        static int color = 30;
+        color += 50;
+        if(color > 200) {
+            color = 30;
+        }
+        ALOGE("%s: Setting to the color: %d\n", __func__, color);
+        /* currently hardcoded for format of type Bytes-Per-Pixel = 1.5 */
+        memset(camHal->previewMem.camera_memory[buffer_id]->data,
+               color, camHal->dispWidth * camHal->dispHeight * 1.5 + 2 * 1024);
+#else
+        convert_data_frm_cam_to_disp(camHal, buffer_id);
+        ALOGD("%s: Copied data to buffer_id: %d", __func__, buffer_id);
+#endif
+
+#if FILE_DUMP_B4_DISP
+        /* Debug code to dump display buffers */
+        {
+            static int frame_cnt = 0;
+            /* currently hardcoded for Bytes-Per-Pixel = 1.5 */
+            fileDump("/data/display.yuv",
+                (char*) camHal->previewMem.camera_memory[buffer_id]->data,
+                camHal->dispWidth * camHal->dispHeight * 1.5,
+                &frame_cnt);
+            ALOGD("%s: Written buf_index: %d ", __func__, buffer_id);
+        }
+#endif
+
+#if DISPLAY
+    /************************************************************************/
+    /* - Enqueue display buffer back to surface                             */
+    /************************************************************************/
+       if(0 == put_buf_to_display(camHal, buffer_id)) {
+            ALOGD("%s: put_buf_to_display success: %d", __func__, buffer_id);
+        }
+        else
+            ALOGE("%s: put_buf_to_display error", __func__);
+#endif
+
+#if CAPTURE
+     /************************************************************************/
+    /* - Enqueue capture buffer back to USB camera                          */
+    /************************************************************************/
+       if(0 == put_buf_to_cam(camHal)) {
+            ALOGD("%s: put_buf_to_cam success", __func__);
+        }
+        else
+            ALOGE("%s: put_buf_to_cam error", __func__);
+#endif
+
+#if CALL_BACK
+    /************************************************************************/
+    /* - If preview frames callback is requested, callback with prvw buffers*/
+    /************************************************************************/
+        /* TBD: change the 1.5 hardcoding to Bytes Per Pixel */
+        int previewBufSize = camHal->prevWidth * camHal->prevHeight * 1.5;
+
+        msgType |=  CAMERA_MSG_PREVIEW_FRAME;
+
+        if(previewBufSize !=
+            camHal->previewMem.private_buffer_handle[buffer_id]->size) {
+
+            previewMem = camHal->get_memory(
+                camHal->previewMem.private_buffer_handle[buffer_id]->fd,
+                previewBufSize,
+                1,
+                camHal->cb_ctxt);
+
+              if (!previewMem || !previewMem->data) {
+                  ALOGE("%s: get_memory failed.\n", __func__);
+              }
+              else {
+                  data = previewMem;
+                  ALOGD("%s: GetMemory successful. data = %p",
+                            __func__, data);
+                  ALOGD("%s: previewBufSize = %d, priv_buf_size: %d",
+                    __func__, previewBufSize,
+                    camHal->previewMem.private_buffer_handle[buffer_id]->size);
+              }
+        }
+        else{
+            data =   camHal->previewMem.camera_memory[buffer_id];
+            ALOGD("%s: No GetMemory, no invalid fmt. data = %p, idx=%d",
+                __func__, data, buffer_id);
+        }
+        /* Unlock and lock around the callback. */
+        /* Sometimes 'disable_msg' is issued in the callback context, */
+        /* leading to deadlock */
+        camHal->lock.unlock();
+        if((camHal->msgEnabledFlag & CAMERA_MSG_PREVIEW_FRAME) &&
+            camHal->data_cb){
+            ALOGD("%s: before data callback", __func__);
+            camHal->data_cb(msgType, data, 0,metadata, camHal->cb_ctxt);
+            ALOGD("%s: after data callback: %p", __func__, camHal->data_cb);
+        }
+        camHal->lock.lock();
+        if (previewMem)
+            previewMem->release(previewMem);
+#endif
+
+    }//while(1)
+    ALOGD("%s: X", __func__);
+    return (void *)0;
+}
+
+/******************************************************************************
+ * Function: get_uvc_device
+ * Description: This function loops through /dev/video entries and probes with
+ *              UVCIOC query. If the device responds to the query, then it is
+ *              detected as UVC webcam
+ * Input parameters:
+ *   devname             - String pointer. The function return dev entry
+ *                          name in this string
+ * Return values:
+ *      0   Success
+ *      -1  Error
+ * Notes: none
+ *****************************************************************************/
+static int get_uvc_device(char *devname)
+{
+    char    temp_devname[FILENAME_LENGTH];
+    FILE    *fp = NULL;
+    int     i = 0, ret = 0, fd;
+
+    ALOGD("%s: E", __func__);
+#if 1
+    strncpy(devname, "/dev/video1", FILENAME_LENGTH);
+
+/*
+    struct          stat st;
+
+    strncpy(dev_name, "/dev/video1", FILENAME_LENGTH);
+    if (-1 == stat(dev_name, &st)) {
+        ALOGE("%s: Cannot identify '%s': %d, %s\n",
+             __func__, dev_name, errno, strerror(errno));
+    }
+
+    if (!S_ISCHR(st.st_mode)) {
+        ALOGE("%s: %s is no device\n", __func__, dev_name);
+        rc = -1;
+    }
+*/
+
+#else
+
+    *devname = '\0';
+    /************************************************************************/
+    /* - List all /dev/video* entries to a file                             */
+    /* - Open the video list file and loop through the list                 */
+    /* - Send UVC specific control query and check the response             */
+    /* - If device responds to the query as success, device is UVC webcam   */
+    /************************************************************************/
+
+    /************************************************************************/
+    /* - List all /dev/video* entries to a file                             */
+    /************************************************************************/
+    /* Temporarily commented out. This logic doesnt seem to be working */
+    //system("ls > /data/video_dev_list");
+
+    /************************************************************************/
+    /* - Open the video list file and loop through the list                 */
+    /************************************************************************/
+
+    /* Temporarily commented out. This logic doesnt seem to be working */
+    /*
+    fp = fopen("/data/video_dev_list", "rb");
+    if(!fp) {
+        ALOGE("%s: Error in opening /data/video_dev_list ", __func__);
+        return -1;
+    }
+    */
+
+    /* Temporarily commented out. Looping logic changed due to issue in */
+    /* executing system("ls > /data/video_dev_list") */
+    //while(EOF != fscanf(fp, "%s", devname)){
+    while(1){
+        uvc_xu_control_query    xqry;
+
+        sprintf(temp_devname, "/dev/video%d", i);
+        ALOGD("%s: Probing %s \n", __func__, temp_devname);
+
+        fd = open(temp_devname, O_RDWR /* required */ | O_NONBLOCK, 0);
+        if(-1 != fd){
+            memset(&xqry, 0, sizeof(uvc_xu_control_query));
+            ret = ioctl(fd, UVCIOC_CTRL_QUERY, &xqry);
+            ALOGD("%s: UVCIOC ret: %d, errno: %d", __func__, ret, errno);
+            /****************************************************************/
+            /* if UVCIOC is executed successfully, ret = 0                  */
+            /* if UVCIOC is executed but Control Unit = 0 does not exist,   */
+            /*      ret = -1 and errno = ENOENT                             */
+            /* if UVCIOC doesnot execute, ret = -1 and errno = EINVAL       */
+            /****************************************************************/
+            if((0 == ret) || (ret && (ENOENT == errno))){
+                ALOGD("%s: Found UVC node: %s\n", __func__, temp_devname);
+                strncpy(devname, temp_devname, FILENAME_LENGTH);
+                /* Exit the loop at the first UVC node detection */
+                break;
+            }
+            close(fd);
+        }
+        /* Temporarily logic to probe video0 to video10 nodes */
+        if(i++ > 10)
+        {
+            if(fp)
+                fclose(fp);
+            break;
+        }
+    }
+#endif /* #if 0 */
+    ALOGD("%s: X", __func__);
+    return 0;
+} /* get_uvc_device */
+
+/******************************************************************************
+ * Function: fileDump
+ * Description: This is a utility function to dump buffers into a file
+ *
+ * Input parameters:
+ *  fn              - File name string
+ *  data            - pointer to character buffer that needs to be dumped
+ *  length          - Length of the buffer to be dumped
+ *  frm_cnt         - Pointer to frame count. This count is incremented by this
+ *                      function on successful file write
+ * Return values:
+ *      0   Success
+ *      -1  Error
+ * Notes: On the first frame (*frm_cnt == 0) the file is truncated so each
+ *        capture session starts with a fresh dump; later frames append.
+ *****************************************************************************/
+static int fileDump(const char* fn, char* data, int length, int* frm_cnt)
+{
+    FILE *fp = NULL;
+
+    if (0 == *frm_cnt) {
+        /* Truncate any stale dump from a previous session */
+        fp = fopen(fn, "wb");
+        if (NULL == fp) {
+            /* Original unconditionally fclose()d a NULL fp here - crash */
+            ALOGE("%s: Error in opening %s", __func__, fn);
+            return -1;
+        }
+        fclose(fp);
+    }
+
+    fp = fopen(fn, "ab");
+    if (NULL == fp) {
+        /* Original fell through and fwrote to a NULL stream */
+        ALOGE("%s: Error in opening %s", __func__, fn);
+        return -1;
+    }
+
+    if (fwrite(data, 1, length, fp) != (size_t)length) {
+        ALOGE("%s: Short write of %d bytes to %s", __func__, length, fn);
+        fclose(fp);
+        return -1;
+    }
+    fclose(fp);
+
+    (*frm_cnt)++;
+    ALOGD("%s: Written %d bytes for frame:%d, in %s",
+        __func__, length, *frm_cnt, fn);
+
+    return 0;
+}
+
+/******************************************************************************
+ * Function: launchTakePictureThread
+ * Description: This is a wrapper function to start take picture thread
+ *
+ * Input parameters:
+ *  camHal                  - camera HAL handle
+ *
+ * Return values:
+ *   0      No error
+ *   -1     Error
+ *
+ * Notes: The thread is created detached, so its resources are reclaimed
+ *        automatically on exit and no pthread_join is required.
+ *****************************************************************************/
+static int launchTakePictureThread(camera_hardware_t *camHal)
+{
+    ALOGD("%s: E", __func__);
+    int rc = 0;
+
+    if(!camHal) {
+        ALOGE("%s: camHal is NULL", __func__);
+        return -1;
+    }
+
+    pthread_attr_t attr;
+    pthread_attr_init(&attr);
+    /* create the thread in detached state, when the thread exits all */
+    /* memory resources are freed up */
+    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+    rc = pthread_create(&camHal->takePictureThread, &attr,
+                        takePictureThread, camHal);
+    if(rc) {
+        /* Original ignored this failure; surface it so a failed take-picture
+         * does not silently never complete */
+        ALOGE("%s: pthread_create failed: %d", __func__, rc);
+        rc = -1;
+    }
+    /* Attr object is no longer needed once the thread is (not) created */
+    pthread_attr_destroy(&attr);
+
+    ALOGD("%s: X", __func__);
+    return rc;
+}
+
+/******************************************************************************
+ * Function: takePictureThread
+ * Description: Thread entry point for a single still-image capture.  It
+ *              re-opens the USB camera with snapshot parameters, waits for a
+ *              frame, obtains JPEG data (from file, from the camera's MJPEG
+ *              stream, or via the HAL JPEG encoder depending on build flags),
+ *              delivers it through the compressed-image callback, then tears
+ *              the camera down and restarts preview if it was stopped for
+ *              the capture.
+ *
+ * Input parameters:
+ *  hcamHal                 - camera HAL handle (camera_hardware_t *)
+ *
+ * Return values:
+ *   (void *)0      No error
+ *   (void *)-1     Error
+ *
+ * Notes: Runs detached (see launchTakePictureThread).  Holds camHal->lock for
+ *        nearly the whole capture, releasing it only around app callbacks.
+ *****************************************************************************/
+static void * takePictureThread(void *hcamHal)
+{
+    int                 rc = 0;
+    int                 buffer_id   = 0;
+    pid_t               tid         = 0;
+    camera_hardware_t   *camHal     = NULL;
+    int                 msgType     = 0;
+    int                 jpegLength  = 0;
+    QCameraHalMemInfo_t *mem_info   = NULL;
+
+    camHal = (camera_hardware_t *)hcamHal;
+    ALOGI("%s: E", __func__);
+
+    if(!camHal) {
+        ALOGE("%s: camHal is NULL", __func__);
+        return NULL ;
+    }
+
+    tid  = gettid();
+    /* TBR: Set appropriate thread priority */
+    androidSetThreadPriority(tid, ANDROID_PRIORITY_NORMAL);
+    /* NOTE(review): thread name says "preview thread" but this is the take
+     * picture thread; runtime string left untouched here - fix separately */
+    prctl(PR_SET_NAME, (unsigned long)"Camera HAL preview thread", 0, 0, 0);
+
+    /************************************************************************/
+    /* - If requested for shutter notfication, notify                       */
+    /* - Initialize USB camera with snapshot parameters                     */
+    /* - Time wait (select) on camera fd for camera frame availability      */
+    /* - Dequeue capture buffer from USB camera                             */
+    /* - Send capture buffer to JPEG encoder for JPEG compression           */
+    /* - If jpeg frames callback is requested, callback with jpeg buffers   */
+    /* - Enqueue capture buffer back to USB camera                          */
+    /* - Free USB camera resources and close camera                         */
+    /* - If preview was stopped for taking picture, restart the preview     */
+    /************************************************************************/
+
+    /* Lock is held until this function returns, except where explicitly
+     * dropped around app callbacks below */
+    Mutex::Autolock autoLock(camHal->lock);
+    /************************************************************************/
+    /* - If requested for shutter notfication, notify                       */
+    /************************************************************************/
+#if 0 /* TBD: Temporarily commented out due to an issue. Sometimes it takes */
+    /* long time to get back the lock once unlocked and notify callback */
+    if (camHal->msgEnabledFlag & CAMERA_MSG_SHUTTER){
+        camHal->lock.unlock();
+        camHal->notify_cb(CAMERA_MSG_SHUTTER, 0, 0, camHal->cb_ctxt);
+        camHal->lock.lock();
+    }
+#endif
+    /************************************************************************/
+    /* - Initialize USB camera with snapshot parameters                     */
+    /************************************************************************/
+    USB_CAM_OPEN(camHal);
+
+    /* MJPEG: the camera delivers JPEG directly; YUYV: HAL encodes below */
+#if JPEG_ON_USB_CAMERA
+    rc = initUsbCamera(camHal, camHal->pictWidth, camHal->pictHeight,
+                        V4L2_PIX_FMT_MJPEG);
+#else
+    rc = initUsbCamera(camHal, camHal->pictWidth, camHal->pictHeight,
+                        V4L2_PIX_FMT_YUYV);
+#endif
+    ERROR_CHECK_EXIT_THREAD(rc, "initUsbCamera");
+
+    rc = startUsbCamCapture(camHal);
+    ERROR_CHECK_EXIT_THREAD(rc, "startUsbCamCapture");
+
+    /************************************************************************/
+    /* - Time wait (select) on camera fd for camera frame availability      */
+    /************************************************************************/
+    {
+        fd_set fds;
+        struct timeval tv;
+        int r = 0;
+
+        FD_ZERO(&fds);
+        FD_SET(camHal->fd, &fds);
+
+        /* 1 second per select() attempt; timeouts (r == 0) retry below */
+        tv.tv_sec = 1;
+        tv.tv_usec = 0;
+
+        /* NOTE(review): retries forever on repeated timeouts - if the camera
+         * never produces a frame this thread never exits; consider a bounded
+         * retry count */
+        do{
+            ALOGD("%s: b4 select on camHal->fd : %d", __func__, camHal->fd);
+            r = select(camHal->fd + 1, &fds, NULL, NULL, &tv);
+            ALOGD("%s: after select", __func__);
+        }while((0 == r) || ((-1 == r) && (EINTR == errno)));
+
+        /* NOTE(review): this error return leaves the capture started and the
+         * camera open (no stopUsbCamCapture/USB_CAM_CLOSE) - resource leak
+         * on the failure path; verify against ERROR_CHECK_EXIT_THREAD usage */
+        if ((-1 == r) && (EINTR != errno)){
+            ALOGE("%s: FDSelect ret = %d error: %d", __func__, r, errno);
+            return (void *)-1;
+        }
+
+    }
+    /************************************************************************/
+    /* - Dequeue capture buffer from USB camera                             */
+    /************************************************************************/
+    if (0 == get_buf_from_cam(camHal))
+        ALOGD("%s: get_buf_from_cam success", __func__);
+    else
+        ALOGE("%s: get_buf_from_cam error", __func__);
+
+    /************************************************************************/
+    /* - Send capture buffer to JPEG encoder for JPEG compression           */
+    /************************************************************************/
+    mem_info = &camHal->pictMem.mem_info[0];
+    mem_info->size = MAX_JPEG_BUFFER_SIZE;
+
+    rc = allocate_ion_memory(mem_info,
+                        ((0x1 << CAMERA_ZSL_ION_HEAP_ID) |
+                        (0x1 << CAMERA_ZSL_ION_FALLBACK_HEAP_ID)));
+    if(rc)
+        ALOGE("%s: ION memory allocation failed", __func__);
+
+    /* Wrap the ION fd in a camera_memory_t so it can be handed to the app */
+    camHal->pictMem.camera_memory[0] = camHal->get_memory(
+                        mem_info->fd, mem_info->size, 1, camHal->cb_ctxt);
+    if(!camHal->pictMem.camera_memory[0])
+        ALOGE("%s: get_mem failed", __func__);
+
+#if FREAD_JPEG_PICTURE
+    /* Debug path: serve a canned JPEG from the filesystem */
+    jpegLength = readFromFile("/data/tempVGA.jpeg",
+                    (char*)camHal->pictMem.camera_memory[0]->data,
+                    camHal->pictMem.camera_memory[0]->size);
+    camHal->pictMem.camera_memory[0]->size = jpegLength;
+
+#elif JPEG_ON_USB_CAMERA
+    /* Camera already delivered MJPEG; copy it straight into the app buffer */
+    memcpy((char*)camHal->pictMem.camera_memory[0]->data,
+            (char *)camHal->buffers[camHal->curCaptureBuf.index].data,
+            camHal->curCaptureBuf.bytesused);
+    camHal->pictMem.camera_memory[0]->size = camHal->curCaptureBuf.bytesused;
+    jpegLength = camHal->curCaptureBuf.bytesused;
+
+#else
+    rc = encodeJpeg(camHal);
+    ERROR_CHECK_EXIT_THREAD(rc, "jpeg_encode");
+#endif
+    /* NOTE(review): this only logs; a zero-length JPEG is still delivered
+     * to the app below - confirm whether that is intended */
+    if(jpegLength <= 0)
+        ALOGI("%s: jpegLength : %d", __func__, jpegLength);
+
+     ALOGD("%s: jpegLength : %d", __func__, jpegLength);
+    /************************************************************************/
+    /* - If jpeg frames callback is requested, callback with jpeg buffers   */
+    /************************************************************************/
+    /* TBD: CAMERA_MSG_RAW_IMAGE data call back */
+
+    if ((camHal->msgEnabledFlag & CAMERA_MSG_COMPRESSED_IMAGE) &&
+            (camHal->data_cb)){
+        /* Unlock temporarily, callback might call HAL api in turn */
+        camHal->lock.unlock();
+
+        camHal->data_cb(CAMERA_MSG_COMPRESSED_IMAGE,
+                        camHal->pictMem.camera_memory[0],
+                        0, NULL, camHal->cb_ctxt);
+        camHal->lock.lock();
+    }
+
+    /* release heap memory after the call back */
+    if(camHal->pictMem.camera_memory[0])
+        camHal->pictMem.camera_memory[0]->release(
+            camHal->pictMem.camera_memory[0]);
+
+    rc = deallocate_ion_memory(mem_info);
+    if(rc)
+        ALOGE("%s: ION memory de-allocation failed", __func__);
+
+    /************************************************************************/
+    /* - Enqueue capture buffer back to USB camera                          */
+    /************************************************************************/
+    if(0 == put_buf_to_cam(camHal)) {
+        ALOGD("%s: put_buf_to_cam success", __func__);
+    }
+    else
+        ALOGE("%s: put_buf_to_cam error", __func__);
+
+    /************************************************************************/
+    /* - Free USB camera resources and close camera                         */
+    /************************************************************************/
+    rc = stopUsbCamCapture(camHal);
+    ERROR_CHECK_EXIT_THREAD(rc, "stopUsbCamCapture");
+
+    rc = unInitV4L2mmap(camHal);
+    ERROR_CHECK_EXIT_THREAD(rc, "unInitV4L2mmap");
+
+    USB_CAM_CLOSE(camHal);
+    /************************************************************************/
+    /* - If preview was stopped for taking picture, restart the preview     */
+    /************************************************************************/
+    if(camHal->prvwStoppedForPicture)
+    {
+        struct camera_device    device;
+        device.priv = (void *)camHal;
+
+        USB_CAM_OPEN(camHal);
+        /* Unlock temporarily coz usbcam_start_preview has a lock */
+        camHal->lock.unlock();
+        rc = usbcam_start_preview(&device);
+        if(rc)
+            ALOGE("%s: start_preview error after take picture", __func__);
+        camHal->lock.lock();
+        camHal->prvwStoppedForPicture = 0;
+    }
+
+    /* take picture activity is done */
+    camHal->takePictInProgress = 0;
+
+    ALOGI("%s: X", __func__);
+    return (void *)0;
+}
+
+/******************************************************************************
+ * Function: allocate_ion_memory
+ * Description: Allocates a page-aligned, cached ION buffer and returns a
+ *              shareable fd for it.  On success fills in mem_info with the
+ *              ION device fd, buffer fd, handle and the (rounded-up) size.
+ *
+ * Input parameters:
+ *  mem_info                - [in/out] mem_info->size is the requested size;
+ *                              on success the fd/handle/size fields are set
+ *  ion_type                - ION heap-id mask to allocate from
+ *
+ * Return values:
+ *   0      No error
+ *   -1     Error (mem_info is left unmodified)
+ *
+ * Notes: Uses the classic goto-chain so each failure unwinds exactly the
+ *        resources acquired so far.
+ *****************************************************************************/
+static int allocate_ion_memory(QCameraHalMemInfo_t *mem_info, int ion_type)
+{
+    int                         rc = 0;
+    struct ion_handle_data      handle_data;
+    struct ion_allocation_data  alloc;
+    struct ion_fd_data          ion_info_fd;
+    int                         main_ion_fd = 0;
+
+    /* fd 0 is treated as failure: this file uses 0 as the "unset fd"
+     * sentinel (see deallocate_ion_memory), so a valid fd of 0 would be
+     * indistinguishable from "not allocated" - presumably intentional */
+    main_ion_fd = open("/dev/ion", O_RDONLY);
+    if (main_ion_fd <= 0) {
+        ALOGE("Ion dev open failed %s\n", strerror(errno));
+        goto ION_OPEN_FAILED;
+    }
+
+    memset(&alloc, 0, sizeof(alloc));
+    alloc.len = mem_info->size;
+    /* to make it page size aligned */
+    alloc.len = (alloc.len + 4095) & (~4095);
+    alloc.align = 4096;
+    alloc.flags = ION_FLAG_CACHED;
+    alloc.heap_id_mask = ion_type;
+    rc = ioctl(main_ion_fd, ION_IOC_ALLOC, &alloc);
+    if (rc < 0) {
+        ALOGE("ION allocation failed\n");
+        goto ION_ALLOC_FAILED;
+    }
+
+    /* Turn the ION handle into a dma-buf fd that can be mmap'd / shared */
+    memset(&ion_info_fd, 0, sizeof(ion_info_fd));
+    ion_info_fd.handle = alloc.handle;
+    rc = ioctl(main_ion_fd, ION_IOC_SHARE, &ion_info_fd);
+    if (rc < 0) {
+        ALOGE("ION map failed %s\n", strerror(errno));
+        goto ION_MAP_FAILED;
+    }
+
+    /* Publish results only after every step has succeeded */
+    mem_info->main_ion_fd = main_ion_fd;
+    mem_info->fd = ion_info_fd.fd;
+    mem_info->handle = ion_info_fd.handle;
+    mem_info->size = alloc.len;
+    return 0;
+
+ION_MAP_FAILED:
+    /* Free the ION handle allocated above before closing the device */
+    memset(&handle_data, 0, sizeof(handle_data));
+    handle_data.handle = ion_info_fd.handle;
+    ioctl(main_ion_fd, ION_IOC_FREE, &handle_data);
+ION_ALLOC_FAILED:
+    close(main_ion_fd);
+ION_OPEN_FAILED:
+    return -1;
+}
+
+/******************************************************************************
+ * Function: deallocate_ion_memory
+ * Description: Releases the resources recorded in mem_info by
+ *              allocate_ion_memory: the shared buffer fd, the ION handle,
+ *              and the ION device fd.  Released fields are reset to 0,
+ *              the file's "unset fd" sentinel, so repeated calls are safe.
+ *
+ * Input parameters:
+ *  mem_info                - memory info populated by allocate_ion_memory
+ *
+ * Return values:
+ *   0      Always (failures of ioctl/close are not reported)
+ *
+ * Notes: none
+ *****************************************************************************/
+static int deallocate_ion_memory(QCameraHalMemInfo_t *mem_info)
+{
+    struct ion_handle_data handle_data;
+
+    /* Drop the shared (dma-buf) fd, if one was handed out */
+    if (mem_info->fd > 0) {
+        close(mem_info->fd);
+        mem_info->fd = 0;
+    }
+
+    /* Free the ION handle, then close the ION device itself */
+    if (mem_info->main_ion_fd > 0) {
+        memset(&handle_data, 0, sizeof(handle_data));
+        handle_data.handle = mem_info->handle;
+        ioctl(mem_info->main_ion_fd, ION_IOC_FREE, &handle_data);
+        close(mem_info->main_ion_fd);
+        mem_info->main_ion_fd = 0;
+    }
+
+    return 0;
+}
+
+/******************************************************************************
+ * Function: readFromFile
+ * Description: This function reads data from the given file into given buffer
+ *
+ * Input parameters:
+ *  fileName                - path of the file to read
+ *  buffer                  - destination buffer
+ *  bufferSize              - capacity of buffer in bytes
+ *
+ * Return values:
+ *   int    bytesRead (0 on any error)
+ *
+ * Notes: none
+ *****************************************************************************/
+static int readFromFile(char* fileName, char* buffer, int bufferSize)
+{
+    int bytesRead = 0;
+    long fileSize = 0;
+    FILE *fp;
+
+    fp = fopen(fileName, "rb");
+    if(!fp){
+        ALOGE("%s: Error in opening %s ", __func__, fileName);
+        return bytesRead;
+    }
+
+    /* Determine the actual file size; the original never set fileSize,
+     * which left the size guard below permanently dead */
+    if (0 == fseek(fp, 0, SEEK_END)) {
+        fileSize = ftell(fp);
+        rewind(fp);
+    }
+
+    /* If file is bigger than the given buffer, exit */
+    if (fileSize > (long)bufferSize){
+        ALOGE("%s: Error %ld > %d", __func__, fileSize, bufferSize);
+        fclose(fp);
+        return bytesRead;
+    }
+
+    bytesRead = fread(buffer, 1, bufferSize, fp);
+    ALOGD(" %s: bytesRead: %d", __func__, bytesRead);
+
+    /* Original leaked fp on every path */
+    fclose(fp);
+    return bytesRead;
+}
+
+/******************************************************************************
+ * Function: encodeJpeg
+ * Description: This function initializes Jpeg encoder and calls jpeg encoder
+ *              call and waits for the encode to complete
+ *
+ * Input parameters:
+ *  camHal                  - camera HAL handle
+ *
+ * Return values:
+ *   0  No Error
+ *  -1  Error
+ *
+ * Notes: The encode completion is signalled by jpegEncodeCb through
+ *        camHal->jpegEncMutex/jpegEncCond; both are initialized here
+ *        BEFORE the job is submitted so the callback can never touch an
+ *        uninitialized mutex/condvar (the original initialized them after
+ *        start_job, racing the encoder thread).
+ *****************************************************************************/
+int encodeJpeg(camera_hardware_t *camHal)
+{
+    int                 rc = 0;
+    mm_jpeg_ops_t       mmJpegOps;
+    int                 jpegEncHdl  = 0;
+    mm_jpeg_job         mmJpegJob;
+    src_image_buffer_info   *srcBuf = NULL;
+    QCameraHalMemInfo_t jpegInMemInfo;
+    camera_memory_t*    jpegInMem;
+    uint32_t            jobId;
+
+    ALOGI("%s: E", __func__);
+
+    /************************************************************************/
+    /* - Allocate Jpeg input buffer from ION memory                         */
+    /************************************************************************/
+    /* 2 bytes per pixel: source frame is YUYV */
+    jpegInMemInfo.size = camHal->pictWidth * camHal->pictHeight * 2;
+    rc = allocate_ion_memory(&jpegInMemInfo,
+                        ((0x1 << CAMERA_ZSL_ION_HEAP_ID) |
+                        (0x1 << CAMERA_ZSL_ION_FALLBACK_HEAP_ID)));
+    ERROR_CHECK_EXIT(rc, "allocate_ion_memory");
+
+    jpegInMem = camHal->get_memory(
+                        jpegInMemInfo.fd, jpegInMemInfo.size, 1, camHal->cb_ctxt);
+    if(!jpegInMem){
+        ALOGE("%s: get_mem failed", __func__);
+        deallocate_ion_memory(&jpegInMemInfo);
+        return -1;
+    }
+
+    /* Encoder consumes NV12; convert the captured YUYV frame */
+    rc = convert_YUYV_to_420_NV12(
+        (char *)camHal->buffers[camHal->curCaptureBuf.index].data,
+        (char *)jpegInMem->data, camHal->pictWidth, camHal->pictHeight);
+    ERROR_CHECK_EXIT(rc, "convert_YUYV_to_420_NV12");
+    /************************************************************************/
+    /* - Populate JPEG encoding parameters from the camHal context          */
+    /************************************************************************/
+    memset(&mmJpegJob, 0, sizeof(mmJpegJob));
+
+    mmJpegJob.job_type              = JPEG_JOB_TYPE_ENCODE;
+    mmJpegJob.encode_job.jpeg_cb    = jpegEncodeCb;
+    mmJpegJob.encode_job.userdata   = (void *)camHal;
+    /* TBD: Rotation to be set from settings sent from app */
+    mmJpegJob.encode_job.encode_parm.rotation           = 0;
+    mmJpegJob.encode_job.encode_parm.exif_numEntries    = 0;
+    mmJpegJob.encode_job.encode_parm.exif_data          = NULL;
+
+    /* TBD: Add thumbnail support */
+    mmJpegJob.encode_job.encode_parm.buf_info.src_imgs.src_img_num = 1;
+    mmJpegJob.encode_job.encode_parm.buf_info.src_imgs.is_video_frame = 0;
+
+    /* Fill main image information */
+    srcBuf = &mmJpegJob.encode_job.encode_parm.buf_info.src_imgs.src_img[0];
+    srcBuf->type                = JPEG_SRC_IMAGE_TYPE_MAIN;
+    srcBuf->img_fmt             = JPEG_SRC_IMAGE_FMT_YUV;
+    /* TBD: convert from YUYV to CRCBH2V2 */
+    srcBuf->color_format        = MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
+    srcBuf->num_bufs            = 1;
+    srcBuf->src_image[0].fd        = jpegInMemInfo.fd;
+    srcBuf->src_image[0].buf_vaddr = (uint8_t*)jpegInMem->data;
+    //srcBuf->src_image[0].offset    = 0;
+    srcBuf->src_dim.width       = camHal->pictWidth;
+    srcBuf->src_dim.height      = camHal->pictHeight;
+    srcBuf->out_dim.width       = camHal->pictWidth;
+    srcBuf->out_dim.height      = camHal->pictHeight;
+    srcBuf->crop.offset_x       = 0;
+    srcBuf->crop.offset_y       = 0;
+    srcBuf->crop.width          = srcBuf->src_dim.width;
+    srcBuf->crop.height         = srcBuf->src_dim.height;
+    srcBuf->quality             = camHal->pictJpegQlty;
+
+    /* TBD:Fill thumbnail image information */
+
+    /* Fill out buf information */
+    mmJpegJob.encode_job.encode_parm.buf_info.sink_img.buf_vaddr =
+                            (uint8_t*)camHal->pictMem.camera_memory[0]->data;
+    mmJpegJob.encode_job.encode_parm.buf_info.sink_img.fd = 0;
+    /* TBD: hard coded for 1.5 bytes per pixel */
+    mmJpegJob.encode_job.encode_parm.buf_info.sink_img.buf_len =
+                            camHal->pictWidth * camHal->pictHeight * 1.5;
+
+    /************************************************************************/
+    /* - Initialize jpeg encoder and call Jpeg encoder start                */
+    /************************************************************************/
+    memset(&mmJpegOps, 0, sizeof(mm_jpeg_ops_t));
+    jpegEncHdl = jpeg_open(&mmJpegOps);
+    if(!jpegEncHdl){
+        /* Original only logged and then used the dead handle anyway */
+        ALOGE("%s: Failed to open Jpeg Encoder instance", __func__);
+        jpegInMem->release(jpegInMem);
+        deallocate_ion_memory(&jpegInMemInfo);
+        return -1;
+    }else
+        ALOGD("%s: jpegEncHdl = %d", __func__, jpegEncHdl);
+
+    /* Sync objects MUST be ready before the job is submitted: the encoder
+     * callback locks jpegEncMutex and signals jpegEncCond */
+    pthread_mutex_init(&camHal->jpegEncMutex, NULL);
+    pthread_cond_init(&camHal->jpegEncCond, NULL);
+
+    camHal->jpegEncInProgress = 1;
+    rc = mmJpegOps.start_job(jpegEncHdl, &mmJpegJob, &jobId);
+    if(rc){
+        ALOGE("%s: start_job failed rc = %d", __func__, rc);
+        /* No callback will arrive for a rejected job; don't wait for it */
+        camHal->jpegEncInProgress = 0;
+    }
+
+    /************************************************************************/
+    /* - Wait for JPEG encoder to complete encoding                         */
+    /************************************************************************/
+    pthread_mutex_lock(&camHal->jpegEncMutex);
+    while(camHal->jpegEncInProgress)
+        pthread_cond_wait(&camHal->jpegEncCond, &camHal->jpegEncMutex);
+    pthread_mutex_unlock(&camHal->jpegEncMutex);
+
+    /************************************************************************/
+    /* - De-allocate Jpeg input buffer from ION memory                      */
+    /************************************************************************/
+    if(jpegInMem)
+        jpegInMem->release(jpegInMem);
+
+    rc = deallocate_ion_memory(&jpegInMemInfo);
+    if(rc)
+        ALOGE("%s: ION memory de-allocation failed", __func__);
+
+    ALOGI("%s: X rc = %d", __func__, rc);
+    return rc;
+}
+
+/******************************************************************************
+ * Function: jpegEncodeCb
+ * Description: This is a call back function registered with JPEG encoder.
+ *              Jpeg encoder calls this function on completion of encoding
+ *
+ * Input parameters:
+ *  status                  - encode job status from the encoder
+ *  thumbnailDroppedFlag    - set if the thumbnail could not be encoded
+ *  client_hdl              - encoder client handle
+ *  jobId                   - id of the completed job
+ *  out_data                - encoded JPEG bitstream
+ *  data_size               - size of out_data in bytes
+ *  userData                - camera HAL handle registered with the job
+ *
+ * Return values: none
+ *
+ * Notes: Runs on the encoder's thread; wakes the waiter in encodeJpeg.
+ *****************************************************************************/
+void jpegEncodeCb   (jpeg_job_status_t status,
+                       uint8_t thumbnailDroppedFlag,
+                       uint32_t client_hdl,
+                       uint32_t jobId,
+                       uint8_t* out_data,
+                       uint32_t data_size,
+                       void *userData)
+{
+    camera_hardware_t *camHal = NULL;
+
+    ALOGI("%s: E status = %d", __func__, status);
+
+    camHal = (camera_hardware_t*) userData;
+    if(!camHal){
+        ALOGE("%s: userData is NULL", __func__);
+        return;
+    }
+
+    if(JPEG_JOB_STATUS_DONE == status){
+        ALOGD("%s: JPEG encode successful. out_data:%p, size: %d", __func__,
+            out_data, data_size);
+    }else{
+        ALOGE("%s: JPEG encode failed, status = %d", __func__, status);
+    }
+
+    /* Clear the flag under the mutex so the waiter cannot miss the state
+     * change, and clear it on failure too: the original left it set on
+     * error, leaving encodeJpeg blocked on the condvar forever */
+    pthread_mutex_lock(&camHal->jpegEncMutex);
+    camHal->jpegEncInProgress = 0;
+    pthread_cond_signal(&camHal->jpegEncCond);
+    pthread_mutex_unlock(&camHal->jpegEncMutex);
+
+    ALOGI("%s: X", __func__);
+    return;
+}
+
+/******************************************************************************/
+}; // namespace android